From f93f46bb1aadcc908147fe47968f30c7aaca0e0a Mon Sep 17 00:00:00 2001 From: Chi Wai Chan Date: Thu, 1 Jun 2023 19:58:30 +0800 Subject: [PATCH] Initial commit --- .github/CODEOWNERS | 11 + .github/workflows/pr.yaml | 23 + .github/workflows/sonar.yaml | 14 + .gitignore | 43 ++ .pre-commit-config.yaml | 22 + LICENSE | 674 ++++++++++++++++++ Makefile | 64 ++ README.md | 64 ++ prometheus_hardware_exporter/__init__.py | 9 + prometheus_hardware_exporter/__main__.py | 43 ++ prometheus_hardware_exporter/collector.py | 111 +++ .../collectors/__init__.py | 1 + .../collectors/storcli.py | 132 ++++ prometheus_hardware_exporter/config.py | 50 ++ prometheus_hardware_exporter/core.py | 131 ++++ prometheus_hardware_exporter/exporter.py | 59 ++ prometheus_hardware_exporter/utils.py | 82 +++ pyproject.toml | 82 +++ rename.sh | 17 + requirements.txt | 3 + setup.py | 28 + snap/hooks/install | 8 + snap/snapcraft.yaml | 30 + sonar-project.properties | 8 + tests/functional/conftest.py | 64 ++ tests/functional/requirements.txt | 1 + tests/functional/test_snap.py | 41 ++ tests/unit/requirements.txt | 1 + tests/unit/test_cli.py | 32 + tests/unit/test_collector.py | 48 ++ tests/unit/test_config.py | 48 ++ tests/unit/test_core.py | 32 + tests/unit/test_exporter.py | 20 + .../unit/test_resources/cx_vall_show_all.txt | 84 +++ tests/unit/test_resources/show_all.txt | 57 ++ tests/unit/test_resources/show_ctrlcount.txt | 16 + tests/unit/test_storcli.py | 83 +++ tests/unit/test_utils.py | 34 + tox.ini | 74 ++ 39 files changed, 2344 insertions(+) create mode 100644 .github/CODEOWNERS create mode 100644 .github/workflows/pr.yaml create mode 100644 .github/workflows/sonar.yaml create mode 100644 .gitignore create mode 100644 .pre-commit-config.yaml create mode 100644 LICENSE create mode 100644 Makefile create mode 100644 README.md create mode 100644 prometheus_hardware_exporter/__init__.py create mode 100644 prometheus_hardware_exporter/__main__.py create mode 100644 prometheus_hardware_exporter/collector.py create mode 100644 prometheus_hardware_exporter/collectors/__init__.py create mode 100644 prometheus_hardware_exporter/collectors/storcli.py create mode 100644 prometheus_hardware_exporter/config.py create mode 100644 prometheus_hardware_exporter/core.py create mode 100644 prometheus_hardware_exporter/exporter.py create mode 100644 prometheus_hardware_exporter/utils.py create mode 100644 pyproject.toml create mode 100755 rename.sh create mode 100644 requirements.txt create mode 100644 setup.py create mode 100755 snap/hooks/install create mode 100644 snap/snapcraft.yaml create mode 100644 sonar-project.properties create mode 100644 tests/functional/conftest.py create mode 100644 tests/functional/requirements.txt create mode 100644 tests/functional/test_snap.py create mode 100644 tests/unit/requirements.txt create mode 100644 tests/unit/test_cli.py create mode 100644 tests/unit/test_collector.py create mode 100644 tests/unit/test_config.py create mode 100644 tests/unit/test_core.py create mode 100644 tests/unit/test_exporter.py create mode 100644 tests/unit/test_resources/cx_vall_show_all.txt create mode 100644 tests/unit/test_resources/show_all.txt create mode 100644 tests/unit/test_resources/show_ctrlcount.txt create mode 100644 tests/unit/test_storcli.py create mode 100644 tests/unit/test_utils.py create mode 100644 tox.ini diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000..d3d7342 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,11 @@ +# This is a template `CODEOWNERS` file for 
snaps +# This file is managed by bootstack-charms-spec and should not be modified +# within individual snap repos. https://launchpad.net/bootstack-charms-spec + +# For more information about CODEOWNER, please refer to +# https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners#example-of-a-codeowners-file + +# These owners will be the default owners for everything in the repo. Unless a +# later match takes precedence, @canonical/bootstack will be requested for +# review when someone opens a pull request. +* @canonical/bootstack-reviewers diff --git a/.github/workflows/pr.yaml b/.github/workflows/pr.yaml new file mode 100644 index 0000000..864ae61 --- /dev/null +++ b/.github/workflows/pr.yaml @@ -0,0 +1,23 @@ +# This is a template `pr.yaml` file for snaps +# This file is managed by bootstack-charms-spec and should not be modified +# within individual snap repos. https://launchpad.net/bootstack-charms-spec + +name: PR workflow running lint checkers, unit and functional tests + +on: + pull_request: + types: [ opened, synchronize, reopened ] + branches: [ master, main ] + pull_request_review: + types: [ submitted ] + +jobs: + pr: + uses: canonical/bootstack-actions/.github/workflows/pull-request.yaml@main + secrets: inherit + with: + python-version-unit: "['3.8', '3.10']" + python-version-func: "3.10" + tox-version: "<4" + snapcraft: true + commands: "['make functional']" diff --git a/.github/workflows/sonar.yaml b/.github/workflows/sonar.yaml new file mode 100644 index 0000000..07c56fd --- /dev/null +++ b/.github/workflows/sonar.yaml @@ -0,0 +1,14 @@ +# This is a template `sonar.yaml` file for snaps +# This file is managed by bootstack-charms-spec and should not be modified +# within individual snap repos. https://launchpad.net/bootstack-charms-spec + +name: SonarCloud +on: + workflow_run: + workflows: [PR workflow running lint checkers, unit and functional tests] + types: [completed] + +jobs: + sonar: + uses: canonical/bootstack-actions/.github/workflows/sonar.yaml@main + secrets: inherit diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..b6b2573 --- /dev/null +++ b/.gitignore @@ -0,0 +1,43 @@ +# This is a template `.gitignore` file for snaps +# This file is managed by bootstack-charms-spec and should not be modified +# within individual snap repos. https://launchpad.net/bootstack-charms-spec + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# Tests files and dir +.pytest_cache/ +.coverage +.tox +.venv +reports/ +**/report/ +htmlcov/ +.mypy_cache + +# Log files +*.log + +# IDEs +.idea/ +.vscode/ + +# vi +.*.swp + +# version data +repo-info + +# Python builds +deb_dist/ +dist/ +*.egg-info/ + +# Snaps +*.snap + +# Builds +.build/ +build/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..769738b --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,22 @@ +# This is a template `.pre-commit-config.yaml` file for snaps +# This file is managed by bootstack-charms-spec and should not be modified +# within individual snap repos. 
https://launchpad.net/bootstack-charms-spec + +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.3.0 + hooks: + - id: check-executables-have-shebangs + - id: check-merge-conflict + - id: end-of-file-fixer + - id: trailing-whitespace + - id: check-added-large-files + - id: check-json + - id: check-yaml + - repo: local + hooks: + - id: lint + name: lint + entry: make lint + language: system + types: [ python ] diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..f288702 --- /dev/null +++ b/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. 
If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. 
A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. 
+ + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. 
+ + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. 
+ + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + Copyright (C) + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +. diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..9cb0df0 --- /dev/null +++ b/Makefile @@ -0,0 +1,64 @@ +# This is a template `Makefile` file for snaps +# This file is managed by bootstack-charms-spec and should not be modified +# within individual snap repos. 
https://launchpad.net/bootstack-charms-spec + +PYTHON := /usr/bin/python3 + +PROJECTPATH=$(dir $(realpath ${MAKEFILE_LIST})) +SNAP_NAME=$(shell cat ${PROJECTPATH}/snap/snapcraft.yaml | grep -E '^name:' | awk '{print $$2}') +SNAP_FILE=${PROJECTPATH}/${SNAP_NAME}.snap + +help: + @echo "This project supports the following targets" + @echo "" + @echo " make help - show this text" + @echo " make clean - remove unneeded files" + @echo " make dev-environment - setup the development environment" + @echo " make build - build the snap" + @echo " make lint - run lint checkers" + @echo " make reformat - run lint tools to auto format code" + @echo " make unittests - run the tests defined in the unittest subdirectory" + @echo " make functional - run the tests defined in the functional subdirectory" + @echo " make test - run lint, proof, unittests and functional targets" + @echo " make pre-commit - run pre-commit checks on all the files" + @echo "" + +lint: + @echo "Running lint checks" + @tox -e lint + +unittests: + @echo "Running unit tests" + @tox -e unit -- ${UNIT_ARGS} + +test: lint unittests functional + @echo "Tests completed for the snap." + +reformat: + @echo "Reformat files with black and isort" + @tox -e reformat + +build: + @echo "Building the snap" + @snapcraft --use-lxd + @bash -c ./rename.sh + +clean: + @echo "Cleaning snap" + @snapcraft clean --use-lxd + @echo "Cleaning existing snap builds" + @rm -rf ${SNAP_FILE} + +dev-environment: + @echo "Creating virtualenv and installing pre-commit" + @tox -r -e dev-environment + +functional: build + @echo "Executing functional tests using built snap" + @TEST_SNAP=${SNAP_FILE} tox -e func -- ${FUNC_ARGS} + +pre-commit: + @tox -e pre-commit + +# The targets below don't depend on a file +.PHONY: help clean dev-environment build lint reformat unittests functional test pre-commit diff --git a/README.md b/README.md new file mode 100644 index 0000000..5fce1df --- /dev/null +++ b/README.md @@ -0,0 +1,64 @@ +# Prometheus Exporter Snap for charm-hardware + + +## Getting Started + +Install the snap from snap store: + +```bash +$ sudo snap install prometheus-hardware-exporter --classic +``` + +Start the exporter: + +```bash +$ sudo snap start prometheus-hardware-exporter +``` + +## Snap Configuration + +The install hook (`./snap/hooks/install`) will generate a default configuration +for the exporter. By default, the exporter is started at port 10000 with a +logging level of INFO. + +You can change the default configuration by editing + +```bash +$ /var/snap/prometheus-hardware-exporter/current/config.yaml +``` + +and then restart the snap by + +```bash +$ sudo snap restart prometheus-hardware-exporter +``` + +## Local Build and Testing + +You need `snapcraft` to build the snap: + +```bash +$ sudo snap install snapcraft --classic +``` + +Snapcraft also requires backend to create isolated build environment, you can +choose the following two backends: + +- [LXD](https://linuxcontainers.org/lxd/introduction/), which creates container + image build instances. It can be used inside virtual machines. +- [Multipass](https://multipass.run/), which creates virtual machine build + instances. It cannot be reliably used on platforms that do not support nested + virtualization. For instance, Multipass will most likely not run inside a + virtual machine itself. 
+ +To build the snap: + +```bash +$ make build +``` + +To try the snap that was built, you can install it locally: + +```bash +$ sudo snap install --devmode ./$(grep -E "^name:" snap/snapcraft.yaml | awk '{print $2}').snap +``` diff --git a/prometheus_hardware_exporter/__init__.py b/prometheus_hardware_exporter/__init__.py new file mode 100644 index 0000000..8348d5c --- /dev/null +++ b/prometheus_hardware_exporter/__init__.py @@ -0,0 +1,9 @@ +"""Configuring global logger.""" + +import logging + +logging.basicConfig( + level=logging.WARNING, + datefmt="%Y-%m-%d %H:%M:%S", + format="%(asctime)s %(levelname)s %(message)s", +) diff --git a/prometheus_hardware_exporter/__main__.py b/prometheus_hardware_exporter/__main__.py new file mode 100644 index 0000000..78f42ef --- /dev/null +++ b/prometheus_hardware_exporter/__main__.py @@ -0,0 +1,43 @@ +"""Package entrypoint.""" + +import argparse +import logging + +from .collector import MegaRAIDCollector +from .config import DEFAULT_CONFIG, Config +from .exporter import Exporter + +root_logger = logging.getLogger() + + +def parse_command_line() -> argparse.Namespace: + """Command line parser. + + Parse command line arguments and return the arguments. + + Returns: + args: Command line arguments. + """ + parser = argparse.ArgumentParser( + prog=__package__, + description=__doc__, + ) + parser.add_argument("-c", "--config", help="Set configuration file.", default="", type=str) + args = parser.parse_args() + + return args + + +def main() -> None: + """Start the prometheus-hardware-exporter.""" + args = parse_command_line() + config = Config.load_config(config_file=args.config or DEFAULT_CONFIG) + root_logger.setLevel(logging.getLevelName(config.level)) + + exporter = Exporter(config.port) + exporter.register(MegaRAIDCollector()) + exporter.run() + + +if __name__ == "__main__": # pragma: no cover + main() diff --git a/prometheus_hardware_exporter/collector.py b/prometheus_hardware_exporter/collector.py new file mode 100644 index 0000000..e0b8910 --- /dev/null +++ b/prometheus_hardware_exporter/collector.py @@ -0,0 +1,111 @@ +"""Module for a collection of hardware collecters.""" + +from logging import getLogger +from typing import Dict, List + +from prometheus_client.metrics_core import GaugeMetricFamily + +from .collectors.storcli import StorCLI +from .core import BlockingCollector, Payload, Specification + +logger = getLogger(__name__) + + +class MegaRAIDCollector(BlockingCollector): + """Collector for MegaRAID controller.""" + + storcli = StorCLI() + + @property + def specifications(self) -> List[Specification]: + """Define MegaRAID metric specs.""" + return [ + Specification( + name="megaraid_controllers", + documentation="Total number of controllers", + labels=["hostname"], + metric_class=GaugeMetricFamily, + ), + Specification( + name="megaraid_virtual_drive", + documentation="Number of virtual drives", + labels=["controller_id"], + metric_class=GaugeMetricFamily, + ), + Specification( + name="megaraid_virtual_drive_state", + documentation="Indicates the state of virtual drive", + labels=["controller_id", "virtual_drive_id", "state"], + metric_class=GaugeMetricFamily, + ), + Specification( + name="megaraid_virtual_drive_cache_policy", + documentation="Indicates the cache policy of virtual drive", + labels=["controller_id", "virtual_drive_id", "cache_policy"], + metric_class=GaugeMetricFamily, + ), + Specification( + name="storcli_command_success", + documentation="Indicates the if command is successful or not", + labels=[], + 
+                metric_class=GaugeMetricFamily,
+            ),
+        ]
+
+    def fetch(self) -> List[Payload]:
+        """Load the MegaRAID related information."""
+        controller_payload, error_con = self.storcli.get_controllers()
+        virtual_drives_payload, error_vd = self.storcli.get_all_virtual_drives()
+
+        if any([error_con, error_vd]):
+            logger.error(
+                "Failed to get MegaRAID controller information using %s", self.storcli.command
+            )
+            return [
+                Payload(
+                    name="storcli_command_success",
+                    labels=[],
+                    value=0.0,
+                )
+            ]
+
+        payloads = [
+            Payload(
+                name="megaraid_controllers",
+                labels=[controller_payload["hostname"]],  # type: ignore[index]
+                value=controller_payload["count"],  # type: ignore[index]
+            ),
+            Payload(
+                name="storcli_command_success",
+                labels=[],
+                value=1.0,
+            ),
+        ]
+        for ctrl_id, vds_payload in virtual_drives_payload.items():  # type: ignore[union-attr]
+            payloads.append(
+                Payload(
+                    name="megaraid_virtual_drive",
+                    labels=[str(ctrl_id)],
+                    value=len(vds_payload),  # type: ignore[arg-type]
+                )
+            )
+            for vd_payload in vds_payload:  # type: ignore[union-attr]
+                payloads.append(
+                    Payload(
+                        name="megaraid_virtual_drive_state",
+                        labels=[vd_payload["DG"], vd_payload["VD"], vd_payload["state"]],
+                        value=1.0,
+                    )
+                )
+                payloads.append(
+                    Payload(
+                        name="megaraid_virtual_drive_cache_policy",
+                        labels=[vd_payload["DG"], vd_payload["VD"], vd_payload["cache"]],
+                        value=1.0,
+                    )
+                )
+        return payloads
+
+    def process(self, payloads: List[Payload], datastore: Dict[str, Payload]) -> List[Payload]:
+        """Process the payload if needed."""
+        return payloads
diff --git a/prometheus_hardware_exporter/collectors/__init__.py b/prometheus_hardware_exporter/collectors/__init__.py
new file mode 100644
index 0000000..a200ff9
--- /dev/null
+++ b/prometheus_hardware_exporter/collectors/__init__.py
@@ -0,0 +1 @@
+"""Init file for collectors submodule."""
diff --git a/prometheus_hardware_exporter/collectors/storcli.py b/prometheus_hardware_exporter/collectors/storcli.py
new file mode 100644
index 0000000..d10384d
--- /dev/null
+++ b/prometheus_hardware_exporter/collectors/storcli.py
@@ -0,0 +1,132 @@
+"""Collector for MegaRAID controller."""
+
+import re
+from logging import getLogger
+from typing import Any, Dict, List, Optional, Tuple
+
+from ..utils import Command
+
+logger = getLogger(__name__)
+
+
+class StorCLI(Command):
+    """Command line tool for MegaRAID Controller."""
+
+    prefix = ""
+    command = "storcli"
+    installed = False
+
+    def _get_all_virtual_drives(
+        self, controller: int
+    ) -> Tuple[Optional[List[Dict[str, str]]], Optional[Exception]]:
+        """Get all virtual drive information in this controller.
+
+        Equivalent to running `storcli /cx/vall show all` for controller "x".
+
+        Returns:
+            virtual_drives: a list of virtual drive information dictionaries, or None
+            error: an exception if there is any, or None
+        """
+        result, error = self(f"/c{controller}/vall show J")
+        if error:
+            return None, error
+
+        dg_vd_regex = re.compile(r'"DG\/VD"\s*:\s*"(\d*)\/(\d*)"')
+        state_regex = re.compile(r'"State"\s*:\s*"(\w*)"')
+        cache_regex = re.compile(r'"Cache"\s*:\s*"(\w*)"')
+        dg_vd_matches = dg_vd_regex.findall(result)  # type: ignore[arg-type]
+        state_matches = state_regex.findall(result)  # type: ignore[arg-type]
+        cache_matches = cache_regex.findall(result)  # type: ignore[arg-type]
+        if not all([dg_vd_matches, state_matches, cache_matches]):
+            return None, ValueError(
+                f"Controller {controller}: cannot get virtual drive information."
+            )
+
+        payloads = []
+        for ctrl_id, virtual_device, state, cache in zip(
+            [i[0] for i in dg_vd_matches],
+            [i[1] for i in dg_vd_matches],
+            state_matches,
+            cache_matches,
+        ):
+            payloads.append(
+                {
+                    "DG": ctrl_id,
+                    "VD": virtual_device,
+                    "state": state,
+                    "cache": cache,
+                }
+            )
+        return payloads, None
+
+    def _get_controller_ids(self) -> Tuple[Optional[List[int]], Optional[Exception]]:
+        """Get controller ids.
+
+        Returns:
+            ids: list of controller ids, or None
+            error: an exception if there is any, or None
+        """
+        result, error = self("show ctrlcount J")
+        if error:
+            return None, ValueError("Cannot get controller ids.")
+
+        num_controller_regex = re.compile(r'"Controller Count"\s*:\s*(?P<num>\d*)')
+        num_match = num_controller_regex.search(result)  # type: ignore[arg-type]
+        if not num_match:
+            error_msg = "Cannot get the number of controllers."
+            return None, ValueError(error_msg)
+
+        return list(range(int(num_match.group("num")))), None
+
+    def get_controllers(self) -> Tuple[Optional[Dict[str, Any]], Optional[Exception]]:
+        """Get the number of controllers.
+
+        Returns:
+            payload: a dictionary with the number of controllers and the hostname, or None
+            error: an exception if there is any, or None
+        """
+        result, error = self("show all J")
+        if error:
+            logger.error("Cannot get the number of controllers.")
+            return None, error
+
+        num_controller_regex = re.compile(r'"Number of Controllers"\s*:\s*(?P<num>\d*)')
+        hostname_regex = re.compile(r'"Host Name"\s*:\s*"(?P<hostname>\w*)"')
+        num_match = num_controller_regex.search(result)  # type: ignore[arg-type]
+        hostname_match = hostname_regex.search(result)  # type: ignore[arg-type]
+        if not all([num_match, hostname_match]):
+            error_msg = "Cannot get controller information."
+            logger.error(error_msg)
+            return None, ValueError(error_msg)
+
+        payload = {
+            "count": int(num_match.group("num")),  # type: ignore[union-attr]
+            "hostname": hostname_match.group("hostname"),  # type: ignore[union-attr]
+        }
+        return payload, None
+
+    def get_all_virtual_drives(
+        self,
+    ) -> Tuple[Optional[Dict[int, Optional[List[Dict[str, str]]]]], Optional[Exception]]:
+        """Get all virtual drive information.
+
+        Equivalent to running `storcli /cx/vall show all` for every controller "x".
+
+        Returns:
+            virtual_drives: dictionary of all virtual drive information, or None
+            error: an exception if there is any, or None
+        """
+        ids, error = self._get_controller_ids()
+        if error:
+            logger.error(str(error))
+            return None, error
+
+        payload = {}
+        if ids:
+            for controller_id in ids:
+                vd_payload, error = self._get_all_virtual_drives(controller_id)
+                if error:
+                    logger.error(str(error))
+                    return None, error
+                payload[controller_id] = vd_payload
+        return payload, None
diff --git a/prometheus_hardware_exporter/config.py b/prometheus_hardware_exporter/config.py
new file mode 100644
index 0000000..c1b5f6b
--- /dev/null
+++ b/prometheus_hardware_exporter/config.py
@@ -0,0 +1,50 @@
+"""Module for hardware related configuration."""
+
+import os
+from logging import getLogger
+
+from pydantic import BaseModel, validator
+from yaml import safe_load
+
+logger = getLogger(__name__)
+
+DEFAULT_CONFIG = os.path.join(os.environ.get("SNAP_DATA", "./"), "config.yaml")
+
+
+class Config(BaseModel):
+    """Hardware exporter configuration."""
+
+    port: int = 10000
+    level: str = "DEBUG"
+
+    @validator("port")
+    def validate_port_range(cls, port: int) -> int:  # noqa: N805 pylint: disable=E0213
+        """Validate port range."""
+        if not 1 <= port <= 65535:
+            msg = "Port must be in [1, 65535]."
+            logger.error(msg)
+            raise ValueError(msg)
+        return port
+
+    @validator("level")
+    def validate_level_choice(cls, level: str) -> str:  # noqa: N805 pylint: disable=E0213
+        """Validate logging level choice."""
+        level = level.upper()
+        choices = {"DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"}
+        if level not in choices:
+            msg = f"Level must be in {choices} (case-insensitive)."
+            logger.error(msg)
+            raise ValueError(msg)
+        return level
+
+    @classmethod
+    def load_config(cls, config_file: str = DEFAULT_CONFIG) -> "Config":
+        """Load configuration file and validate it."""
+        if not os.path.exists(config_file):
+            msg = f"Configuration file {config_file} does not exist."
+            logger.error(msg)
+            raise ValueError(msg)
+        with open(config_file, "r", encoding="utf-8") as config:
+            logger.info("Loaded exporter configuration: %s.", config_file)
+            data = safe_load(config) or {}
+            return cls(**data)
diff --git a/prometheus_hardware_exporter/core.py b/prometheus_hardware_exporter/core.py
new file mode 100644
index 0000000..012c5d8
--- /dev/null
+++ b/prometheus_hardware_exporter/core.py
@@ -0,0 +1,131 @@
+"""Module for collector core code."""
+
+from abc import abstractmethod
+from dataclasses import dataclass
+from logging import getLogger
+from typing import Dict, Iterable, List, Optional, Type
+
+from prometheus_client.metrics_core import Metric
+from prometheus_client.registry import Collector
+
+from .config import Config
+
+logger = getLogger(__name__)
+
+
+@dataclass
+class Payload:
+    """Container of data for each timeseries."""
+
+    name: str
+    value: float
+    labels: List[str]
+    uuid: str = ""  # the timeseries name
+
+    def __post_init__(self) -> None:
+        """Create uuid based on metric name and labels."""
+        self.uuid = f"{self.name}({self.labels})"
+
+
+@dataclass
+class Specification:
+    """Specification for metrics."""
+
+    name: str
+    labels: List[str]
+    documentation: str
+    metric_class: Type[Metric]
+
+
+class BlockingCollector(Collector):
+    """Base class for blocking collector.
+
+    BlockingCollector base class is intended to be used when the collector is
+    fetching data in a blocking fashion. For example, if the fetching process
+    is reading data from files.
+    """
+
+    def __init__(self, config: Optional[Config] = None) -> None:
+        """Initialize the class."""
+        self.config = config
+        self._datastore: Dict[str, Payload] = {}
+        self._specs = {spec.name: spec for spec in self.specifications}
+
+    @abstractmethod
+    def fetch(self) -> List[Payload]:
+        """User defined method for fetching data.
+
+        Users should define their own method for loading data synchronously.
+        The return value should be a list of `Payload` objects; it will be
+        passed to the user defined `process` method, which defines how the
+        data are used to update the metrics.
+
+        Returns:
+            A list of payloads to be processed.
+        """
+
+    @abstractmethod
+    def process(self, payloads: List[Payload], datastore: Dict[str, Payload]) -> List[Payload]:
+        """User defined method for processing the fetched data.
+
+        Users should define their own method for processing payloads. This
+        includes how to update the metrics using the payloads and how to
+        return the processed payloads.
+
+        Args:
+            payloads: the fetched data to be processed.
+            datastore: the data store for holding previous payloads.
+
+        Returns:
+            The processed payloads.
+        """
+
+    @property
+    @abstractmethod
+    def specifications(self) -> List[Specification]:
+        """User defined property that defines the metrics.
+
+        Returns:
+            A list of specifications.
+        """
+
+    def init_default_datastore(self, payloads: List[Payload]) -> None:
+        """Initialize or fill the data store with default values.
+
+        Args:
+            payloads: the fetched data to be processed.
+        """
+        for payload in payloads:
+            if payload.uuid not in self._datastore:
+                self._datastore[payload.uuid] = Payload(
+                    name=payload.name, labels=payload.labels, value=0.0
+                )
+
+    def collect(self) -> Iterable[Metric]:
+        """Fetch data and update the internal metrics.
+
+        This is a callback method that is used internally within
+        `prometheus_client` every time the exporter server is queried. It does
+        not return anything; instead it needs to yield all the metrics.
+
+        Yields:
+            metrics: the internal metrics
+        """
+        payloads = self.fetch()
+        self.init_default_datastore(payloads)
+        processed_payloads = self.process(payloads, self._datastore)
+
+        # unpack the payloads and create the metrics
+        for payload in processed_payloads:
+            spec = self._specs[payload.name]
+            # We have to ignore the type checking here, since the subclass of
+            # any metric family from prometheus client adds new attributes and
+            # methods.
+            metric = spec.metric_class(  # type: ignore[call-arg]
+                name=spec.name, labels=spec.labels, documentation=spec.documentation
+            )
+            metric.add_metric(  # type: ignore[attr-defined]
+                labels=payload.labels, value=payload.value
+            )
+            yield metric
+            self._datastore[payload.uuid] = payload
diff --git a/prometheus_hardware_exporter/exporter.py b/prometheus_hardware_exporter/exporter.py
new file mode 100644
index 0000000..252ecf4
--- /dev/null
+++ b/prometheus_hardware_exporter/exporter.py
+"""Module for hardware exporter."""
+
+import threading
+from logging import getLogger
+from socketserver import ThreadingMixIn
+from typing import Any
+from wsgiref.simple_server import WSGIRequestHandler, WSGIServer, make_server
+
+from prometheus_client import make_wsgi_app
+from prometheus_client.core import REGISTRY
+from prometheus_client.registry import Collector
+
+logger = getLogger(__name__)
+
+
+class ThreadingWSGIServer(ThreadingMixIn, WSGIServer):
+    """A WSGI server that handles requests in a separate thread."""
+
+    daemon_threads = True
+
+
+class SlientRequestHandler(WSGIRequestHandler):
+    """A silent request handler that logs nothing."""
+
+    def log_message(self, format: str, *args: Any) -> None:  # pylint: disable=W0622
+        """Log nothing."""
+
+
+class Exporter:
+    """The exporter class."""
+
+    def __init__(self, port: int, addr: str = "0.0.0.0") -> None:
+        """Initialize the exporter class.
+
+        Args:
+            port: Start the exporter at this port.
+            addr: Start the exporter at this address.
+        """
+        self.addr = addr
+        self.port = int(port)
+        self.app = make_wsgi_app()
+
+    def register(self, collector: Collector) -> None:
+        """Register a collector to the exporter."""
+        REGISTRY.register(collector)
+
+    def run(self, daemon: bool = False) -> None:
+        """Start the exporter server."""
+        httpd = make_server(
+            self.addr,
+            self.port,
+            self.app,
+            server_class=ThreadingWSGIServer,
+            handler_class=SlientRequestHandler,
+        )
+        logger.info("Started prometheus hardware exporter at %s:%s.", self.addr, self.port)
+        thread = threading.Thread(target=httpd.serve_forever)
+        thread.daemon = daemon
+        thread.start()
diff --git a/prometheus_hardware_exporter/utils.py b/prometheus_hardware_exporter/utils.py
new file mode 100644
index 0000000..81b75de
--- /dev/null
+++ b/prometheus_hardware_exporter/utils.py
+"""Module for helper functions."""
+
+import subprocess
+from logging import getLogger
+from typing import Optional, Tuple
+
+logger = getLogger(__name__)
+
+
+class CommandNotFoundError(Exception):
+    """Command not found error."""
+
+
+class CommandExecutionError(Exception):
+    """Command execution error."""
+
+
+class Command:
+    """Generic wrapper for a command line tool."""
+
+    prefix = ""
+    command = ""
+    installed = False
+
+    def __init__(self) -> None:
+        """Initialize the Command class."""
+        self.installed = self.check_installed()
+
+    def __call__(self, args: Optional[str] = None) -> Tuple[Optional[str], Optional[Exception]]:
+        """Run the command, and return the result and error.
+
+        Returns:
+            result: output of the command, or None
+            error: an exception, or None
+        """
+        if not self.installed:
+            error = CommandNotFoundError(f"{self.command} not installed.")
+            logger.error(str(error))
+            return None, error
+
+        result, error = self.check_output(args=args)  # type: ignore[assignment]
+        if error:
+            logger.error(str(error))
+            return None, error
+        return result, error
+
+    def check_installed(self) -> bool:
+        """Check if the command is installed or not.
+
+        Returns:
+            True if the command can be found, else False.
+        """
+        _, error = self.check_output(prefix="", command=f"which {self.command}")
+        if error:
+            return False
+        return True
+
+    def check_output(
+        self,
+        prefix: Optional[str] = None,
+        command: Optional[str] = None,
+        args: Optional[str] = None,
+    ) -> Tuple[Optional[str], Optional[CommandExecutionError]]:
+        """Run the command, and return the result and error.
+
+        Returns:
+            result: output of the command, or None
+            error: an exception, or None
+        """
+        error = None
+        result = None
+        args = args if args is not None else ""
+        prefix = prefix if prefix is not None else self.prefix
+        command = command if command is not None else self.command
+        full_command = " ".join([prefix, command, args]).strip()
+
+        logger.debug("Running command: %s", full_command)
+        try:
+            result = subprocess.check_output(full_command, shell=True).decode().strip()
+        except subprocess.CalledProcessError as err:
+            error = CommandExecutionError(err)
+        return result, error
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..943db6c
--- /dev/null
+++ b/pyproject.toml
+# This is a template `pyproject.toml` file for snaps
+# This file is managed by bootstack-charms-spec and should not be modified
+# within individual snap repos.
https://launchpad.net/bootstack-charms-spec + +[tool.flake8] +ignore = ["C901", "D100", "D101", "D102", "D103", "W503", "W504"] +exclude = ['.eggs', '.git', '.tox', '.venv', '.build', 'build', 'report'] +max-line-length = 99 +max-complexity = 10 + +[tool.black] +line-length = 99 +exclude = ''' +/( + | .eggs + | .git + | .tox + | .venv + | .build + | build + | report +)/ +''' + +[tool.isort] +profile = "black" +skip_glob = [ + ".eggs", + ".git", + ".tox", + ".venv", + ".build", + "build", + "report" +] + +[tool.pylint] +max-line-length = 99 +ignore-paths = [ + ".eggs", + ".git", + ".tox", + ".venv", + ".build", + "report", + "tests", +] + +[tool.pylint.'MESSAGES CONTROL'] +extension-pkg-allow-list = "pydantic" # Fix pydantic issue in pylint, see https://github.com/pydantic/pydantic/issues/1961 + +[tool.mypy] +warn_unused_ignores = true +warn_unused_configs = true +warn_unreachable = true +disallow_untyped_defs = true +ignore_missing_imports = true +exclude = [ + ".eggs", + ".git", + ".tox", + ".venv", + ".build", + "lib", + "report", + "tests", +] + +[tool.coverage.run] +relative_files = true +source = ["."] +omit = ["tests/**", "docs/**", "lib/**", "snap/**", "build/**", "setup.py"] + +[tool.coverage.report] +fail_under = 100 +show_missing = true + +[tool.coverage.html] +directory = "tests/unit/report/html" + +[tool.coverage.xml] +output = "tests/unit/report/coverage.xml" diff --git a/rename.sh b/rename.sh new file mode 100755 index 0000000..6d645e1 --- /dev/null +++ b/rename.sh @@ -0,0 +1,17 @@ +#!/bin/bash +# This is a template `rename.sh` file for snaps +# This file is managed by bootstack-charms-spec and should not be modified +# within individual snap repos. https://launchpad.net/bootstack-charms-spec + +snap=$(grep -E "^name:" snap/snapcraft.yaml | awk '{print $2}') +echo "renaming ${snap}_*.snap to ${snap}.snap" +echo -n "pwd: " +pwd +ls -al +echo "Removing previous snap if it exists" +if [[ -e "${snap}.snap" ]]; +then + rm "${snap}.snap" +fi +echo "Renaming snap here." 
+mv ${snap}_*.snap ${snap}.snap
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..0d11fbd
--- /dev/null
+++ b/requirements.txt
+prometheus-client
+pydantic
+pyyaml
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..98ab668
--- /dev/null
+++ b/setup.py
+"""Entrypoint for python package."""
+
+from setuptools import setup
+
+configs = {
+    "name": "prometheus-hardware-exporter",
+    "description": "exports hardware related metrics",
+    "use_scm_version": True,
+    "setup_requires": ["setuptools_scm", "pyyaml"],
+    "author": "Canonical BootStack DevOps Centres",
+    "packages": ["prometheus_hardware_exporter", "prometheus_hardware_exporter.collectors"],
+    "url": "https://github.com/canonical/prometheus-hardware-exporter",
+    "entry_points": {
+        "console_scripts": [
+            "prometheus-hardware-exporter="
+            "prometheus_hardware_exporter.__main__:main",
+        ]
+    },
+}
+
+with open("LICENSE", encoding="utf-8") as f:
+    configs.update({"license": f.read()})
+
+with open("README.md", encoding="utf-8") as f:
+    configs.update({"long_description": f.read()})
+
+
+if __name__ == "__main__":
+    setup(**configs)
diff --git a/snap/hooks/install b/snap/hooks/install
new file mode 100755
index 0000000..abc511b
--- /dev/null
+++ b/snap/hooks/install
+#!/bin/sh
+
+# Generate a default config.yaml
+
+cat <<EOF > $SNAP_DATA/config.yaml
+port: 10000
+level: INFO
+EOF
diff --git a/snap/snapcraft.yaml b/snap/snapcraft.yaml
new file mode 100644
index 0000000..e67013c
--- /dev/null
+++ b/snap/snapcraft.yaml
+name: prometheus-hardware-exporter
+base: core22
+adopt-info: prometheus-hardware-exporter
+summary: collects hardware metrics and exports them for Prometheus
+description: |
+  The prometheus-hardware-exporter is a snap for collecting hardware
+  metrics, such as MegaRAID controller and virtual drive status, and
+  exporting them as Prometheus metrics. The metrics are expected to be
+  scraped by Prometheus.
+
+grade: stable
+confinement: classic
+
+apps:
+  prometheus-hardware-exporter:
+    daemon: simple
+    install-mode: disable
+    restart-condition: on-abnormal
+    command: bin/prometheus-hardware-exporter
+    plugs:
+      - network-bind
+
+parts:
+  prometheus-hardware-exporter:
+    source: .
+ plugin: python + python-requirements: [./requirements.txt] + override-build: | + snapcraftctl build + echo "Version: $(python3 setup.py --version)" + snapcraftctl set-version "$(python3 setup.py --version)" diff --git a/sonar-project.properties b/sonar-project.properties new file mode 100644 index 0000000..6a57aef --- /dev/null +++ b/sonar-project.properties @@ -0,0 +1,8 @@ +sonar.projectKey = prometheus-hardware-exporter +sonar.organization = bootstack-devops +sonar.python.version = 3 +sonar.python.coverage.reportPaths = tests/unit/report/coverage.xml +sonar.sources = prometheus_hardware_exporter/ +sonar.tests = tests/ +sonar.test.inclusions = test/** +sonar.coverage.exclusions=tests/**, docs/**, contrib/**, snap/**, * diff --git a/tests/functional/conftest.py b/tests/functional/conftest.py new file mode 100644 index 0000000..bfd421a --- /dev/null +++ b/tests/functional/conftest.py @@ -0,0 +1,64 @@ +import logging +import os +import time +from subprocess import check_call + +import pytest + +TMP_DIR = "/tmp" +SETUP_TIMEOUT = 5 + + +@pytest.fixture() +def available_metric_names(): + return [ + "megaraid_controllers", + "megaraid_virtual_drive", + "megaraid_virtual_drive_state", + "megaraid_virtual_drive_cache_policy", + "storcli_command_success", + ] + + +@pytest.fixture(scope="session") +def snap_name(): + return "prometheus-hardware-exporter" + + +@pytest.fixture(scope="session") +def snap_config(snap_name): + return f"/var/snap/{snap_name}/current/config.yaml" + + +@pytest.fixture(scope="session", autouse=True) +def setup_snap(snap_name): + """Install the package to the system and cleanup afterwards. + + Note: an environment variable TEST_SNAP is needed to install the snap. + """ + test_snap = os.environ.get("TEST_SNAP", None) + if test_snap: + logging.info("Installing %s snap package...", test_snap) + assert os.path.isfile(test_snap) + assert check_call(f"sudo snap install --dangerous {test_snap}".split()) == 0 # noqa + assert check_call(f"sudo snap start {snap_name}".split()) == 0 + else: + logging.error( + "Could not find %s snap package for testing. Needs to build it first.", + snap_name, + ) + + down = 1 + stime = time.time() + while time.time() - stime < SETUP_TIMEOUT and down: + try: + down = check_call("curl http://localhost:10000".split()) + except Exception: + time.sleep(0.5) + assert not down, "Snap is not up." 
+
+    yield test_snap
+
+    if test_snap:
+        logging.info("Removing %s snap package...", snap_name)
+        check_call(f"sudo snap remove {snap_name}".split())
diff --git a/tests/functional/requirements.txt b/tests/functional/requirements.txt
new file mode 100644
index 0000000..e079f8a
--- /dev/null
+++ b/tests/functional/requirements.txt
+pytest
diff --git a/tests/functional/test_snap.py b/tests/functional/test_snap.py
new file mode 100644
index 0000000..ed58309
--- /dev/null
+++ b/tests/functional/test_snap.py
+#!/usr/bin/python3
+"""Test exporter snap."""
+import os
+import re
+from subprocess import run
+
+SNAP_NAME = "prometheus-hardware-exporter"
+
+
+def test_default_config_installed(snap_config):
+    """Check if the snap default config exists."""
+    assert os.path.exists(snap_config)
+
+
+def test_snap_active(snap_name):
+    """Check if the snap is in active state."""
+    result = run(
+        f"systemctl is-active snap.{snap_name}.{snap_name}.service",
+        shell=True,
+        capture_output=True,
+    )
+    assert result.returncode == 0
+    assert result.stdout.decode().strip() == "active"
+
+
+def test_exporter_http_server():
+    """Check if the http server is running."""
+    result = run("curl http://localhost:10000", shell=True, capture_output=True)
+    assert result.returncode == 0
+
+
+def test_exporter_metrics_created(available_metric_names):
+    """Check if the hardware related metrics are created."""
+    result = run("curl http://localhost:10000", shell=True, capture_output=True)
+    assert result.returncode == 0
+
+    output = result.stdout.decode().strip()
+    for metric_name in available_metric_names:
+        pattern = metric_name
+        match = re.search(pattern, output)
+        assert match is not None
diff --git a/tests/unit/requirements.txt b/tests/unit/requirements.txt
new file mode 100644
index 0000000..e079f8a
--- /dev/null
+++ b/tests/unit/requirements.txt
+pytest
diff --git a/tests/unit/test_cli.py b/tests/unit/test_cli.py
new file mode 100644
index 0000000..d94daab
--- /dev/null
+++ b/tests/unit/test_cli.py
+from unittest.mock import Mock, patch
+
+from prometheus_hardware_exporter import __main__
+from prometheus_hardware_exporter.__main__ import main, parse_command_line
+
+
+class TestCli:
+    """Cli test class."""
+
+    @patch("argparse.ArgumentParser")
+    def test_parse_argument(self, mock_argument_parser):
+        parse_command_line()
+        mock_argument_parser.assert_called_once()
+
+    @patch.object(__main__, "parse_command_line")
+    @patch.object(__main__, "Exporter")
+    @patch.object(__main__, "Config")
+    @patch("logging.getLevelName")
+    def test_cli_main(
+        self,
+        mock_get_level_name,
+        mock_config,
+        mock_exporter,
+        mock_main_parse_command_line,
+    ):
+        """Test main function in cli."""
+        mock_main_parse_command_line.return_value = Mock()
+        mock_get_level_name.return_value = "DEBUG"
+        main()
+        mock_main_parse_command_line.assert_called_once()
+        mock_config.load_config.assert_called_once()
+        mock_exporter.assert_called_once()
diff --git a/tests/unit/test_collector.py b/tests/unit/test_collector.py
new file mode 100644
index 0000000..047c87e
--- /dev/null
+++ b/tests/unit/test_collector.py
+import unittest
+from unittest.mock import Mock
+
+from prometheus_hardware_exporter.collector import MegaRAIDCollector
+
+
+class TestCustomCollector(unittest.TestCase):
+    """Custom test class."""
+
+    @classmethod
+    def setUpClass(cls):
+        cls.mock_config = Mock()
+
+    def test_00_mega_raid_collector_not_installed(self):
+        """Test mega raid collector when storcli is not
installed.""" + mega_raid_collector = MegaRAIDCollector(self.mock_config) + payloads = mega_raid_collector.collect() + + self.assertEqual(len(list(payloads)), 1) + + def test_01_mega_raid_collector_installed_and_okay(self): + """Test mega raid collector can fetch correct number of metrics.""" + mega_raid_collector = MegaRAIDCollector(self.mock_config) + mega_raid_collector.storcli = Mock() + + mock_controller_payload = {"count": 1, "hostname": "kongfu"} + mock_virtual_drives_payload = { + "0": [ + { + "DG": 0, + "VD": 239, + "state": "Optl", + "cache": "NRWTD", + } + ], + } + mega_raid_collector.storcli.get_controllers.return_value = mock_controller_payload, False + mega_raid_collector.storcli.get_all_virtual_drives.return_value = ( + mock_virtual_drives_payload, + False, + ) + + payloads = mega_raid_collector.collect() + + available_metrics = [spec.name for spec in mega_raid_collector.specifications] + self.assertEqual(len(list(payloads)), len(available_metrics)) + for payload in payloads: + self.assertIn(payload.name, available_metrics) diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py new file mode 100644 index 0000000..6c4784b --- /dev/null +++ b/tests/unit/test_config.py @@ -0,0 +1,48 @@ +import unittest +from unittest.mock import patch + +import pytest + +from prometheus_hardware_exporter.__main__ import Config + + +class TestConfig(unittest.TestCase): + """Config test class.""" + + def setUp(self): + self.patch_open_file = patch("builtins.open") + self.patch_os_path_exists = patch("os.path.exists", return_value=True) + self.patch_open_file.start() + self.patch_os_path_exists.start() + + def tearDown(self): + self.patch_open_file.stop() + self.patch_os_path_exists.stop() + + @patch("prometheus_hardware_exporter.config.safe_load") + def test_valid_config(self, mock_safe_load): + """Test valid config.""" + mock_safe_load.return_value = { + "port": 10000, + "level": "INFO", + } + config = Config.load_config() + assert config.port == 10000 + assert config.level == "INFO" + + @patch("prometheus_hardware_exporter.config.safe_load") + def test_invalid_config(self, mock_safe_load): + """Test invalid config.""" + mock_safe_load.return_value = { + "port": -10000, + "level": "RANDOM", + } + with pytest.raises(ValueError): + Config.load_config() + + def test_missing_config(self): + """Test missing config.""" + self.patch_open_file.stop() + self.patch_os_path_exists.stop() + with pytest.raises(ValueError): + Config.load_config("random") diff --git a/tests/unit/test_core.py b/tests/unit/test_core.py new file mode 100644 index 0000000..ba08e0c --- /dev/null +++ b/tests/unit/test_core.py @@ -0,0 +1,32 @@ +import unittest +from unittest.mock import Mock, patch + +import pytest + +from prometheus_hardware_exporter.core import BlockingCollector, Payload, Specification + + +class TestBlockingCollector(unittest.TestCase): + """BlockingCollector test class.""" + + def setUp(self): + self.mock_payloads = [Payload(name="abc", labels=[], value=0)] + self.mock_specifications = [ + Specification(name="abc", documentation="", labels=[], metric_class=Mock()) + ] + + def test_cannot_init_collector_base(self): + """Test cannot initialize CollectorBase.""" + with pytest.raises(TypeError): + BlockingCollector() + + @patch.multiple(BlockingCollector, __abstractmethods__=set()) + def test_sync_collector_class_collect(self): + """Test sync collector class's collect method.""" + BlockingCollector.fetch = Mock(return_value=self.mock_payloads) + BlockingCollector.process = 
Mock(return_value=self.mock_payloads) + BlockingCollector.specifications = self.mock_specifications + self.test_subclass = BlockingCollector(Mock()) + list(self.test_subclass.collect()) # need list() because it's a generator + self.test_subclass.fetch.assert_called() + self.test_subclass.process.assert_called() diff --git a/tests/unit/test_exporter.py b/tests/unit/test_exporter.py new file mode 100644 index 0000000..b426013 --- /dev/null +++ b/tests/unit/test_exporter.py @@ -0,0 +1,20 @@ +from unittest.mock import Mock, patch + +from prometheus_hardware_exporter import exporter +from prometheus_hardware_exporter.exporter import Exporter + + +class TestExporter: + """Exporter test class.""" + + @patch.object(exporter, "threading") + @patch.object(exporter, "REGISTRY") + @patch.object(exporter, "make_server") + def test_exporter(self, mock_make_server, mock_registry, mock_threading): + exporter = Exporter(10000) + exporter.register(Mock()) + exporter.run(daemon=True) + + mock_make_server.assert_called_once() + mock_registry.register.assert_called_once() + mock_threading.Thread.assert_called_once() diff --git a/tests/unit/test_resources/cx_vall_show_all.txt b/tests/unit/test_resources/cx_vall_show_all.txt new file mode 100644 index 0000000..89bd4f7 --- /dev/null +++ b/tests/unit/test_resources/cx_vall_show_all.txt @@ -0,0 +1,84 @@ +{ +"Controllers":[ +{ + "Command Status" : { + "CLI Version" : "007.2408.0000.0000 Nov 15, 2022", + "Operating system" : "Linux 5.15.0-72-generic", + "Controller" : 0, + "Status" : "Success", + "Description" : "None" + }, + "Response Data" : { + "/c0/v239" : [ + { + "DG/VD" : "0/239", + "TYPE" : "RAID1", + "State" : "Optl", + "Access" : "RW", + "Consist" : "Yes", + "Cache" : "NRWTD", + "Cac" : "-", + "sCC" : "ON", + "Size" : "744.687 GiB", + "Name" : "NVMe-RAID-1" + } + ], + "PDs for VD 239" : [ + { + "EID:Slt" : "251:1", + "DID" : 0, + "State" : "Onln", + "DG" : 0, + "Size" : "800.00 GB", + "Intf" : "NVMe", + "Med" : "SSD", + "SED" : "N", + "PI" : "N", + "SeSz" : "512B", + "Model" : "MZXLR800HBHQ-000H3 ", + "Sp" : "U", + "Type" : "-" + }, + { + "EID:Slt" : "251:2", + "DID" : 1, + "State" : "Onln", + "DG" : 0, + "Size" : "800.00 GB", + "Intf" : "NVMe", + "Med" : "SSD", + "SED" : "N", + "PI" : "N", + "SeSz" : "512B", + "Model" : "MZXLR800HBHQ-000H3 ", + "Sp" : "U", + "Type" : "-" + } + ], + "VD239 Properties" : { + "Strip Size" : "64 KB", + "Number of Blocks" : 1561722880, + "VD has Emulated Drive" : "No", + "Span Depth" : 1, + "Number of Drives Per Span" : 2, + "Write Cache(initial setting)" : "WriteThrough", + "Disk Cache Policy" : "Disk's Default", + "Write Cache Status" : "Disabled", + "Encryption" : "None", + "Data Integrity" : "None", + "Active Operations" : "None", + "Exposed to OS" : "Yes", + "OS Drive Name" : "/dev/sda", + "Creation Date" : "02-08-2022", + "Creation Time" : "01:29:12 PM", + "Emulation type" : "default", + "Cachebypass size" : "Cachebypass-64k", + "Cachebypass Mode" : "Cachebypass Intelligent", + "Is LD Ready for OS Requests" : "Yes", + "SCSI NAA Id" : "600062b2073a48402a7be3283fc1215e", + "Unmap Enabled" : "No" + } + } +} +] +} diff --git a/tests/unit/test_resources/show_all.txt b/tests/unit/test_resources/show_all.txt new file mode 100644 index 0000000..4e9b6af --- /dev/null +++ b/tests/unit/test_resources/show_all.txt @@ -0,0 +1,57 @@ +{ +"Controllers":[ +{ + "Command Status" : { + "CLI Version" : "007.2408.0000.0000 Nov 15, 2022", + "Operating system" : "Linux 5.15.0-72-generic", + "Status Code" : 0, + "Status" : "Success", + 
"Description" : "None" + }, + "Response Data" : { + "Number of Controllers" : 1, + "Host Name" : "kongfu", + "Operating System " : "Linux 5.15.0-72-generic", + "System Overview" : [ + { + "Ctl" : 0, + "Model" : "HPEMR216i-aGen10+", + "Ports" : 16, + "PDs" : 2, + "DGs" : 1, + "DNOpt" : 0, + "VDs" : 1, + "VNOpt" : 0, + "BBU" : "N/A", + "sPR" : "On", + "DS" : "-", + "EHS" : "N", + "ASOs" : 2, + "Hlth" : "Opt" + } + ], + "ASO" : [ + { + "Ctl" : 0, + "Cl" : "X", + "SAS" : "U", + "MD" : "X", + "R6" : "X", + "WC" : "U", + "R5" : "X", + "SS" : "U", + "FP" : "U", + "Re" : "X", + "CR" : "X", + "RF" : "X", + "CO" : "X", + "CW" : "X", + "HA" : "X", + "SSHA" : "X" + } + ] + } +} +] +} + diff --git a/tests/unit/test_resources/show_ctrlcount.txt b/tests/unit/test_resources/show_ctrlcount.txt new file mode 100644 index 0000000..ac3bd50 --- /dev/null +++ b/tests/unit/test_resources/show_ctrlcount.txt @@ -0,0 +1,16 @@ +{ +"Controllers":[ +{ + "Command Status" : { + "CLI Version" : "007.2408.0000.0000 Nov 15, 2022", + "Operating system" : "Linux 5.15.0-72-generic", + "Status Code" : 0, + "Status" : "Success", + "Description" : "None" + }, + "Response Data" : { + "Controller Count" : 1 + } +} +] +} diff --git a/tests/unit/test_storcli.py b/tests/unit/test_storcli.py new file mode 100644 index 0000000..59a2d32 --- /dev/null +++ b/tests/unit/test_storcli.py @@ -0,0 +1,83 @@ +import unittest +from unittest.mock import patch + +from prometheus_hardware_exporter.collectors.storcli import StorCLI +from prometheus_hardware_exporter.utils import Command + +SHOW_CTRLCOUNT = "tests/unit/test_resources/show_ctrlcount.txt" +SHOW_ALL = "tests/unit/test_resources/show_all.txt" +CX_VALL_SHOW_ALL = "tests/unit/test_resources/cx_vall_show_all.txt" + + +class TestStorCLI(unittest.TestCase): + """Test StorCLI class.""" + + @patch.object(Command, "__call__") + def test_00_get_controllers_okay(self, mock_call): + with open(SHOW_ALL, "r") as content: + mock_call.return_value = content.read(), False + storcli = StorCLI() + payload, _ = storcli.get_controllers() + self.assertEqual(payload, {"count": 1, "hostname": "kongfu"}) + + @patch.object(Command, "__call__") + def test_01_get_controllers_failed(self, mock_call): + mock_call.return_value = "", False + storcli = StorCLI() + payload, _ = storcli.get_controllers() + self.assertIsNone(payload) + + @patch.object(StorCLI, "_get_controller_ids") + @patch.object(Command, "__call__") + def test_10_get_all_virtual_drives_okay(self, mock_call, mock__get_controller_ids): + with open(CX_VALL_SHOW_ALL, "r") as content: + mock_call.return_value = content.read(), False + mock__get_controller_ids.return_value = [1], False + storcli = StorCLI() + payload, _ = storcli.get_all_virtual_drives() + self.assertEqual( + payload, {1: [{"DG": "0", "VD": "239", "state": "Optl", "cache": "NRWTD"}]} + ) + + @patch.object(StorCLI, "_get_all_virtual_drives") + @patch.object(StorCLI, "_get_controller_ids") + @patch.object(Command, "__call__") + def test_11_get_all_virtual_drives_failed( + self, mock_call, mock__get_controller_ids, mock__get_all_virtual_drives + ): + with open(CX_VALL_SHOW_ALL, "r") as content: + mock_call.return_value = content.read(), False + mock__get_controller_ids.return_value = [1], False + mock__get_all_virtual_drives.return_value = None, True + storcli = StorCLI() + payload, _ = storcli.get_all_virtual_drives() + self.assertIsNone(payload) + + @patch.object(Command, "__call__") + def test_20__get_controller_id_okay(self, mock_call): + with open(SHOW_CTRLCOUNT, "r") as content: + 
mock_call.return_value = content.read(), False + storcli = StorCLI() + payload, _ = storcli._get_controller_ids() + self.assertEqual(payload, [0]) + + @patch.object(Command, "__call__") + def test_21__get_controller_id_failed(self, mock_call): + mock_call.return_value = "", False + storcli = StorCLI() + payload, _ = storcli._get_controller_ids() + self.assertIsNone(payload) + + @patch.object(Command, "__call__") + def test_30__get_all_virtual_drives_failed(self, mock_call): + mock_call.return_value = "", False + storcli = StorCLI() + payload, _ = storcli._get_all_virtual_drives(0) + self.assertIsNone(payload) + + @patch.object(Command, "__call__") + def test_31__get_all_virtual_drives_failed(self, mock_call): + mock_call.return_value = "", True + storcli = StorCLI() + payload, _ = storcli._get_all_virtual_drives(0) + self.assertIsNone(payload) diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py new file mode 100644 index 0000000..56a6f6b --- /dev/null +++ b/tests/unit/test_utils.py @@ -0,0 +1,34 @@ +import unittest +from unittest.mock import patch + +from prometheus_hardware_exporter.utils import Command + + +class TestCommand(unittest.TestCase): + """Test Command class.""" + + @patch.object(Command, "check_output") + def test_check_installed(self, mock_check_output): + mock_check_output.return_value = True, False + command = Command() + self.assertTrue(command.check_installed()) + + @patch.object(Command, "check_installed") + @patch.object(Command, "check_output") + def test_call_okay(self, mock_check_output, mock_check_installed): + mock_check_installed.return_value = True, False + mock_check_output.return_value = True, False + command = Command() + result, error = command() + self.assertFalse(error) + + @patch.object(Command, "check_installed") + @patch.object(Command, "check_output") + def test_call_failed(self, mock_check_output, mock_check_installed): + mock_check_installed.return_value = True, False + mock_check_output.return_value = False, True + command = Command() + result, error = command() + print(result, error) + self.assertTrue(error) + self.assertIsNone(result) diff --git a/tox.ini b/tox.ini new file mode 100644 index 0000000..b54137a --- /dev/null +++ b/tox.ini @@ -0,0 +1,74 @@ +# This is a template `tox.ini` file for snaps +# This file is managed by bootstack-charms-spec and should not be modified +# within individual snap repos. https://launchpad.net/bootstack-charms-spec + +[tox] +skipsdist=True +envlist = lint, unit, func +skip_missing_interpreters = True + +[testenv] +basepython = python3 +setenv = PYTHONPATH={toxinidir} + +[testenv:dev-environment] +envdir = {toxinidir}/.venv +deps = + pre-commit + {[testenv:lint]deps} + {[testenv:unit]deps} + {[testenv:func]deps} +commands = + pre-commit install + +[testenv:pre-commit] +envdir = {[testenv:dev-environment]envdir} +deps = {[testenv:dev-environment]deps} # ensure that dev-environment is installed +commands = pre-commit run --all-files + +[testenv:lint] +commands = + pflake8 + pylint --recursive=y . + mypy --install-types --non-interactive . + black --check --diff --color . + isort --check --diff --color . +deps = + black + flake8 + pyproject-flake8 + flake8-docstrings + pep8-naming + flake8-colors + colorama + isort + pylint + mypy + {[testenv:unit]deps} + {[testenv:func]deps} + +[testenv:reformat] +envdir = {toxworkdir}/lint +deps = {[testenv:lint]deps} +commands = + black . + isort . 
+ +[testenv:unit] +deps = + -r {toxinidir}/requirements.txt + -r {toxinidir}/tests/unit/requirements.txt + pytest + pytest-cov +commands = pytest {toxinidir}/tests/unit \ + {posargs:-v --cov --cov-report=term-missing --cov-report=html --cov-report=xml} + +[testenv:func] +deps = + -r {toxinidir}/requirements.txt + -r {toxinidir}/tests/functional/requirements.txt + pytest +passenv = + TEST_* +commands = + pytest {toxinidir}/tests/functional {posargs:-v}
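
For reference, a minimal collector built on the BlockingCollector, Payload, Specification, and Exporter classes added above might look like the following sketch. The module name demo_collector.py, the DummyCollector class, and the dummy_metric name are illustrative placeholders rather than part of this commit; the sketch assumes the prometheus_hardware_exporter package and prometheus-client are installed.

# demo_collector.py -- illustrative sketch only; DummyCollector and
# dummy_metric are hypothetical names, not part of this commit.
from typing import Dict, List

from prometheus_client.core import GaugeMetricFamily

from prometheus_hardware_exporter.core import BlockingCollector, Payload, Specification
from prometheus_hardware_exporter.exporter import Exporter


class DummyCollector(BlockingCollector):
    """Toy collector that exports a single constant gauge."""

    def fetch(self) -> List[Payload]:
        # A real collector would shell out to a tool such as storcli here.
        return [Payload(name="dummy_metric", labels=["demo"], value=1.0)]

    def process(self, payloads: List[Payload], datastore: Dict[str, Payload]) -> List[Payload]:
        # No post-processing is needed for this toy example.
        return payloads

    @property
    def specifications(self) -> List[Specification]:
        return [
            Specification(
                name="dummy_metric",
                documentation="A constant gauge used for illustration.",
                labels=["source"],
                metric_class=GaugeMetricFamily,
            )
        ]


if __name__ == "__main__":
    exporter = Exporter(port=10000)
    exporter.register(DummyCollector())
    exporter.run(daemon=False)  # then query: curl http://localhost:10000

Querying http://localhost:10000 should then show dummy_metric{source="demo"} alongside the default python_* metrics exposed by prometheus-client.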