diff --git a/Dockerfile b/Dockerfile index 28f48fa8..4ef2ade5 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM fedora as manpage_builder +FROM fedora:38 as manpage_builder RUN dnf install -y make pandoc python3.11-pip WORKDIR /app RUN pip install poetry diff --git a/Makefile b/Makefile index e40bed86..34b92b06 100644 --- a/Makefile +++ b/Makefile @@ -61,7 +61,7 @@ generate-man: @export QPC_VAR_CURRENT_YEAR=$(shell date +'%Y') \ && export QPC_VAR_PROJECT=$${QPC_VAR_PROJECT:-Quipucords} \ && export QPC_VAR_PROGRAM_NAME=$${QPC_VAR_PROGRAM_NAME:-qpc} \ - && poetry run jinja -X QPC_VAR docs/source/man.j2 $(ARGS) + && poetry run python docs/jinja-render.py -e '^QPC_VAR.*' -t docs/source/man.j2 $(ARGS) update-man.rst: $(MAKE) generate-man ARGS="-o docs/source/man.rst" diff --git a/docs/jinja-render.py b/docs/jinja-render.py new file mode 100644 index 00000000..33264097 --- /dev/null +++ b/docs/jinja-render.py @@ -0,0 +1,78 @@ +""" +Barebones command-line utility to render a Jinja template. + +Uses environment variables to populate the template. 
+ +Example usage: + + # define relevant environment variables + export QPC_VAR_PROGRAM_NAME=qpc + export QPC_VAR_PROJECT=Quipucords + export QPC_VAR_CURRENT_YEAR=$(date +'%Y') + + # use stdin to read template and stdout to write output: + python ./jinja-render.py -e '^QPC_VAR_.*' \ + < ./source/man.j2 > ./source/man.rst + + # use arguments to specify template and output paths: + python ./jinja-render.py -e '^QPC_VAR_.*' \ + -t ./source/man.j2 -o ./source/man.rst +""" + +import argparse +import os +import re + +from jinja2 import DictLoader, Environment + + +def get_env_vars(allow_pattern): + """Get the matching environment variables.""" + env_vars = {} + re_pattern = re.compile(allow_pattern) + for key, value in os.environ.items(): + if re_pattern.search(key): + env_vars[key] = value + return env_vars + + +def get_template(template_file): + """Load the Jinja template.""" + with template_file as f: + template_data = f.read() + return Environment( + loader=DictLoader({"-": template_data}), keep_trailing_newline=True + ).get_template("-") + + +def get_args(): + """Parse and return command line arguments.""" + parser = argparse.ArgumentParser(description="Format Jinja template using env vars") + parser.add_argument( + "-e", + "--env_var_pattern", + type=str, + default="", + help="regex pattern to match environment variable names", + ) + parser.add_argument("-o", "--output", type=argparse.FileType("w"), default="-") + parser.add_argument("-t", "--template", type=argparse.FileType("r"), default="-") + args = parser.parse_args() + return args + + +def main(): + """Parse command line args and render Jinja template to output.""" + args = get_args() + template = get_template(template_file=args.template) + env_vars = get_env_vars(allow_pattern=args.env_var_pattern) + args.output.write(template.render(env_vars)) + if hasattr(args.output, "name"): + # This is a side effect of how ArgumentParser handles files vs stdout. 
+ # Real output files have a "name" attribute and should be closed. + # However, we do NOT want to close if it's stdout, which has no name. + args.output.close() + + +if __name__ == "__main__": + main() diff --git a/docs/source/man.j2 b/docs/source/man.j2 index 036aa96c..6d5041fe 100644 --- a/docs/source/man.j2 +++ b/docs/source/man.j2 @@ -17,7 +17,7 @@ Description The {{ QPC_VAR_PROJECT }} tool, accessed through the ``{{ QPC_VAR_PROGRAM_NAME }}`` command, is an inspection and reporting tool. It is designed to identify environment data, or *facts*, such as the number of physical and virtual systems on a network, their operating systems, and other configuration data. In addition, it is designed to identify and report more detailed facts for some versions of key Red Hat packages and products for the Linux based IT resources in that network. The ability to inspect the software and systems that are running on your network improves your ability to understand and report on your usage. Ultimately, this inspection and reporting process is part of the larger system administration task of managing your inventories. -The {{ QPC_VAR_PROJECT }} tool uses two types of configuration to manage the inspection process. A *credential* contains configuration, such as the username and password or SSH key of the user that runs the inspection process. Certain credential types also support the use of an access token as an alternative authentication method. A *source* defines the entity to be inspected and one or more credentials to use during the inspection process. The entity to be inspected can be a host, subnet, network, or systems management solution such as Openshift, Advanced Cluster Security, Ansible Automation Platform, vCenter Server, or Satellite. You can save multiple credentials and sources to use with {{ QPC_VAR_PROJECT }} in various combinations as you run inspection processes, or *scans*. 
When you have completed a scan, you can access the output as a *report* to review the results. +The {{ QPC_VAR_PROJECT }} tool uses two types of configuration to manage the inspection process. A *credential* contains configuration, such as the username and password or SSH key of the user that runs the inspection process. Certain credential types also support the use of an access token as an alternative authentication method. A *source* defines the entity to be inspected and one or more credentials to use during the inspection process. The entity to be inspected can be a host, subnet, network, or systems management solution such as Openshift, Red Hat Advanced Cluster Security, Ansible Automation Platform, vCenter Server, or Satellite. You can save multiple credentials and sources to use with {{ QPC_VAR_PROJECT }} in various combinations as you run inspection processes, or *scans*. When you have completed a scan, you can access the output as a *report* to review the results. By default, the credentials and sources that are created when using {{ QPC_VAR_PROJECT }} are encrypted in a database. The values are encrypted with AES-256 encryption. They are decrypted when the {{ QPC_VAR_PROJECT }} server runs a scan by using a *vault password* to access the encrypted values that are stored in the database. The {{ QPC_VAR_PROJECT }} tool is an *agentless* inspection tool, so there is no need to install the tool on the sources to be inspected. @@ -85,7 +85,7 @@ To configure the connection to the server, supply the host address. Supplying a Logging in to the server -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~~~~~~~~~ To log in to the server after the connection is configured, use the ``login`` subcommand. This command retrieves a token that is used for authentication with any command line interface commands that follow it. 
@@ -101,7 +101,7 @@ To log in to the server after the connection is configured, use the ``login`` su Logging out of the server -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~~~~~~~~~~ To log out of the server, use the ``logout`` subcommand. This command removes the token that was created when the ``login`` command was used. @@ -109,7 +109,7 @@ To log out of the server, use the ``logout`` subcommand. This command removes th Viewing the server status -~~~~~~~~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~~~~~~~~~~ To view or save the status information for the server, use the ``status`` subcommand. This command returns data about your {{ QPC_VAR_PROJECT }} server environment, such as server build and API versions, environment variable information, installed prerequisites and versions, and other server metadata that can help diagnose issues during troubleshooting. @@ -134,7 +134,7 @@ Creating and Editing Credentials To create a credential, supply the type of credential and supply SSH credentials as either a username-password pair, a username-key pair, or an access token. The {{ QPC_VAR_PROJECT }} tool stores each set of credentials in a separate credential entry. 
-**{{ QPC_VAR_PROGRAM_NAME }} cred add --name=** *name* **--type=** *(network | vcenter | satellite | openshift | acs | ansible)* **--username=** *username* **(--password | --sshkeyfile=** *key_file* | --sshkey**)** **[--sshpassphrase]** **--become-method=** *(sudo | su | pbrun | pfexec | doas | dzdo | ksu | runas )* **--become-user=** *user* **[--become-password]** **[--token]** +**{{ QPC_VAR_PROGRAM_NAME }} cred add --name=** *name* **--type=** *(network | vcenter | satellite | openshift | rhacs | ansible)* **--username=** *username* **(--password | --sshkeyfile=** *key_file* | --sshkey**)** **[--sshpassphrase]** **--become-method=** *(sudo | su | pbrun | pfexec | doas | dzdo | ksu | runas )* **--become-user=** *user* **[--become-password]** **[--token]** ``--name=name`` @@ -142,7 +142,7 @@ To create a credential, supply the type of credential and supply SSH credentials ``--type=type`` - Required. Sets the type of credential. The value must be ``network``, ``vcenter``, ``satellite``, ``openshift``, ``acs`` or ``ansible``. You cannot edit a credential's type after creating it. + Required. Sets the type of credential. The value must be ``network``, ``vcenter``, ``satellite``, ``openshift``, ``rhacs`` or ``ansible``. You cannot edit a credential's type after creating it. ``--username=username`` @@ -185,15 +185,15 @@ The information in a credential might change, including passwords, become passwo **{{ QPC_VAR_PROGRAM_NAME }} cred edit --name=** *name* **--username=** *username* **(--password | --sshkeyfile=** *key_file* | --sshkey **)** **[--sshpassphrase]** **--become-method=** *(sudo | su | pbrun | pfexec | doas | dzdo | ksu | runas )* **--become-user=** *user* **[--become-password]** **[--token]** Listing and Showing Credentials -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The ``{{ QPC_VAR_PROGRAM_NAME }} cred list`` command returns the details for every credential that is configured for {{ QPC_VAR_PROJECT }}. 
This output includes the name, username, password, SSH keyfile, sudo password, or token (if applicable) for each entry. Passwords and tokens are masked if provided, if not, they will appear as ``null``. -**{{ QPC_VAR_PROGRAM_NAME }} cred list --type=** *(network | vcenter | satellite | openshift | acs | ansible)* +**{{ QPC_VAR_PROGRAM_NAME }} cred list --type=** *(network | vcenter | satellite | openshift | rhacs | ansible)* ``--type=type`` - Optional. Filters the results by credential type. The value must be ``network``, ``vcenter``, ``satellite``, ``openshift``, ``acs``, or ``ansible``. + Optional. Filters the results by credential type. The value must be ``network``, ``vcenter``, ``satellite``, ``openshift``, ``rhacs``, or ``ansible``. The ``{{ QPC_VAR_PROGRAM_NAME }} cred show`` command is the same as the ``{{ QPC_VAR_PROGRAM_NAME }} cred list`` command, except that it returns details for a single specified credential. @@ -205,7 +205,7 @@ The ``{{ QPC_VAR_PROGRAM_NAME }} cred show`` command is the same as the ``{{ QPC Clearing Credentials -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~~~~~ As the network infrastructure changes, it might be necessary to delete some credentials. Use the ``clear`` subcommand to delete credentials. @@ -223,20 +223,20 @@ As the network infrastructure changes, it might be necessary to delete some cred Sources ----------------- +------- Use the ``{{ QPC_VAR_PROGRAM_NAME }} source`` command to create and manage sources. -A source contains a single entity or a set of multiple entities that are to be inspected. A source can be one or more physical machines, virtual machines, or containers, or it can be a collection of network information, including IP addresses or host names, or it can be information about a systems management solution such as Openshift, Advanced Cluster Security, Ansible Automation Platform, vCenter Server, or Satellite. 
The source also contains information about the SSH ports and SSH credentials that are needed to access the systems to be inspected. The SSH credentials are provided through reference to one or more of the {{ QPC_VAR_PROJECT }} credentials that you configure. +A source contains a single entity or a set of multiple entities that are to be inspected. A source can be one or more physical machines, virtual machines, or containers, or it can be a collection of network information, including IP addresses or host names, or it can be information about a systems management solution such as Openshift, Red Hat Advanced Cluster Security, Ansible Automation Platform, vCenter Server, or Satellite. The source also contains information about the SSH ports and SSH credentials that are needed to access the systems to be inspected. The SSH credentials are provided through reference to one or more of the {{ QPC_VAR_PROJECT }} credentials that you configure. When you configure a scan, it contains references to one or more sources, including the credentials that are provided in each source. Therefore, you can reference sources in different scan configurations for various purposes, for example, to scan your entire infrastructure or a specific sector of that infrastructure. Creating and Editing Sources -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ To create a source, supply the type of source with the ``type`` option, one or more host names or IP addresses to connect to with the ``--hosts`` option, and the credentials needed to access those systems with the ``--cred`` option. The ``{{ QPC_VAR_PROGRAM_NAME }} source`` command allows multiple entries for the ``hosts`` and ``cred`` options. Therefore, a single source can access a collection of servers and subnets as needed to create an accurate and complete scan. 
-**{{ QPC_VAR_PROGRAM_NAME }} source add --name=** *name* **--type=** *(network | vcenter | satellite | openshift | acs | ansible)* **--hosts** *ip_address* **--cred** *credential* **[--exclude-hosts** *ip_address* **]** **[--port=** *port* **]** **[--use-paramiko=** *(True | False)* **]** **[--ssl-cert-verify=** *(True | False)* **]** **[--ssl-protocol=** *protocol* **]** **[--disable-ssl=** *(True | False)* **]** +**{{ QPC_VAR_PROGRAM_NAME }} source add --name=** *name* **--type=** *(network | vcenter | satellite | openshift | rhacs | ansible)* **--hosts** *ip_address* **--cred** *credential* **[--exclude-hosts** *ip_address* **]** **[--port=** *port* **]** **[--use-paramiko=** *(True | False)* **]** **[--ssl-cert-verify=** *(True | False)* **]** **[--ssl-protocol=** *protocol* **]** **[--disable-ssl=** *(True | False)* **]** ``--name=name`` @@ -244,7 +244,7 @@ To create a source, supply the type of source with the ``type`` option, one or m ``--type=type`` - Required. Sets the type of source. The value must be ``network``, ``vcenter``, ``satellite``, ``openshift``, ``acs``, or ``ansible``. The type cannot be edited after a source is created. + Required. Sets the type of source. The value must be ``network``, ``vcenter``, ``satellite``, ``openshift``, ``rhacs``, or ``ansible``. The type cannot be edited after a source is created. ``--hosts ip_address`` @@ -282,7 +282,7 @@ To create a source, supply the type of source with the ``type`` option, one or m ``--port=port`` - Optional. Sets a port to be used for the scan. This value supports connection and inspection on a non-standard port. By default, a Network scan uses port 22, vCenter, Ansible, ACS and Satellite scans use port 443, and an Openshift scan uses port 6443. + Optional. Sets a port to be used for the scan. This value supports connection and inspection on a non-standard port. 
By default, a Network scan uses port 22, vCenter, Ansible, RHACS and Satellite scans use port 443, and an Openshift scan uses port 6443. ``--use-paramiko=(True | False)`` @@ -313,15 +313,15 @@ For example, if a source contains a value of ``server1creds`` for the ``--cred`` **TIP:** After editing a source, use the ``{{ QPC_VAR_PROGRAM_NAME }} source show`` command to review those edits. Listing and Showing Sources -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~~~~~~~~~~~~ The ``{{ QPC_VAR_PROGRAM_NAME }} source list`` command returns the details for all configured sources. The output of this command includes the host names, IP addresses, or IP ranges, the credentials, and the ports that are configured for each source. -**{{ QPC_VAR_PROGRAM_NAME }} source list [--type=** *(network | vcenter | satellite | openshift | acs | ansible)* **]** +**{{ QPC_VAR_PROGRAM_NAME }} source list [--type=** *(network | vcenter | satellite | openshift | rhacs | ansible)* **]** ``--type=type`` - Optional. Filters the results by source type. The value must be ``network``, ``vcenter``, ``satellite``, ``openshift``, ``acs``, or ``ansible``. + Optional. Filters the results by source type. The value must be ``network``, ``vcenter``, ``satellite``, ``openshift``, ``rhacs``, or ``ansible``. The ``{{ QPC_VAR_PROGRAM_NAME }} source show`` command is the same as the ``{{ QPC_VAR_PROGRAM_NAME }} source list`` command, except that it returns details for a single specified source. @@ -334,7 +334,7 @@ The ``{{ QPC_VAR_PROGRAM_NAME }} source show`` command is the same as the ``{{ Q Clearing Sources -~~~~~~~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~ As the network infrastructure changes, it might be necessary to delete some sources. Use the ``{{ QPC_VAR_PROGRAM_NAME }} source clear`` command to delete sources. @@ -354,12 +354,13 @@ Scans Use the ``{{ QPC_VAR_PROGRAM_NAME }} scan`` command to create, run and manage scans. 
-A scan contains a set of one or more sources of any type, plus additional options that refine how the scan runs, such as the products to omit from the scan, and the maximum number of parallel system scans. Because a scan can combine sources of different types, you can include any combination of Network, OpenShift, Advanced Cluster Security, Ansible Automation Platform, Satellite, and vCenter Server sources in a single scan. When you configure a scan to include multiple sources of different types, for example a Network source and a Satellite source, the same part of your infrastructure might be scanned more than once. The results for this type of scan could show duplicate information in the reported results. However, you have the option to view the unprocessed detailed report that would show these duplicate results for each source type, or a processed deployments report with deduplicated and merged results. +A scan contains a set of one or more sources of any type, plus additional options that refine how the scan runs, such as the products to omit from the scan, and the maximum number of parallel system scans. Because a scan can combine sources of different types, you can include any combination of Network, OpenShift, Red Hat Advanced Cluster Security, Ansible Automation Platform, Satellite, and vCenter Server sources in a single scan. When you configure a scan to include multiple sources of different types, for example a Network source and a Satellite source, the same part of your infrastructure might be scanned more than once. The results for this type of scan could show duplicate information in the reported results. However, you have the option to view the unprocessed detailed report that would show these duplicate results for each source type, or a processed deployments report with deduplicated and merged results. 
The creation of a scan groups sources, the credentials contained within those sources, and the other options so that the act of running the scan is repeatable. When you run the scan, each instance is saved as a scan job. Creating and Editing Scans -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~~~~~~~~~~~ + Use the ``{{ QPC_VAR_PROGRAM_NAME }} scan add`` command to create scan objects with one or more sources. This command creates a scan object that references the supplied sources and contains any options supplied by the user. **{{ QPC_VAR_PROGRAM_NAME }} scan add --name** *name* **--sources=** *source_list* **[--max-concurrency=** *concurrency* **]** **[--disabled-optional-products=** *products_list* **]** **[--enabled-ext-product-search=** *products_list* **]** **[--ext-product-search-dirs=** *search_dirs_list* **]** @@ -501,14 +502,15 @@ The ``{{ QPC_VAR_PROGRAM_NAME }} scan cancel`` command cancels the execution of Reports --------- +------- Use the ``{{ QPC_VAR_PROGRAM_NAME }} report`` command to retrieve a report from a scan. You can retrieve a report in a JavaScript Object Notation (JSON) format or in a comma-separated values (CSV) format. There are three different types of reports that you can retrieve, a *details* report, a *deployments* report, and an *insights* report. Viewing the Details Report ~~~~~~~~~~~~~~~~~~~~~~~~~~ -The ``{{ QPC_VAR_PROGRAM_NAME }} report details`` command retrieves a detailed report that contains the unprocessed facts that are gathered during a scan. These facts are the raw output from Network, vCenter, Satellite, Openshift, Advanced Cluster Security and Ansible scans, as applicable. + +The ``{{ QPC_VAR_PROGRAM_NAME }} report details`` command retrieves a detailed report that contains the unprocessed facts that are gathered during a scan. These facts are the raw output from Network, vCenter, Satellite, Openshift, Red Hat Advanced Cluster Security and Ansible scans, as applicable. 
**{{ QPC_VAR_PROGRAM_NAME }} report details (--scan-job** *scan_job_identifier* **|** **--report** *report_identifier* **)** **(--json|--csv)** **--output-file** *path* **[--mask]** @@ -538,6 +540,7 @@ The ``{{ QPC_VAR_PROGRAM_NAME }} report details`` command retrieves a detailed r Viewing the Deployments Report ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + The ``{{ QPC_VAR_PROGRAM_NAME }} report deployments`` command retrieves a report that contains the processed fingerprints from a scan. A *fingerprint* is the set of system, product, and entitlement facts for a particular physical or virtual machine. A processed fingerprint results from a procedure that merges facts from various sources, and, when possible, deduplicates redundant systems. For example, the raw facts of a scan that includes both Network and vCenter sources could show two instances of a machine, indicated by an identical MAC address. The deployments report results in a deduplicated and merged fingerprint that shows both the Network and vCenter facts for that machine as a single set. @@ -569,7 +572,8 @@ For example, the raw facts of a scan that includes both Network and vCenter sour Displays the results of the report with sensitive data masked by a hash. Viewing the Insights Report -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + The ``{{ QPC_VAR_PROGRAM_NAME }} report insights`` command retrieves a report that contains the hosts to be uploaded to the subscription insights service. A *host* is the set of system, product, and entitlement facts for a particular physical or virtual machine. 
**{{ QPC_VAR_PROGRAM_NAME }} report insights (--scan-job** *scan_job_identifier* **|** **--report** *report_identifier* **)** **--output-file** *path* @@ -589,6 +593,7 @@ The ``{{ QPC_VAR_PROGRAM_NAME }} report insights`` command retrieves a report th Downloading Reports ~~~~~~~~~~~~~~~~~~~ + The ``{{ QPC_VAR_PROGRAM_NAME }} report download`` command downloads a set of reports, identified either by scan job identifier or report identifier, as a TAR.GZ file. The report TAR.GZ file contains the details and deployments reports in both their JSON and CSV formats. **{{ QPC_VAR_PROGRAM_NAME }} report download (--scan-job** *scan_job_identifier* **|** **--report** *report_identifier* **)** **--output-file** *path* **[--mask]** @@ -611,6 +616,7 @@ The ``{{ QPC_VAR_PROGRAM_NAME }} report download`` command downloads a set of re Merging Scan Job Results ~~~~~~~~~~~~~~~~~~~~~~~~ + The ``{{ QPC_VAR_PROGRAM_NAME }} report merge`` command merges report data and returns the report identifier of the merged report. You can use this report identifier and the ``{{ QPC_VAR_PROGRAM_NAME }} report`` command with the ``details`` or ``deployments`` subcommands to retrieve a report from the merged results. **{{ QPC_VAR_PROGRAM_NAME }} report merge (--job-ids** *scan_job_identifiers* **|** **--report-ids** *report_identifiers* **|** **--json-files** *json_details_report_files* **|** **--json-directory** *path_to_directory_of_json_files* **)** @@ -637,6 +643,7 @@ The ``{{ QPC_VAR_PROGRAM_NAME }} report merge`` command runs an asynchronous job Viewing the Status of a Report Merge ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + The ``{{ QPC_VAR_PROGRAM_NAME }} report merge-status`` command can be used to check the status of a large merge of JSON details report files. A large merge is created with the ``{{ QPC_VAR_PROGRAM_NAME }} report merge --json-directory=path_to_directory_of_json_files`` command. This command returns a merge job ID that you can use to access the status of the merge. 
**{{ QPC_VAR_PROGRAM_NAME }} report merge-status (--job** *report_job_identifier* **)** @@ -648,6 +655,7 @@ The ``{{ QPC_VAR_PROGRAM_NAME }} report merge-status`` command can be used to ch Manually Reprocessing Reports ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + The ``{{ QPC_VAR_PROGRAM_NAME }} report upload`` command uploads a details report to reprocess it. This could be useful if a value in the details report caused a system to be excluded. After modification of the details report, simply run the ``{{ QPC_VAR_PROGRAM_NAME }} report upload --json-file DETAILS_REPORT_JSON``. **{{ QPC_VAR_PROGRAM_NAME }} report upload (--json-file** *json_details_report_file* **)** @@ -698,6 +706,7 @@ To configure Insights credentials, simply provide the appropriate username and p Publishing to Insights ~~~~~~~~~~~~~~~~~~~~~~ + The ``{{ QPC_VAR_PROGRAM_NAME }} insights publish`` command allows you to publish an Insights report to Red Hat Insights and its services. You have two options for publishing a report: use the associated report identifier from the generating scan, or provide a previously downloaded report as an input file. 
**{{ QPC_VAR_PROGRAM_NAME }} insights publish (--report** *report_identifiers* **| --input-file** *path_to_tar_gz* ) @@ -767,9 +776,9 @@ Examples ``{{ QPC_VAR_PROGRAM_NAME }} cred add --name ansible_cred --type ansible --username ansible_user --password`` -* Creating a new acs type credential +* Creating a new rhacs type credential - ``{{ QPC_VAR_PROGRAM_NAME }} cred add --name acs_cred --type acs --token`` + ``{{ QPC_VAR_PROGRAM_NAME }} cred add --name rhacs_cred --type rhacs --token`` * Listing all credentials @@ -811,9 +820,9 @@ Examples ``{{ QPC_VAR_PROGRAM_NAME }} source add --name ansible_source --type ansible --hosts 10.0.205.205 --ssl-cert-verify false --cred ansible_cred`` -* Creating a new acs source +* Creating a new rhacs source - ``{{ QPC_VAR_PROGRAM_NAME }} source add --name acs_source --type acs --hosts acs-cluster.example.com --cred acs_cred`` + ``{{ QPC_VAR_PROGRAM_NAME }} source add --name rhacs_source --type rhacs --hosts rhacs-cluster.example.com --cred rhacs_cred`` * Editing a source diff --git a/docs/source/man.rst b/docs/source/man.rst index 90e22acf..5fad3afa 100644 --- a/docs/source/man.rst +++ b/docs/source/man.rst @@ -17,7 +17,7 @@ Description The Quipucords tool, accessed through the ``qpc`` command, is an inspection and reporting tool. It is designed to identify environment data, or *facts*, such as the number of physical and virtual systems on a network, their operating systems, and other configuration data. In addition, it is designed to identify and report more detailed facts for some versions of key Red Hat packages and products for the Linux based IT resources in that network. The ability to inspect the software and systems that are running on your network improves your ability to understand and report on your usage. Ultimately, this inspection and reporting process is part of the larger system administration task of managing your inventories. -The Quipucords tool uses two types of configuration to manage the inspection process. 
A *credential* contains configuration, such as the username and password or SSH key of the user that runs the inspection process. Certain credential types also support the use of an access token as an alternative authentication method. A *source* defines the entity to be inspected and one or more credentials to use during the inspection process. The entity to be inspected can be a host, subnet, network, or systems management solution such as Openshift, Advanced Cluster Security, Ansible Automation Platform, vCenter Server, or Satellite. You can save multiple credentials and sources to use with Quipucords in various combinations as you run inspection processes, or *scans*. When you have completed a scan, you can access the output as a *report* to review the results. +The Quipucords tool uses two types of configuration to manage the inspection process. A *credential* contains configuration, such as the username and password or SSH key of the user that runs the inspection process. Certain credential types also support the use of an access token as an alternative authentication method. A *source* defines the entity to be inspected and one or more credentials to use during the inspection process. The entity to be inspected can be a host, subnet, network, or systems management solution such as Openshift, Red Hat Advanced Cluster Security, Ansible Automation Platform, vCenter Server, or Satellite. You can save multiple credentials and sources to use with Quipucords in various combinations as you run inspection processes, or *scans*. When you have completed a scan, you can access the output as a *report* to review the results. By default, the credentials and sources that are created when using Quipucords are encrypted in a database. The values are encrypted with AES-256 encryption. They are decrypted when the Quipucords server runs a scan by using a *vault password* to access the encrypted values that are stored in the database. 
The Quipucords tool is an *agentless* inspection tool, so there is no need to install the tool on the sources to be inspected. @@ -85,7 +85,7 @@ To configure the connection to the server, supply the host address. Supplying a Logging in to the server -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~~~~~~~~~ To log in to the server after the connection is configured, use the ``login`` subcommand. This command retrieves a token that is used for authentication with any command line interface commands that follow it. @@ -101,7 +101,7 @@ To log in to the server after the connection is configured, use the ``login`` su Logging out of the server -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~~~~~~~~~~ To log out of the server, use the ``logout`` subcommand. This command removes the token that was created when the ``login`` command was used. @@ -109,7 +109,7 @@ To log out of the server, use the ``logout`` subcommand. This command removes th Viewing the server status -~~~~~~~~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~~~~~~~~~~ To view or save the status information for the server, use the ``status`` subcommand. This command returns data about your Quipucords server environment, such as server build and API versions, environment variable information, installed prerequisites and versions, and other server metadata that can help diagnose issues during troubleshooting. @@ -134,7 +134,7 @@ Creating and Editing Credentials To create a credential, supply the type of credential and supply SSH credentials as either a username-password pair, a username-key pair, or an access token. The Quipucords tool stores each set of credentials in a separate credential entry. 
-**qpc cred add --name=** *name* **--type=** *(network | vcenter | satellite | openshift | acs | ansible)* **--username=** *username* **(--password | --sshkeyfile=** *key_file* | --sshkey**)** **[--sshpassphrase]** **--become-method=** *(sudo | su | pbrun | pfexec | doas | dzdo | ksu | runas )* **--become-user=** *user* **[--become-password]** **[--token]** +**qpc cred add --name=** *name* **--type=** *(network | vcenter | satellite | openshift | rhacs | ansible)* **--username=** *username* **(--password | --sshkeyfile=** *key_file* | --sshkey**)** **[--sshpassphrase]** **--become-method=** *(sudo | su | pbrun | pfexec | doas | dzdo | ksu | runas )* **--become-user=** *user* **[--become-password]** **[--token]** ``--name=name`` @@ -142,7 +142,7 @@ To create a credential, supply the type of credential and supply SSH credentials ``--type=type`` - Required. Sets the type of credential. The value must be ``network``, ``vcenter``, ``satellite``, ``openshift``, ``acs`` or ``ansible``. You cannot edit a credential's type after creating it. + Required. Sets the type of credential. The value must be ``network``, ``vcenter``, ``satellite``, ``openshift``, ``rhacs`` or ``ansible``. You cannot edit a credential's type after creating it. ``--username=username`` @@ -185,15 +185,15 @@ The information in a credential might change, including passwords, become passwo **qpc cred edit --name=** *name* **--username=** *username* **(--password | --sshkeyfile=** *key_file* | --sshkey **)** **[--sshpassphrase]** **--become-method=** *(sudo | su | pbrun | pfexec | doas | dzdo | ksu | runas )* **--become-user=** *user* **[--become-password]** **[--token]** Listing and Showing Credentials -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The ``qpc cred list`` command returns the details for every credential that is configured for Quipucords. This output includes the name, username, password, SSH keyfile, sudo password, or token (if applicable) for each entry. 
Passwords and tokens are masked if provided; if not, they will appear as ``null``.
+A source contains a single entity or a set of multiple entities that are to be inspected. A source can be one or more physical machines, virtual machines, or containers, or it can be a collection of network information, including IP addresses or host names, or it can be information about a systems management solution such as Openshift, Red Hat Advanced Cluster Security, Ansible Automation Platform, vCenter Server, or Satellite. The source also contains information about the SSH ports and SSH credentials that are needed to access the systems to be inspected. The SSH credentials are provided through reference to one or more of the Quipucords credentials that you configure. When you configure a scan, it contains references to one or more sources, including the credentials that are provided in each source. Therefore, you can reference sources in different scan configurations for various purposes, for example, to scan your entire infrastructure or a specific sector of that infrastructure. Creating and Editing Sources -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ To create a source, supply the type of source with the ``type`` option, one or more host names or IP addresses to connect to with the ``--hosts`` option, and the credentials needed to access those systems with the ``--cred`` option. The ``qpc source`` command allows multiple entries for the ``hosts`` and ``cred`` options. Therefore, a single source can access a collection of servers and subnets as needed to create an accurate and complete scan. 
-**qpc source add --name=** *name* **--type=** *(network | vcenter | satellite | openshift | acs | ansible)* **--hosts** *ip_address* **--cred** *credential* **[--exclude-hosts** *ip_address* **]** **[--port=** *port* **]** **[--use-paramiko=** *(True | False)* **]** **[--ssl-cert-verify=** *(True | False)* **]** **[--ssl-protocol=** *protocol* **]** **[--disable-ssl=** *(True | False)* **]** +**qpc source add --name=** *name* **--type=** *(network | vcenter | satellite | openshift | rhacs | ansible)* **--hosts** *ip_address* **--cred** *credential* **[--exclude-hosts** *ip_address* **]** **[--port=** *port* **]** **[--use-paramiko=** *(True | False)* **]** **[--ssl-cert-verify=** *(True | False)* **]** **[--ssl-protocol=** *protocol* **]** **[--disable-ssl=** *(True | False)* **]** ``--name=name`` @@ -244,7 +244,7 @@ To create a source, supply the type of source with the ``type`` option, one or m ``--type=type`` - Required. Sets the type of source. The value must be ``network``, ``vcenter``, ``satellite``, ``openshift``, ``acs``, or ``ansible``. The type cannot be edited after a source is created. + Required. Sets the type of source. The value must be ``network``, ``vcenter``, ``satellite``, ``openshift``, ``rhacs``, or ``ansible``. The type cannot be edited after a source is created. ``--hosts ip_address`` @@ -282,7 +282,7 @@ To create a source, supply the type of source with the ``type`` option, one or m ``--port=port`` - Optional. Sets a port to be used for the scan. This value supports connection and inspection on a non-standard port. By default, a Network scan uses port 22, vCenter, Ansible, ACS and Satellite scans use port 443, and an Openshift scan uses port 6443. + Optional. Sets a port to be used for the scan. This value supports connection and inspection on a non-standard port. By default, a Network scan uses port 22, vCenter, Ansible, RHACS and Satellite scans use port 443, and an Openshift scan uses port 6443. 
``--use-paramiko=(True | False)`` @@ -313,15 +313,15 @@ For example, if a source contains a value of ``server1creds`` for the ``--cred`` **TIP:** After editing a source, use the ``qpc source show`` command to review those edits. Listing and Showing Sources -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~~~~~~~~~~~~ The ``qpc source list`` command returns the details for all configured sources. The output of this command includes the host names, IP addresses, or IP ranges, the credentials, and the ports that are configured for each source. -**qpc source list [--type=** *(network | vcenter | satellite | openshift | acs | ansible)* **]** +**qpc source list [--type=** *(network | vcenter | satellite | openshift | rhacs | ansible)* **]** ``--type=type`` - Optional. Filters the results by source type. The value must be ``network``, ``vcenter``, ``satellite``, ``openshift``, ``acs``, or ``ansible``. + Optional. Filters the results by source type. The value must be ``network``, ``vcenter``, ``satellite``, ``openshift``, ``rhacs``, or ``ansible``. The ``qpc source show`` command is the same as the ``qpc source list`` command, except that it returns details for a single specified source. @@ -334,7 +334,7 @@ The ``qpc source show`` command is the same as the ``qpc source list`` command, Clearing Sources -~~~~~~~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~ As the network infrastructure changes, it might be necessary to delete some sources. Use the ``qpc source clear`` command to delete sources. @@ -354,12 +354,13 @@ Scans Use the ``qpc scan`` command to create, run and manage scans. -A scan contains a set of one or more sources of any type, plus additional options that refine how the scan runs, such as the products to omit from the scan, and the maximum number of parallel system scans. 
Because a scan can combine sources of different types, you can include any combination of Network, OpenShift, Advanced Cluster Security, Ansible Automation Platform, Satellite, and vCenter Server sources in a single scan. When you configure a scan to include multiple sources of different types, for example a Network source and a Satellite source, the same part of your infrastructure might be scanned more than once. The results for this type of scan could show duplicate information in the reported results. However, you have the option to view the unprocessed detailed report that would show these duplicate results for each source type, or a processed deployments report with deduplicated and merged results. +A scan contains a set of one or more sources of any type, plus additional options that refine how the scan runs, such as the products to omit from the scan, and the maximum number of parallel system scans. Because a scan can combine sources of different types, you can include any combination of Network, OpenShift, Red Hat Advanced Cluster Security, Ansible Automation Platform, Satellite, and vCenter Server sources in a single scan. When you configure a scan to include multiple sources of different types, for example a Network source and a Satellite source, the same part of your infrastructure might be scanned more than once. The results for this type of scan could show duplicate information in the reported results. However, you have the option to view the unprocessed detailed report that would show these duplicate results for each source type, or a processed deployments report with deduplicated and merged results. The creation of a scan groups sources, the credentials contained within those sources, and the other options so that the act of running the scan is repeatable. When you run the scan, each instance is saved as a scan job. 
Creating and Editing Scans -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~~~~~~~~~~~ + Use the ``qpc scan add`` command to create scan objects with one or more sources. This command creates a scan object that references the supplied sources and contains any options supplied by the user. **qpc scan add --name** *name* **--sources=** *source_list* **[--max-concurrency=** *concurrency* **]** **[--disabled-optional-products=** *products_list* **]** **[--enabled-ext-product-search=** *products_list* **]** **[--ext-product-search-dirs=** *search_dirs_list* **]** @@ -501,14 +502,15 @@ The ``qpc scan cancel`` command cancels the execution of a scan job. A canceled Reports --------- +------- Use the ``qpc report`` command to retrieve a report from a scan. You can retrieve a report in a JavaScript Object Notation (JSON) format or in a comma-separated values (CSV) format. There are three different types of reports that you can retrieve, a *details* report, a *deployments* report, and an *insights* report. Viewing the Details Report ~~~~~~~~~~~~~~~~~~~~~~~~~~ -The ``qpc report details`` command retrieves a detailed report that contains the unprocessed facts that are gathered during a scan. These facts are the raw output from Network, vCenter, Satellite, Openshift, Advanced Cluster Security and Ansible scans, as applicable. + +The ``qpc report details`` command retrieves a detailed report that contains the unprocessed facts that are gathered during a scan. These facts are the raw output from Network, vCenter, Satellite, Openshift, Red Hat Advanced Cluster Security and Ansible scans, as applicable. 
**qpc report details (--scan-job** *scan_job_identifier* **|** **--report** *report_identifier* **)** **(--json|--csv)** **--output-file** *path* **[--mask]** @@ -538,6 +540,7 @@ The ``qpc report details`` command retrieves a detailed report that contains the Viewing the Deployments Report ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + The ``qpc report deployments`` command retrieves a report that contains the processed fingerprints from a scan. A *fingerprint* is the set of system, product, and entitlement facts for a particular physical or virtual machine. A processed fingerprint results from a procedure that merges facts from various sources, and, when possible, deduplicates redundant systems. For example, the raw facts of a scan that includes both Network and vCenter sources could show two instances of a machine, indicated by an identical MAC address. The deployments report results in a deduplicated and merged fingerprint that shows both the Network and vCenter facts for that machine as a single set. @@ -569,7 +572,8 @@ For example, the raw facts of a scan that includes both Network and vCenter sour Displays the results of the report with sensitive data masked by a hash. Viewing the Insights Report -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + The ``qpc report insights`` command retrieves a report that contains the hosts to be uploaded to the subscription insights service. A *host* is the set of system, product, and entitlement facts for a particular physical or virtual machine. **qpc report insights (--scan-job** *scan_job_identifier* **|** **--report** *report_identifier* **)** **--output-file** *path* @@ -589,6 +593,7 @@ The ``qpc report insights`` command retrieves a report that contains the hosts t Downloading Reports ~~~~~~~~~~~~~~~~~~~ + The ``qpc report download`` command downloads a set of reports, identified either by scan job identifer or report identifier, as a TAR.GZ file. 
The report TAR.GZ file contains the details and deployments reports in both their JSON and CSV formats. **qpc report download (--scan-job** *scan_job_identifier* **|** **--report** *report_identifier* **)** **--output-file** *path* **[--mask]** @@ -611,6 +616,7 @@ The ``qpc report download`` command downloads a set of reports, identified eithe Merging Scan Job Results ~~~~~~~~~~~~~~~~~~~~~~~~ + The ``qpc report merge`` command merges report data and returns the report identifier of the merged report. You can use this report identifier and the ``qpc report`` command with the ``details`` or ``deployments`` subcommands to retrieve a report from the merged results. **qpc report merge (--job-ids** *scan_job_identifiers* **|** **--report-ids** *report_identifiers* **|** **--json-files** *json_details_report_files* **|** **--json-directory** *path_to_directory_of_json_files* **)** @@ -637,6 +643,7 @@ The ``qpc report merge`` command runs an asynchronous job. The output of this co Viewing the Status of a Report Merge ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + The ``qpc report merge-status`` command can be used to check the status of a large merge of JSON details report files. A large merge is created with the ``qpc report merge --json-directory=path_to_directory_of_json_files`` command. This command returns a merge job ID that you can use to access the status of the merge. **qpc report merge-status (--job** *report_job_identifier* **)** @@ -648,6 +655,7 @@ The ``qpc report merge-status`` command can be used to check the status of a lar Manually Reprocessing Reports ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + The ``qpc report upload`` command uploads a details report to reprocess it. This could be useful if a value in the details report caused a system to be excluded. After modication of the details report, simply run the ``qpc report upload --json-file DETAILS_REPORT_JSON``. 
**qpc report upload (--json-file** *json_details_report_file* **)** @@ -698,6 +706,7 @@ To configure Insights credentials, simply provide the appropriate username and p Publishing to Insights ~~~~~~~~~~~~~~~~~~~~~~ + The ``qpc insights publish`` command allows you to publish an Insights report to Red Hat Insights and its services. You have two options for publishing a report: use the associated report identifier from the generating scan, or provide a previously downloaded report as an input file. **qpc insights publish (--report** *report_identifiers* **| --input-file** *path_to_tar_gz* ) @@ -767,9 +776,9 @@ Examples ``qpc cred add --name ansible_cred --type ansible --username ansible_user --password`` -* Creating a new acs type credential +* Creating a new rhacs type credential - ``qpc cred add --name acs_cred --type acs --token`` + ``qpc cred add --name rhacs_cred --type rhacs --token`` * Listing all credentials @@ -811,9 +820,9 @@ Examples ``qpc source add --name ansible_source --type ansible --hosts 10.0.205.205 --ssl-cert-verify false --cred ansible_cred`` -* Creating a new acs source +* Creating a new rhacs source - ``qpc source add --name acs_source --type acs --hosts acs-cluster.example.com --cred acs_cred`` + ``qpc source add --name rhacs_source --type rhacs --hosts rhacs-cluster.example.com --cred rhacs_cred`` * Editing a source diff --git a/docs/test_jina_render.py b/docs/test_jina_render.py new file mode 100644 index 00000000..9c4b6468 --- /dev/null +++ b/docs/test_jina_render.py @@ -0,0 +1,101 @@ +"""Tests for jinja-render.py standalone script.""" + +import importlib.util +import os +import random +import sys +import tempfile +from io import StringIO +from pathlib import Path + +import pytest + +sample_jinja_template = "hello, {{ NAME }}" + + +@pytest.fixture(scope="module") +def jinja_render(): + """ + Import the jinja-render script as a module. 
+ + This is necessary because jinja-render.py is a standalone script + that does not live in a regular Python package. + """ + module_name = "jinja_render" + file_path = Path(__file__).parent / "jinja-render.py" + spec = importlib.util.spec_from_file_location(module_name, file_path) + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + return module + + +def test_get_env_vars(jinja_render, mocker): + """Test getting only env vars that match the given pattern.""" + mocker.patch.dict( + os.environ, + { + "unrelated": "zero", + "QPC_THING": "one", + "QPC_thang": "two", + "NOT_QPC_OTHER": "three", + }, + clear=True, + ) + expected = {"QPC_THING": "one", "QPC_thang": "two"} + allow_pattern = "^QPC_.*" + actual = jinja_render.get_env_vars(allow_pattern) + assert actual == expected + + +def test_read_stdin_write_stdout(jinja_render, mocker, capsys): + """Test reading the Jinja template from stdin and writing output to stdout.""" + fake_name = str(random.random()) + expected_stdout = f"hello, {fake_name}" + + fake_env_vars = {"NAME": fake_name} + fake_sys_argv = ["script.py", "-e", ".*"] + fake_stdin = StringIO(sample_jinja_template) + + mocker.patch.dict(os.environ, fake_env_vars, clear=True) + mocker.patch.object(sys, "argv", fake_sys_argv) + mocker.patch.object(sys, "stdin", fake_stdin) + + jinja_render.main() + actual_stdout = capsys.readouterr().out + assert actual_stdout == expected_stdout + + +@pytest.fixture +def template_path(): + """Temp file containing a Jija template.""" + tmp_file = tempfile.NamedTemporaryFile() + tmp_file.write(sample_jinja_template.encode()) + tmp_file.seek(0) + yield tmp_file.name + tmp_file.close() + + +def test_read_file_write_file(jinja_render, template_path, mocker, capsys): + """Test reading the Jinja template from file and writing output to file.""" + fake_name = str(random.random()) + expected_stdout = f"hello, {fake_name}" + fake_env_vars = {"NAME": fake_name} + with tempfile.TemporaryDirectory() as 
output_directory: + output_path = Path(output_directory) / str(random.random()) + fake_sys_argv = [ + "script.py", + "-e", + ".*", + "-t", + template_path, + "-o", + str(output_path), + ] + mocker.patch.dict(os.environ, fake_env_vars, clear=True) + mocker.patch.object(sys, "argv", fake_sys_argv) + jinja_render.main() + + with output_path.open() as output_file: + actual_output = output_file.read() + + assert actual_output == expected_stdout diff --git a/poetry.lock b/poetry.lock index 18487864..42b0afa8 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. [[package]] name = "annotated-types" @@ -11,17 +11,6 @@ files = [ {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, ] -[[package]] -name = "argparse-ext" -version = "1.4.2" -description = "argparse extension;" -optional = false -python-versions = "*" -files = [ - {file = "argparse-ext-1.4.2.tar.gz", hash = "sha256:266ef372853393a34a96405352c73c6598585026da6a26d01621fb07dc170df3"}, - {file = "argparse_ext-1.4.2-py3-none-any.whl", hash = "sha256:a1b9e901f401c534d18d51f30a91b508fd89bc109e2b447ba21ce83e03b1122c"}, -] - [[package]] name = "black" version = "23.9.1" @@ -407,37 +396,6 @@ files = [ {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, ] -[[package]] -name = "gitdb" -version = "4.0.10" -description = "Git Object Database" -optional = false -python-versions = ">=3.7" -files = [ - {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, - {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, -] - -[package.dependencies] -smmap = ">=3.0.1,<6" - -[[package]] -name = 
"gitpython" -version = "3.1.37" -description = "GitPython is a Python library used to interact with Git repositories" -optional = false -python-versions = ">=3.7" -files = [ - {file = "GitPython-3.1.37-py3-none-any.whl", hash = "sha256:5f4c4187de49616d710a77e98ddf17b4782060a1788df441846bddefbb89ab33"}, - {file = "GitPython-3.1.37.tar.gz", hash = "sha256:f9b9ddc0761c125d5780eab2d64be4873fc6817c2899cbcb34b02344bdc7bc54"}, -] - -[package.dependencies] -gitdb = ">=4.0.1,<5" - -[package.extras] -test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mypy", "pre-commit", "pytest", "pytest-cov", "pytest-sugar"] - [[package]] name = "idna" version = "3.4" @@ -460,23 +418,6 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] -[[package]] -name = "jinja-cli" -version = "1.2.2" -description = "a command line interface to jinja;" -optional = false -python-versions = "*" -files = [ - {file = "jinja-cli-1.2.2.tar.gz", hash = "sha256:3a702c4a988046e02e08d7cf40a362bf2050aeafe08d926a54bc395610f0f5a2"}, - {file = "jinja_cli-1.2.2-py3-none-any.whl", hash = "sha256:86afa68cb2c2626cb447a445b3ab41e5da59dbe3fafa609a4624fda9b250fde9"}, -] - -[package.dependencies] -argparse-ext = "*" -Jinja2 = ">=2.11.0" -PyYAML = "*" -xmltodict = "*" - [[package]] name = "jinja2" version = "3.1.2" @@ -545,16 +486,6 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = 
"MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -953,65 +884,6 @@ pytest = ">=5.0" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] 
-[[package]] -name = "pyyaml" -version = "6.0.1" -description = "YAML parser and emitter for Python" -optional = false -python-versions = ">=3.6" -files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = 
"PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, -] - [[package]] name = "requests" version = "2.31.0" @@ -1118,28 +990,28 @@ type-check = ["mypy (>=1.0)", "types-docutils (>=0.18)"] [[package]] name = "ruff" -version = "0.0.292" +version = "0.1.3" description = "An extremely fast Python linter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.0.292-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:02f29db018c9d474270c704e6c6b13b18ed0ecac82761e4fcf0faa3728430c96"}, - {file = "ruff-0.0.292-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:69654e564342f507edfa09ee6897883ca76e331d4bbc3676d8a8403838e9fade"}, - {file = "ruff-0.0.292-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c3c91859a9b845c33778f11902e7b26440d64b9d5110edd4e4fa1726c41e0a4"}, - {file = "ruff-0.0.292-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f4476f1243af2d8c29da5f235c13dca52177117935e1f9393f9d90f9833f69e4"}, - {file = "ruff-0.0.292-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be8eb50eaf8648070b8e58ece8e69c9322d34afe367eec4210fdee9a555e4ca7"}, - {file = "ruff-0.0.292-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:9889bac18a0c07018aac75ef6c1e6511d8411724d67cb879103b01758e110a81"}, - {file = "ruff-0.0.292-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6bdfabd4334684a4418b99b3118793f2c13bb67bf1540a769d7816410402a205"}, - {file = "ruff-0.0.292-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7c77c53bfcd75dbcd4d1f42d6cabf2485d2e1ee0678da850f08e1ab13081a8"}, - {file = "ruff-0.0.292-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e087b24d0d849c5c81516ec740bf4fd48bf363cfb104545464e0fca749b6af9"}, - {file = "ruff-0.0.292-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:f160b5ec26be32362d0774964e218f3fcf0a7da299f7e220ef45ae9e3e67101a"}, - {file = "ruff-0.0.292-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ac153eee6dd4444501c4bb92bff866491d4bfb01ce26dd2fff7ca472c8df9ad0"}, - {file = "ruff-0.0.292-py3-none-musllinux_1_2_i686.whl", hash = "sha256:87616771e72820800b8faea82edd858324b29bb99a920d6aa3d3949dd3f88fb0"}, - {file = 
"ruff-0.0.292-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b76deb3bdbea2ef97db286cf953488745dd6424c122d275f05836c53f62d4016"}, - {file = "ruff-0.0.292-py3-none-win32.whl", hash = "sha256:e854b05408f7a8033a027e4b1c7f9889563dd2aca545d13d06711e5c39c3d003"}, - {file = "ruff-0.0.292-py3-none-win_amd64.whl", hash = "sha256:f27282bedfd04d4c3492e5c3398360c9d86a295be00eccc63914438b4ac8a83c"}, - {file = "ruff-0.0.292-py3-none-win_arm64.whl", hash = "sha256:7f67a69c8f12fbc8daf6ae6d36705037bde315abf8b82b6e1f4c9e74eb750f68"}, - {file = "ruff-0.0.292.tar.gz", hash = "sha256:1093449e37dd1e9b813798f6ad70932b57cf614e5c2b5c51005bf67d55db33ac"}, + {file = "ruff-0.1.3-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:b46d43d51f7061652eeadb426a9e3caa1e0002470229ab2fc19de8a7b0766901"}, + {file = "ruff-0.1.3-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:b8afeb9abd26b4029c72adc9921b8363374f4e7edb78385ffaa80278313a15f9"}, + {file = "ruff-0.1.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca3cf365bf32e9ba7e6db3f48a4d3e2c446cd19ebee04f05338bc3910114528b"}, + {file = "ruff-0.1.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4874c165f96c14a00590dcc727a04dca0cfd110334c24b039458c06cf78a672e"}, + {file = "ruff-0.1.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eec2dd31eed114e48ea42dbffc443e9b7221976554a504767ceaee3dd38edeb8"}, + {file = "ruff-0.1.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:dc3ec4edb3b73f21b4aa51337e16674c752f1d76a4a543af56d7d04e97769613"}, + {file = "ruff-0.1.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e3de9ed2e39160800281848ff4670e1698037ca039bda7b9274f849258d26ce"}, + {file = "ruff-0.1.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c595193881922cc0556a90f3af99b1c5681f0c552e7a2a189956141d8666fe8"}, + {file = 
"ruff-0.1.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f75e670d529aa2288cd00fc0e9b9287603d95e1536d7a7e0cafe00f75e0dd9d"}, + {file = "ruff-0.1.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:76dd49f6cd945d82d9d4a9a6622c54a994689d8d7b22fa1322983389b4892e20"}, + {file = "ruff-0.1.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:918b454bc4f8874a616f0d725590277c42949431ceb303950e87fef7a7d94cb3"}, + {file = "ruff-0.1.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d8859605e729cd5e53aa38275568dbbdb4fe882d2ea2714c5453b678dca83784"}, + {file = "ruff-0.1.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:0b6c55f5ef8d9dd05b230bb6ab80bc4381ecb60ae56db0330f660ea240cb0d4a"}, + {file = "ruff-0.1.3-py3-none-win32.whl", hash = "sha256:3e7afcbdcfbe3399c34e0f6370c30f6e529193c731b885316c5a09c9e4317eef"}, + {file = "ruff-0.1.3-py3-none-win_amd64.whl", hash = "sha256:7a18df6638cec4a5bd75350639b2bb2a2366e01222825562c7346674bdceb7ea"}, + {file = "ruff-0.1.3-py3-none-win_arm64.whl", hash = "sha256:12fd53696c83a194a2db7f9a46337ce06445fb9aa7d25ea6f293cf75b21aca9f"}, + {file = "ruff-0.1.3.tar.gz", hash = "sha256:3ba6145369a151401d5db79f0a47d50e470384d0d89d0d6f7fab0b589ad07c34"}, ] [[package]] @@ -1180,17 +1052,6 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] -[[package]] -name = "smmap" -version = "5.0.1" -description = "A pure Python implementation of a sliding window memory map manager" -optional = false -python-versions = ">=3.7" -files = [ - {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"}, - {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, -] - [[package]] name = "typer" version = "0.9.0" @@ -1268,18 +1129,7 @@ files = [ {file = "xdg-6.0.0.tar.gz", hash = "sha256:24278094f2d45e846d1eb28a2ebb92d7b67fc0cab5249ee3ce88c95f649a1c92"}, 
] -[[package]] -name = "xmltodict" -version = "0.13.0" -description = "Makes working with XML feel like you are working with JSON" -optional = false -python-versions = ">=3.4" -files = [ - {file = "xmltodict-0.13.0-py2.py3-none-any.whl", hash = "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852"}, - {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"}, -] - [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "108114e829dbc7bc5e00e433917ec3195110e719a2e1dc3142142fd81d84b677" +content-hash = "2e9a1d63cfcafcce85239e0e9efa084691f2263a2c8ad8cd9136abf167c7c7de" diff --git a/pyproject.toml b/pyproject.toml index e9e3b46a..6f41b98b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "qpc" -version = "1.4.0" +version = "1.4.1" description = "" authors = ["QPC Team "] license = "GPLv3" @@ -19,7 +19,6 @@ requests = ">=2.28.1" cryptography = ">=37.0.4" packaging = "^23.1" setuptools = "^67.8.0" -gitpython = "^3.1.32" [tool.poetry.group.dev.dependencies] coverage = ">=6.4.2" @@ -30,12 +29,13 @@ pytest-lazy-fixture = ">=0.6.3" requests-mock = ">=1.9.3" pytest-mock = "^3.8.2" rstcheck = "^6.1.1" -ruff = "^0.0.292" +ruff = "^0.1.3" pip-tools = "^7.1.0" pybuild-deps = "^0.1.1" + [tool.poetry.group.build.dependencies] -jinja-cli = "^1.2.2" +jinja2 = "^3.1.2" [build-system] requires = ["poetry-core>=1.0.0"] diff --git a/qpc/release.py b/qpc/release.py index 40b2c5ef..523fb5ee 100644 --- a/qpc/release.py +++ b/qpc/release.py @@ -1,9 +1,8 @@ """File to hold release constants.""" import os +import subprocess from pathlib import Path -import git - from . 
import __package__version__ VERSION = __package__version__ @@ -20,7 +19,24 @@ def get_current_sha1() -> str: return qpc_commit try: repo_root = Path(__file__).absolute().parent.parent - repo = git.Repo(repo_root) - except git.exc.InvalidGitRepositoryError: + git_env = os.environ.copy() + git_env["LANG"] = "C" + git_env["LC_ALL"] = "C" + git_result = subprocess.run( + ("git", "rev-parse", "HEAD"), + env=git_env, + cwd=repo_root, + capture_output=True, + check=True, + ) + except (FileNotFoundError, subprocess.CalledProcessError): + # FileNotFoundError raises when `git` program not found. + # CalledProcessError raises when `git` has non-zero return code. + return "UNKNOWN" + git_sha1 = git_result.stdout.decode().split("\n")[0] + try: + int(git_sha1, 16) + except ValueError: + # ValueError raises when the string does not contain a hexadecimal value. return "UNKNOWN" - return repo.rev_parse("HEAD").hexsha + return git_sha1 diff --git a/qpc/source/__init__.py b/qpc/source/__init__.py index 87b7286d..3371f883 100644 --- a/qpc/source/__init__.py +++ b/qpc/source/__init__.py @@ -12,7 +12,7 @@ OPENSHIFT_SOURCE_TYPE = "openshift" SATELLITE_SOURCE_TYPE = "satellite" VCENTER_SOURCE_TYPE = "vcenter" -ACS_SOURCE_TYPE = "acs" +RHACS_SOURCE_TYPE = "rhacs" SOURCE_URI = "/api/v1/sources/" SOURCE_TYPE_CHOICES = [ @@ -21,7 +21,7 @@ OPENSHIFT_SOURCE_TYPE, SATELLITE_SOURCE_TYPE, VCENTER_SOURCE_TYPE, - ACS_SOURCE_TYPE, + RHACS_SOURCE_TYPE, ] BOOLEAN_CHOICES = ["true", "false"] diff --git a/qpc/test_release.py b/qpc/test_release.py index f6ede259..67ca934d 100644 --- a/qpc/test_release.py +++ b/qpc/test_release.py @@ -1,4 +1,5 @@ """Test the `qpc.release` module.""" +import subprocess from unittest import mock from qpc import release @@ -12,23 +13,31 @@ def test_get_current_sha1_uses_env_var(): assert actual_value == expected_value, "failed to get value from environment" -def test_get_current_sha1_uses_git(): - """Test getting the actual current from git.""" 
+@mock.patch("qpc.release.subprocess.run") +@mock.patch.dict(release.os.environ, {"QPC_COMMIT": ""}) +def test_get_current_sha1_uses_git(mock_run): + """Test getting the actual current commit from git.""" expected_value = "DECAFBAD" - with mock.patch.dict(release.os.environ, {"QPC_COMMIT": ""}), mock.patch( - "git.Repo.rev_parse" - ) as mock_rev_parse: - mock_rev_parse.return_value.hexsha = expected_value - actual_value = release.get_current_sha1() + mock_run.return_value.stdout = expected_value.encode() + actual_value = release.get_current_sha1() assert actual_value == expected_value, "failed to get SHA-1 value from git" -def test_get_current_sha1_unknown(): +@mock.patch("qpc.release.subprocess.run") +@mock.patch.dict(release.os.environ, {"QPC_COMMIT": ""}) +def test_get_current_sha1_unknown_no_git_repo(mock_run): """Test trying to get the SHA-1 when the env var and git repo are both missing.""" expected_value = "UNKNOWN" - with mock.patch.dict(release.os.environ, {"QPC_COMMIT": ""}), mock.patch( - "git.Repo" - ) as mock_repo_class: - mock_repo_class.side_effect = release.git.exc.InvalidGitRepositoryError - actual_value = release.get_current_sha1() + mock_run.side_effect = subprocess.CalledProcessError(returncode=420, cmd="git") + actual_value = release.get_current_sha1() + assert actual_value == expected_value, "failed to get UNKNOWN value" + + +@mock.patch("qpc.release.subprocess.run") +@mock.patch.dict(release.os.environ, {"QPC_COMMIT": ""}) +def test_get_current_sha1_unknown_unexpected_git_stdout(mock_run): + """Test trying to get the SHA-1 when the git outputs a non-hexadecimal value.""" + expected_value = "UNKNOWN" + mock_run.return_value.stdout = "this is not hexadecimal".encode() + actual_value = release.get_current_sha1() assert actual_value == expected_value, "failed to get UNKNOWN value" diff --git a/qpc/utils.py b/qpc/utils.py index a8e6e3f2..5a80bd19 100644 --- a/qpc/utils.py +++ b/qpc/utils.py @@ -51,7 +51,7 @@ LOG_LEVEL_INFO = 0 
-QPC_MIN_SERVER_VERSION = "1.4.0" +QPC_MIN_SERVER_VERSION = "1.4.3" logging.captureWarnings(True) logger = logging.getLogger(__name__) diff --git a/requirements-build.txt b/requirements-build.txt index 54bee52a..0e267eeb 100644 --- a/requirements-build.txt +++ b/requirements-build.txt @@ -32,19 +32,18 @@ pycparser==2.21 ; python_version >= "3.11" and python_version < "4.0" # via cffi semantic-version==2.10.0 # via setuptools-rust -setuptools-rust==1.7.0 +setuptools-rust==1.8.1 # via cryptography setuptools-scm==8.0.4 # via # pluggy # setuptools-rust -trove-classifiers==2023.9.19 + # setuptools-scm +trove-classifiers==2023.10.18 # via hatchling typing-extensions==4.8.0 - # via - # setuptools-rust - # setuptools-scm -wheel==0.41.2 + # via setuptools-scm +wheel==0.41.3 # via cryptography # The following packages are considered to be unsafe in a requirements file: @@ -53,7 +52,6 @@ setuptools==68.2.2 ; python_version >= "3.11" and python_version < "4.0" # calver # cffi # cryptography - # gitpython # pathspec # pluggy # setuptools-rust diff --git a/requirements.txt b/requirements.txt index 4da260a7..45180c9b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,12 +2,9 @@ certifi==2023.7.22 ; python_version >= "3.11" and python_version < "4.0" cffi==1.16.0 ; python_version >= "3.11" and python_version < "4.0" charset-normalizer==3.3.0 ; python_version >= "3.11" and python_version < "4.0" cryptography==41.0.4 ; python_version >= "3.11" and python_version < "4.0" -gitdb==4.0.10 ; python_version >= "3.11" and python_version < "4.0" -gitpython==3.1.37 ; python_version >= "3.11" and python_version < "4.0" idna==3.4 ; python_version >= "3.11" and python_version < "4.0" packaging==23.2 ; python_version >= "3.11" and python_version < "4.0" pycparser==2.21 ; python_version >= "3.11" and python_version < "4.0" requests==2.31.0 ; python_version >= "3.11" and python_version < "4.0" setuptools==67.8.0 ; python_version >= "3.11" and python_version < "4.0" -smmap==5.0.1 ; 
python_version >= "3.11" and python_version < "4.0" urllib3==2.0.6 ; python_version >= "3.11" and python_version < "4.0"