
Feature/issue184 loguru debug #204

Open
wants to merge 2 commits into develop

23 changes: 23 additions & 0 deletions logger_test.py
@@ -0,0 +1,23 @@
"""This test is to test that logging works properly when importing torchquad."""

import unittest
from contextlib import contextmanager
from loguru import logger

@contextmanager
def capture_logs(level="INFO", format="{message}"):
    """Capture loguru-based logs by adding a temporary list sink."""
    output = []
    handler_id = logger.add(output.append, level=level, format=format)
    try:
        yield output
    finally:
        logger.remove(handler_id)

class TestLogger(unittest.TestCase):

    def test_logging_with_tq(self):
        import torchquad  # the import itself is what this test exercises

        with capture_logs() as cap_log:
            logger.info("This message should print.")
        self.assertEqual(cap_log, ["This message should print.\n"])
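
For reference, a minimal standalone sketch of the same loguru capture pattern used by the helper above (assuming only loguru is installed; the names captured and sink_id are illustrative, not part of the PR):

from loguru import logger

captured = []
# A list's append method works as a loguru sink; format="{message}" keeps only the message text.
sink_id = logger.add(captured.append, level="INFO", format="{message}")
logger.info("hello")
logger.remove(sink_id)
# captured == ["hello\n"] -- loguru terminates each formatted record with a newline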

1 change: 0 additions & 1 deletion torchquad/__init__.py
@@ -46,5 +46,4 @@
    "_deployment_test",
]

set_log_level(os.environ.get("TORCHQUAD_LOG_LEVEL", "WARNING"))
logger.info("Initializing torchquad.")
1 change: 1 addition & 0 deletions torchquad/integration/utils.py
@@ -122,6 +122,7 @@ def _setup_integration_domain(dim, integration_domain, backend):
    # ignored unless its backend and the backend argument are the same.
    domain_arg_backend = infer_backend(integration_domain)
    convert_to_tensor = domain_arg_backend == "builtins"

    if not convert_to_tensor and backend is not None and domain_arg_backend != backend:
        logger.warning(
            "integration_domain should be a list when the backend argument is set."
4 changes: 3 additions & 1 deletion torchquad/integration/vegas_map.py
@@ -30,6 +30,7 @@ def __init__(self, N_intervals, dim, backend, dtype, alpha=0.5) -> None:

        # Boundary locations x_edges and subdomain stepsizes dx_edges
        # Subdivide the domain [0,1]^dim equally spaced in N-d, EQ 8

        self.dx_edges = (
            anp.ones((self.dim, self.N_intervals), dtype=self.dtype, like=self.backend)
            / self.N_intervals
@@ -195,7 +196,8 @@ def _reset_weight(self):

    def update_map(self):
        """Update the adaptive map, Section II C."""
        smoothed_weights = self._smooth_map(self.weights, self.counts, self.alpha)
        # smoothed_weights = self._smooth_map(self.weights, self.counts, self.alpha)
        smoothed_weights = None
        if smoothed_weights is None:
            logger.warning(
                "Cannot update the VEGASMap. This can happen with an integrand "