Automatic Logger Level Update #1206

Merged (8 commits, Sep 15, 2022)
Changes from 1 commit
automatic logger level update
williamwen42 committed Sep 12, 2022
commit 1fce440a54c77d20ce79d1789b8a6c2266366a92
pytest.ini: 2 changes (1 addition, 1 deletion)

@@ -1,5 +1,5 @@
 [pytest]
 testpaths =
     test
-log_cli = True
+log_cli = False
 log_cli_level = INFO
test/test_recompile_ux.py: 6 changes (3 additions, 3 deletions)

@@ -85,7 +85,7 @@ def model(input):
         with unittest.mock.patch.object(
             torchdynamo.config, "cache_size_limit", expected_recompiles
         ):
-            with self.assertLogs(level="WARNING") as logs:
+            with self.assertLogs(logger="torchdynamo", level="WARNING") as logs:
                 for _ in range(10):
                     bsz = torch.randint(low=0, high=1000, size=())
                     x = torch.randn((bsz, 3, 4))
@@ -152,7 +152,7 @@ def cache_fail_test(cached_input, missed_input, expected_failure):
         # warmup
         opt_func(cached_input)

-        with self.assertLogs(level="WARNING") as logs:
+        with self.assertLogs(logger="torchdynamo", level="WARNING") as logs:
             opt_func = torchdynamo.optimize("eager")(func)
             opt_func(missed_input)
         self.assert_single_log_contains(logs, expected_failure)
@@ -190,7 +190,7 @@ def func(a, b):
         # warmup
         opt_func(a, b)

-        with self.assertLogs(level="WARNING") as logs:
+        with self.assertLogs(logger="torchdynamo", level="WARNING") as logs:
             opt_func = torchdynamo.optimize("eager")(func)
             opt_func(a, 1)
         self.assert_single_log_contains(
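Why the extra argument matters: unittest's assertLogs with logger= scopes the capture to that logger (and its children), so these assertions no longer pass or fail based on WARNING records emitted by unrelated loggers in the process. A minimal self-contained sketch of the behavior; the test class and messages here are illustrative, not part of the PR:

import logging
import unittest


class AssertLogsScopeExample(unittest.TestCase):
    def test_capture_is_scoped_to_torchdynamo(self):
        with self.assertLogs(logger="torchdynamo", level="WARNING") as logs:
            logging.getLogger("torchdynamo").warning("cache_size_limit reached")
            logging.getLogger("someotherlib").warning("not captured")
        # Only the torchdynamo record is collected.
        self.assertEqual(len(logs.records), 1)


if __name__ == "__main__":
    unittest.main()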
torchdynamo/config.py: 58 changes (56 additions, 2 deletions)

@@ -22,12 +22,63 @@
 # WARN print warnings (including graph breaks)
 # ERROR print exceptions (and what user code was being processed when it occurred)
 log_level = logging.WARNING
-# Verbose will print full stack traces on warnings and errors
-verbose = False
+
+
+def _get_loggers():
+    return [
+        logging.getLogger("torchdynamo"),
+        logging.getLogger("torchinductor"),
+    ]
+
+
+def _set_loggers_level(level):
+    for logger in _get_loggers():
+        logger.setLevel(level)
+
+
+_set_loggers_level(log_level)

 # the name of a file to write the logs to
 log_file_name = None

+LOGGING_CONFIG = {
+    "version": 1,
+    "formatters": {
+        "torchdynamo_format": {"format": "%(name)s: [%(levelname)s] %(message)s"},
+    },
+    "handlers": {
+        "torchdynamo_console": {
+            "class": "logging.StreamHandler",
+            "level": "DEBUG",
+            "formatter": "torchdynamo_format",
+            "stream": "ext://sys.stdout",
+        },
+    },
+    "loggers": {
+        "torchdynamo": {
+            "level": "DEBUG",
+            "handlers": ["torchdynamo_console"],
+            "propagate": False,
+        },
+        "torchinductor": {
+            "level": "DEBUG",
+            "handlers": ["torchdynamo_console"],
+            "propagate": False,
+        },
+    },
+    "disable_existing_loggers": False,
+}
+
+
+def init_logging():
+    if "PYTEST_CURRENT_TEST" not in os.environ:
+        logging.config.dictConfig(LOGGING_CONFIG)
+        # previous dictConfig call may overwrite logger levels
+        _set_loggers_level(log_level)
+    if log_file_name is not None:
+        log_file = logging.FileHandler(log_file_name)
+        log_file.setLevel(log_level)
+        for logger in _get_loggers():
+            logger.addHandler(log_file)
+
+
+# Verbose will print full stack traces on warnings and errors
+verbose = False

 # verify the correctness of optimized backend
 verify_correctness = False
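For reference, the handler and formatter moved here produce records of the form "torchdynamo: [WARNING] message" on stdout. A standalone sketch of what init_logging() wires up, trimmed to the torchdynamo logger only; DEMO_CONFIG is an illustrative name for a copy of the dict above:

import logging
import logging.config

# Trimmed copy of the LOGGING_CONFIG added above, for demonstration.
DEMO_CONFIG = {
    "version": 1,
    "formatters": {
        "torchdynamo_format": {"format": "%(name)s: [%(levelname)s] %(message)s"},
    },
    "handlers": {
        "torchdynamo_console": {
            "class": "logging.StreamHandler",
            "level": "DEBUG",
            "formatter": "torchdynamo_format",
            "stream": "ext://sys.stdout",
        },
    },
    "loggers": {
        "torchdynamo": {
            "level": "DEBUG",
            "handlers": ["torchdynamo_console"],
            "propagate": False,
        },
    },
    "disable_existing_loggers": False,
}

logging.config.dictConfig(DEMO_CONFIG)
logging.getLogger("torchdynamo").warning("graph break")
# stdout: torchdynamo: [WARNING] graph break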

@@ -130,6 +181,9 @@ class _AccessLimitingConfig(ModuleType):
     def __setattr__(self, name, value):
         if name not in _allowed_config_names:
             raise AttributeError(f"{__name__}.{name} does not exist")
+        # automatically set logger level whenever config.log_level is modified
+        if name == "log_level":
+            _set_loggers_level(value)
         return object.__setattr__(self, name, value)
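This hook is the point of the PR: torchdynamo.config is exposed through a module subclass that intercepts attribute writes, so a plain assignment to log_level now fans out to the loggers immediately rather than taking effect only on the next init_logging() call. A sketch of the resulting behavior, assuming a checkout with this commit applied:

import logging

import torchdynamo.config

# No manual setLevel() calls needed: the __setattr__ hook pushes the new
# level to the torchdynamo and torchinductor loggers.
torchdynamo.config.log_level = logging.DEBUG
assert logging.getLogger("torchdynamo").level == logging.DEBUG
assert logging.getLogger("torchinductor").level == logging.DEBUG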
torchdynamo/convert_frame.py: 2 changes (1 addition, 1 deletion)

@@ -18,6 +18,7 @@
 from .bytecode_analysis import remove_pointless_jumps
 from .bytecode_transformation import is_generator
 from .bytecode_transformation import transform_code_object
+from .config import init_logging
 from .eval_frame import TorchPatcher
 from .eval_frame import WrapperBackend
 from .eval_frame import always_optimize_code_objects
@@ -36,7 +37,6 @@
 from .utils import filter_stack
 from .utils import format_bytecode
 from .utils import guard_failures
-from .utils import init_logging
 from .utils import is_namedtuple
 from .utils import istype
 from .utils import orig_code_map
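Call sites now pull init_logging from torchdynamo.config, next to the log_level it reads. A sketch of the updated usage:

from torchdynamo.config import init_logging

# Installs the dictConfig handlers (skipped under pytest) and, when
# config.log_file_name is set, a file handler at config.log_level.
init_logging()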
torchdynamo/testing.py: 2 changes (0 additions, 2 deletions)

@@ -1,7 +1,6 @@
 import contextlib
 import dis
 import functools
-import logging
 import os.path
 import types
 import unittest
@@ -194,7 +193,6 @@ def tearDownClass(cls):
    @classmethod
    def setUpClass(cls):
        cls._exit_stack = contextlib.ExitStack()
-        cls._exit_stack.enter_context(patch.object(config, "log_level", logging.DEBUG))
        cls._exit_stack.enter_context(
            patch.object(config, "raise_on_backend_error", True)
        )
torchdynamo/utils.py: 51 changes (3 additions, 48 deletions)

@@ -28,8 +28,6 @@
 from torch import fx
 from torch.nn.modules.lazy import LazyModuleMixin

-import torchdynamo.config
-
 from . import config

 counters = collections.defaultdict(collections.Counter)
@@ -116,49 +114,6 @@ def fmt_fn(values, item_fn=lambda x: x):
     return headers, values


-LOGGING_CONFIG = {
-    "version": 1,
-    "formatters": {
-        "torchdynamo_format": {"format": "%(name)s: [%(levelname)s] %(message)s"},
-    },
-    "handlers": {
-        "torchdynamo_console": {
-            "class": "logging.StreamHandler",
-            "level": "DEBUG",
-            "formatter": "torchdynamo_format",
-            "stream": "ext://sys.stdout",
-        },
-    },
-    "loggers": {
-        "torchdynamo": {
-            "level": "DEBUG",
-            "handlers": ["torchdynamo_console"],
-            "propagate": False,
-        },
-        "torchinductor": {
-            "level": "DEBUG",
-            "handlers": ["torchdynamo_console"],
-            "propagate": False,
-        },
-    },
-    "disable_existing_loggers": False,
-}
-
-
-def init_logging():
-    if "PYTEST_CURRENT_TEST" not in os.environ:
-        logging.config.dictConfig(LOGGING_CONFIG)
-    td_logger = logging.getLogger("torchdynamo")
-    td_logger.setLevel(config.log_level)
-    ti_logger = logging.getLogger("torchinductor")
-    ti_logger.setLevel(config.log_level)
-    if config.log_file_name is not None:
-        log_file = logging.FileHandler(config.log_file_name)
-        log_file.setLevel(config.log_level)
-        td_logger.addHandler(log_file)
-        ti_logger.addHandler(log_file)
-
-
 # filter out all frames after entering dynamo
 def filter_stack(stack):
     user_stack = []
@@ -779,14 +734,14 @@ def format_func_info(code):

 @contextlib.contextmanager
 def disable_cache_limit():
-    prior = torchdynamo.config.cache_size_limit
-    torchdynamo.config.cache_size_limit = sys.maxsize
+    prior = config.cache_size_limit
+    config.cache_size_limit = sys.maxsize

     try:
         yield
     finally:
         pass
-        torchdynamo.config.cache_size_limit = prior
+        config.cache_size_limit = prior


 # map from transformed code back to original user code
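The disable_cache_limit helper itself is unchanged; the edit only drops the torchdynamo.config spelling in favor of the module's own relative config import. For context, a usage sketch (the wrapped function is illustrative):

import torchdynamo
from torchdynamo.utils import disable_cache_limit


def fn(x):
    return x + 1


# Temporarily lift the recompile cap, e.g. in tests that deliberately
# trigger many recompilations; the prior limit is restored on exit.
with disable_cache_limit():
    opt_fn = torchdynamo.optimize("eager")(fn)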
torchinductor/debug.py: 2 changes (1 addition, 1 deletion)

@@ -20,9 +20,9 @@
 from torch.fx.passes.tools_common import legalize_graph

 import torchinductor
+from torchdynamo.config import init_logging
 from torchdynamo.debug_utils import save_graph_repro
 from torchdynamo.debug_utils import wrap_compiler_debug
-from torchdynamo.utils import init_logging

 from . import config
 from . import ir