74 changes: 48 additions & 26 deletions src/daqpytools/apps/logging_demonstrator.py
@@ -296,7 +296,7 @@
@click.option(
"-e",
"--ersprotobufstream",
-    is_flag=True,
+    type=str,
help=(
"Set up an ERS handler, and publish to ERS"
)
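
Since `--ersprotobufstream` now takes a value instead of acting as a boolean flag, the option carries the ERS session name. A minimal sketch of exercising the changed option with click's test runner (editor's example; the command object's import name is assumed, as it is not shown in this diff):

    from click.testing import CliRunner

    from daqpytools.apps.logging_demonstrator import main  # import name assumed

    runner = CliRunner()
    # "-e" now expects a session-name string rather than a bare flag.
    result = runner.invoke(main, ["-e", "session_tester"])
    print(result.exit_code)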
@@ -364,7 +364,7 @@
stream_handlers: bool,
child_logger: bool,
disable_logger_inheritance: bool,
-    ersprotobufstream: bool,
+    ersprotobufstream: str,
    handlertypes: bool,
    handlerconf: bool,
throttle: bool,
@@ -384,7 +384,7 @@
disable_logger_inheritance (bool): If true, disable logger inheritance so each
logger instance only uses the logger handlers assigned to the given logger
instance.
-        ersprotobufstream (bool): If true, sets up an ERS protobuf handler. Error msg
+        ersprotobufstream (str): Sets up an ERS protobuf handler with the supplied
+            session name. Error messages

[GitHub Actions / ruff] src/daqpytools/apps/logging_demonstrator.py:387:89: E501 Line too long (102 > 88)

are demonstrated in the HandlerType demonstration, requiring handlerconf
to be set to true. The topic for these tests is session_tester.
handlertypes (bool): If true, demonstrates the advanced feature of HandlerTypes.
Expand All @@ -401,36 +401,58 @@
LoggerSetupError: If no handlers are set up for the logger.
"""
logger_name = "daqpytools_logging_demonstrator"

os.environ["DUNEDAQ_ERS_WARNING"] = "erstrace,throttle,lstdout"
os.environ["DUNEDAQ_ERS_INFO"] = "lstderr,throttle,lstdout"
os.environ["DUNEDAQ_ERS_FATAL"] = "rich,lstdout"
os.environ["DUNEDAQ_ERS_ERROR"] = (
"erstrace,"
"throttle,"
"lstdout,"
"protobufstream(monkafka.cern.ch:30092)"
)
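
    # (Editor's sketch) Each DUNEDAQ_ERS_* value is a comma-separated handler
    # list that handlers.py parses token by token: plain tokens such as
    # "lstdout" or "throttle" map to HandlerTypes, and
    # "protobufstream(host:port)" additionally carries the Kafka endpoint, e.g.
    #   os.environ["DUNEDAQ_ERS_ERROR"].split(",")
    #   -> ["erstrace", "throttle", "lstdout",
    #       "protobufstream(monkafka.cern.ch:30092)"]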

    main_logger: logging.Logger = get_daq_logger(
        logger_name=logger_name,
        log_level=log_level,
        use_parent_handlers=not disable_logger_inheritance,
-        rich_handler=rich_handler,
-        file_handler_path=file_handler_path,
-        stream_handlers=stream_handlers,
-        ers_kafka_handler=ersprotobufstream,
-        throttle=throttle
+        rich_handler=False,
+        setup_ers_handlers=True,
+        ers_kafka_handler=ersprotobufstream,
    )

if not suppress_basic:
test_main_functions(main_logger)
main_logger.warning("test")


    # main_logger: logging.Logger = get_daq_logger(
    #     logger_name=logger_name,
    #     log_level=log_level,
    #     use_parent_handlers=not disable_logger_inheritance,
    #     rich_handler=rich_handler,
    #     file_handler_path=file_handler_path,
    #     stream_handlers=stream_handlers,
    #     ers_kafka_handler=ersprotobufstream,
    #     throttle=throttle
    # )

[GitHub Actions / ruff] src/daqpytools/apps/logging_demonstrator.py:427-435: ERA001 Found commented-out code

    # if not suppress_basic:
    #     test_main_functions(main_logger)

-    if child_logger:
-        test_child_logger(
-            logger_name,
-            log_level,
-            disable_logger_inheritance,
-            rich_handler,
-            file_handler_path,
-            stream_handlers
-        )
+    # if child_logger:
+    #     test_child_logger(
+    #         logger_name,
+    #         log_level,
+    #         disable_logger_inheritance,
+    #         rich_handler,
+    #         file_handler_path,
+    #         stream_handlers
+    #     )

-    if throttle:
-        test_throttle(main_logger)
-    if handlertypes:
-        test_handlertypes(main_logger)
-    if handlerconf:
-        test_handlerconf(main_logger)
+    # if throttle:
+    #     test_throttle(main_logger)
+    # if handlertypes:
+    #     test_handlertypes(main_logger)
+    # if handlerconf:
+    #     test_handlerconf(main_logger)


if __name__ == "__main__":
120 changes: 119 additions & 1 deletion src/daqpytools/logging/handlers.py
@@ -191,7 +191,7 @@ class ERSPyLogHandlerConf:
are not yet supported.
"""
handlers: list = field(default_factory = lambda: [])
-    protobufconf: ProtobufConf = field(default_factory = lambda: ProtobufConf())
+    protobufconf: ProtobufConf | None = field(default=None)

@dataclass
class LogHandlerConf:
@@ -275,6 +275,7 @@ def _convert_str_to_handlertype(handler_str: str) -> tuple[HandlerType,
converts "protobufstream(url:port)" to return both the HandlerType and the
protobuf configuration
"""
# print(f"{handler_str=}")
if "erstrace" in handler_str:
msg = (
"ERSTrace is a C++ implementation, "
@@ -297,10 +298,12 @@ def _make_ers_handler_conf(ers_log_level: str) -> ERSPyLogHandlerConf:
"""Generates the ERSPyLogHandlerConf from reading an environment variable."""
erspyloghandlerconf = ERSPyLogHandlerConf()
envvalue = os.getenv(ers_log_level)
# print(f"{envvalue=}")
if envvalue is None:
raise ERSEnvError(ers_log_level)

for h in envvalue.split(","):
# print(f"{h=}")
handlertype, kafkaconf = LogHandlerConf._convert_str_to_handlertype(h)
erspyloghandlerconf.handlers.append(handlertype)
if kafkaconf:
Expand Down Expand Up @@ -543,6 +546,19 @@ def _format_timestamp(timestamp: float) -> str:
padding: int = LOG_RECORD_PADDING.get("time", 25)
time_str: str = dt.strftime(DATE_TIME_BASE_FORMAT).ljust(padding)[:padding]
return Text(time_str, style="logging.time")


def add_throttle_filter(log: logging.Logger) -> None:
    """Add the throttle filter to the logger.

    Args:
        log (logging.Logger): Logger to add the throttle filter to.

    Returns:
        None
    """
    log.addFilter(ThrottleFilter())
    return
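
# Editor's sketch (not part of the PR): the filter attaches to the logger
# itself, so records are vetted before any handler sees them. Whether anything
# is actually dropped depends on ThrottleFilter's parameters:
#
#     log = logging.getLogger("noisy")
#     add_throttle_filter(log)
#     for _ in range(1000):
#         log.warning("same message")  # ThrottleFilter may suppress repeats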

def check_parent_handlers(
log: logging.Logger,
Expand Down Expand Up @@ -717,3 +733,105 @@ def add_file_handler(log: logging.Logger, use_parent_handlers: bool, path: str)
log.addHandler(file_handler)
return


def _logger_has_handler(
    log: logging.Logger,
    handler_type: type[logging.Handler],
    target_stream: io.IOBase | None = None,
) -> bool:
    """Check if logger already has a matching handler.

    For StreamHandler, ``target_stream`` can be used to distinguish stdout/stderr.
    """
    if target_stream is not None:
        # Match on the concrete stream so a stdout handler does not shadow a
        # stderr one (a bare type match would treat them as interchangeable).
        return any(
            isinstance(handler, logging.StreamHandler)
            and handler.stream is target_stream
            for handler in log.handlers
        )
    return any(isinstance(handler, handler_type) for handler in log.handlers)
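
# Editor's sketch (not part of the PR): with a stdout StreamHandler attached,
# the stream check tells the two apart:
#
#     log = logging.getLogger("demo")
#     log.addHandler(logging.StreamHandler(sys.stdout))
#     _logger_has_handler(log, logging.StreamHandler,
#                         target_stream=cast(io.IOBase, sys.stdout))  # True
#     _logger_has_handler(log, logging.StreamHandler,
#                         target_stream=cast(io.IOBase, sys.stderr))  # False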


def _logger_has_filter(log: logging.Logger, filter_type: type[logging.Filter]) -> bool:
"""Check if logger already has a matching filter type."""
return any(isinstance(logger_filter, filter_type) for logger_filter in log.filters)


def add_handlers_from_types(
log: logging.Logger,
handler_types: set[HandlerType],
ers_session_name: str | None,
) -> None:
"""Add handlers to a logger based on HandlerType values.

This helper intentionally supports only the default options for now:
- ``use_parent_handlers`` is always True.
- ``HandlerType.File`` is not supported and raises immediately.
- ``HandlerType.Protobufstream`` requires ``ers_session_name``.
"""
if HandlerType.File in handler_types:
err_msg = "HandlerType.File is not supported by add_handlers_from_types"
raise ValueError(err_msg)

if HandlerType.Protobufstream in handler_types and not ers_session_name:
err_msg = "ers_session_name is required for HandlerType.Protobufstream"
raise ValueError(err_msg)

effective_handler_types = set(handler_types)
if HandlerType.Stream in effective_handler_types:
effective_handler_types.update({HandlerType.Lstdout, HandlerType.Lstderr})

existing_stream_handlers = {
HandlerType.Lstdout
if _logger_has_handler(
log, logging.StreamHandler, target_stream=cast(io.IOBase, sys.stdout)
)
else None,
HandlerType.Lstderr
if _logger_has_handler(
log, logging.StreamHandler, target_stream=cast(io.IOBase, sys.stderr)
)
else None,
}
existing_stream_handlers.discard(None)

existing_handlers = {
HandlerType.Rich if _logger_has_handler(log, FormattedRichHandler) else None,
HandlerType.Protobufstream
if _logger_has_handler(log, ERSKafkaLogHandler)
else None,
HandlerType.Throttle if _logger_has_filter(log, ThrottleFilter) else None,
}
existing_handlers.discard(None)
existing_handlers.update(existing_stream_handlers)

dispatch = {
HandlerType.Rich: lambda: add_rich_handler(log, True),
HandlerType.Lstdout: lambda: add_stdout_handler(log, True),
HandlerType.Lstderr: lambda: add_stderr_handler(log, True),
HandlerType.Protobufstream: lambda: add_ers_kafka_handler(
log, True, ers_session_name
),
        HandlerType.Throttle: lambda: add_throttle_filter(log),
}

#! Try to revisit this logic

install_order = [
HandlerType.Rich,
HandlerType.Lstdout,
HandlerType.Lstderr,
HandlerType.Protobufstream,
HandlerType.Throttle,
]

for handler_type in install_order:
if handler_type not in effective_handler_types:
continue
if handler_type in existing_handlers:
continue
installer = dispatch.get(handler_type)
if installer is None:
continue
installer()
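
A sketch of driving this helper directly (editor's example; the `HandlerType` member names are taken from this diff, the enum itself is defined elsewhere in handlers.py, and `ers_session_name` is only required when `Protobufstream` is requested):

    import logging

    log = logging.getLogger("demo")
    # Stream fans out to Lstdout + Lstderr; Throttle installs a filter rather
    # than a handler, and anything the logger already has is skipped.
    add_handlers_from_types(
        log,
        {HandlerType.Stream, HandlerType.Throttle},
        ers_session_name=None,
    )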

39 changes: 32 additions & 7 deletions src/daqpytools/logging/logger.py
Expand Up @@ -8,7 +8,9 @@

from daqpytools.logging.exceptions import LoggerSetupError
from daqpytools.logging.handlers import (
-    ThrottleFilter,
+    LogHandlerConf,
+    add_handlers_from_types,
+    add_throttle_filter,
add_ers_kafka_handler,
add_file_handler,
add_rich_handler,
@@ -71,8 +73,10 @@ def get_daq_logger(
rich_handler: bool = False,
file_handler_path: str | None = None,
stream_handlers: bool = False,
-    ers_kafka_handler: bool = False,
-    throttle: bool = False
+    ers_kafka_handler: str | None = None,
+    throttle: bool = False,
+    setup_ers_handlers: bool = False,
) -> logging.Logger:
"""C'tor for the default logging instances.

@@ -84,7 +88,7 @@
file_handler_path (str | None): Path to the file handler log file. If None, no
file handler is added.
stream_handlers (bool): Whether to add both stdout and stderr stream handlers.
-        ers_kafka_handler (bool): Whether to add an ERS protobuf handler.
+        ers_kafka_handler (str | None): If set, add an ERS protobuf handler that
+            publishes under the supplied session name.
        throttle (bool): Whether to add the throttle filter. Note: this does not
            mean outputs are filtered by default; see ThrottleFilter for details.
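+        setup_ers_handlers (bool): Whether to derive the handler set from the
+            DUNEDAQ_ERS_* environment variables (see add_handlers_from_types).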

@@ -149,12 +153,33 @@
if stream_handlers:
add_stdout_handler(logger, use_parent_handlers)
add_stderr_handler(logger, use_parent_handlers)
-    if ers_kafka_handler:
-        add_ers_kafka_handler(logger, use_parent_handlers, "session_tester")
+    if ers_kafka_handler:
+        add_ers_kafka_handler(logger, use_parent_handlers, ers_kafka_handler)

    if throttle:
        # Note: default parameters are used; customisation is not yet supported.
-        logger.addFilter(ThrottleFilter())
+        add_throttle_filter(logger)

    if setup_ers_handlers:
        # Grab the set of handler types that the ERS configuration requests.
        #! This is very dependent on the DUNEDAQ_ERS_* env variables existing!
        lhc_conf = LogHandlerConf._get_oks_conf()
        all_handlers = {
            handler
            for handler_conf in lhc_conf.values()
            for handler in handler_conf.handlers
        }
        add_handlers_from_types(logger, all_handlers, ers_kafka_handler)

# Set log level for all handlers if requested
if log_level is not logging.NOTSET:
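
Taken together, a caller can now let the ERS environment variables drive handler setup instead of naming handlers one by one. A sketch of that path (editor's example; it assumes the DUNEDAQ_ERS_* variables are exported, as the demonstrator above does, and that the remaining keyword arguments keep their defaults):

    from daqpytools.logging.logger import get_daq_logger

    # Handler types are read back from DUNEDAQ_ERS_* via LogHandlerConf;
    # the session name feeds any protobufstream/Kafka handler found there.
    logger = get_daq_logger(
        logger_name="my_app",
        setup_ers_handlers=True,
        ers_kafka_handler="session_tester",
    )
    logger.warning("hello ERS")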