From 3eeb695975634d657437f60d5231a9f4389d4548 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Mon, 26 Jan 2026 16:06:36 -0500 Subject: [PATCH 01/66] update wrapmain() to reset logging. --- synapse/lib/cmd.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/synapse/lib/cmd.py b/synapse/lib/cmd.py index 265bfd566d9..d273d93533b 100644 --- a/synapse/lib/cmd.py +++ b/synapse/lib/cmd.py @@ -63,6 +63,9 @@ async def wrapmain(func, logconf=None): # pragma: no cover return 1 finally: + # Shutdown logging so that we aren't awaiting the pumptask + s_logging.reset() + await s_coro.await_bg_tasks(timeout=10) def exitmain(func, logconf=None): # pragma: no cover From c27d66d2b5814459d6bec551e27c4d35b448e2a1 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Mon, 26 Jan 2026 16:12:03 -0500 Subject: [PATCH 02/66] updated base.main() to reset logging. --- synapse/lib/base.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/synapse/lib/base.py b/synapse/lib/base.py index eebc3792323..ef7ebd85c1b 100644 --- a/synapse/lib/base.py +++ b/synapse/lib/base.py @@ -597,6 +597,11 @@ async def main(self, timeout=BASE_MAIN_BG_TASK_TIMEOUT): # pragma: no cover ''' await self.addSignalHandlers() await self.waitfini() + # TODO Fix this deferred import? + import synapse.lib.logging as s_logging + # Reset logging to stop the pump task so we're not awaiting + # it when shutting down a service. + s_logging.reset() await s_coro.await_bg_tasks(timeout) def waiter(self, count, *names, timeout=None): From f6bd3a01f7cf74754907fa8549412e5b7c5692f2 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Mon, 26 Jan 2026 16:34:42 -0500 Subject: [PATCH 03/66] FIXME - There is a issue with logging behavior and and messages logged AFTER reset is called are no longer being emitted by the handlers. Probably need to split the difference between a reset() method and a shutdown() method, where the shutdown method re-installs a stream handler so messages sent after shutdown are not lost to the ether. 
--- synapse/cortex.py | 2 ++ synapse/lib/base.py | 4 ++++ 2 files changed, 6 insertions(+) diff --git a/synapse/cortex.py b/synapse/cortex.py index 24f32190c63..d5caa396579 100644 --- a/synapse/cortex.py +++ b/synapse/cortex.py @@ -1640,6 +1640,8 @@ async def _addAllLayrRead(self): async def initServiceRuntime(self): + self.stray_base = await s_base.Base.anit() + # do any post-nexus initialization here... if self.isactive: await self._checkNexsIndx() diff --git a/synapse/lib/base.py b/synapse/lib/base.py index ef7ebd85c1b..0e3f983e363 100644 --- a/synapse/lib/base.py +++ b/synapse/lib/base.py @@ -40,13 +40,17 @@ def _fini_atexit(): # pragma: no cover if not item._fini_atexit and not OMIT_FINI_WARNS: if __debug__: logger.debug(f'At exit: Missing fini for {item}') + # FIXME - These logs are not emitted during the atexit call?? + print(f'At exit: Missing fini for {item}') for depth, call in enumerate(item.call_stack[:-2]): logger.debug(f'{depth+1:3}: {call.strip()}') + print(f'{depth+1:3}: {call.strip()}') continue try: if __debug__: logger.debug('At exit: Calling fini for %r', item) + print('At exit: Calling fini for %r', item) rv = item.fini() if s_coro.iscoro(rv): # Try to run the fini on its loop From fdc756fee0eef09337fdd961bc6c2ebc64c6a78d Mon Sep 17 00:00:00 2001 From: epiphyte Date: Mon, 26 Jan 2026 17:09:01 -0500 Subject: [PATCH 04/66] inline a few changes; add some comments for myself --- synapse/cortex.py | 7 +++---- synapse/tests/utils.py | 4 +++- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/synapse/cortex.py b/synapse/cortex.py index d5caa396579..5d8073026c6 100644 --- a/synapse/cortex.py +++ b/synapse/cortex.py @@ -6430,8 +6430,7 @@ async def _getStormEval(self, text): try: astvalu = copy.deepcopy(await s_parser.evalcache.aget(text)) except s_exc.FatalErr: # pragma: no cover - extra = self.getLogExtra(text=text) - logger.exception(f'Fatal error while parsing [{text}]', extra=extra) + logger.exception(f'Fatal error while parsing 
[{text}]', extra=self.getLogExtra(text=text)) await self.fini() raise astvalu.init(self) @@ -6441,8 +6440,7 @@ async def _getStormQuery(self, args): try: query = copy.deepcopy(await s_parser.querycache.aget(args)) except s_exc.FatalErr: # pragma: no cover - extra = self.getLogExtra(text=args[0]) - logger.exception(f'Fatal error while parsing [{args}]', extra=extra) + logger.exception(f'Fatal error while parsing [{args}]', extra = self.getLogExtra(text=args[0])) await self.fini() raise query.init(self) @@ -6495,6 +6493,7 @@ def _logStormQuery(self, text, user, info=None): info['username'] = user.name info['user'] = user.iden info['hash'] = s_storm.queryhash(text) + # TODO Understand why this is using s_logging.getLogExtra instead of self.getLogExtra extra = s_logging.getLogExtra(**info) stormlogger.log(self.stormloglvl, 'Executing storm query {%s} as [%s]', text, user.name, extra=extra) diff --git a/synapse/tests/utils.py b/synapse/tests/utils.py index 9e7d43d77e0..205a93720ec 100644 --- a/synapse/tests/utils.py +++ b/synapse/tests/utils.py @@ -1045,7 +1045,9 @@ def __init__(self, *args, **kwargs): def tearDown(self): s_logging.reset() - return super().tearDown() + # We should not need to call super here; unittest.TestCase does not have a tearDown(). 
+ # Implementers have the burden of calling super().teardown() + # return super().tearDown() def checkNode(self, node, expected): ex_ndef, ex_props = expected From 44815382541e4c00f326b049354a27003b2c7b63 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Mon, 26 Jan 2026 17:38:15 -0500 Subject: [PATCH 05/66] Logging notes --- synapse/lib/logging.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index e4ef475705d..a0c59cccc75 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -19,6 +19,7 @@ _log_wins = weakref.WeakSet() +# TODO - Handle exception groups def excinfo(e, _seen=None): if _seen is None: @@ -270,6 +271,12 @@ def reset(): _glob_logconf.clear() _glob_loginfo.clear() +# TODO like reset; but reinstall a streamhandler to rootlogger +# so logger.log() calls continue to produce formatted content +# until interpreter shutdown. +def shutdown(): + pass + def getLogConf(): logconf = _glob_logconf.copy() logconf['loginfo'] = _glob_loginfo.copy() From 3f792afbadd423172abe0cf994c6d73de4052de9 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Mon, 26 Jan 2026 17:41:11 -0500 Subject: [PATCH 06/66] Add branch build for containers --- .circleci/config.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 551442a8c74..d335e461b6d 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -626,6 +626,7 @@ workflows: branches: only: - master + - visi-logging-epiphyte-fbg - build_docker_tag: requires: From b68f15b979dc2922421a0df1b87542e9cae4b17c Mon Sep 17 00:00:00 2001 From: epiphyte Date: Mon, 26 Jan 2026 18:36:14 -0500 Subject: [PATCH 07/66] Fix whitespace --- synapse/cortex.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/cortex.py b/synapse/cortex.py index 5d8073026c6..43f1714f2fd 100644 --- a/synapse/cortex.py +++ b/synapse/cortex.py @@ -6440,7 +6440,7 @@ async def _getStormQuery(self, args): try: query = 
copy.deepcopy(await s_parser.querycache.aget(args)) except s_exc.FatalErr: # pragma: no cover - logger.exception(f'Fatal error while parsing [{args}]', extra = self.getLogExtra(text=args[0])) + logger.exception(f'Fatal error while parsing [{args}]', extra=self.getLogExtra(text=args[0])) await self.fini() raise query.init(self) From 5a713cf3ae0ec5b7a3f8b0c9148a8ead273cd1fe Mon Sep 17 00:00:00 2001 From: epiphyte Date: Tue, 27 Jan 2026 08:20:22 -0500 Subject: [PATCH 08/66] Fix branch build configuration --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index d335e461b6d..8fb97177719 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -626,7 +626,7 @@ workflows: branches: only: - master - - visi-logging-epiphyte-fbg + - visi-logging-epiphyte-fb - build_docker_tag: requires: From f8c9bd84d41eaaefb623bbb0842302859a173f71 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Tue, 27 Jan 2026 09:19:22 -0500 Subject: [PATCH 09/66] Capture sorc=None as a sentinel so we always have a string value in the traceback array. --- synapse/lib/logging.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index a0c59cccc75..351fed03e90 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -29,6 +29,9 @@ def excinfo(e, _seen=None): tb = [] for path, line, func, sorc in traceback.extract_tb(e.__traceback__): + # sorc may not be available; enusre that all output is a str + if sorc is None: + sorc = '' tb.append((path, line, func, sorc)) ret = { From d108312486dbdc30bfa30e1c7c4e039e17bddf94 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Tue, 27 Jan 2026 10:24:34 -0500 Subject: [PATCH 10/66] Add a logging.shutdown() method which ensures that the logging queue is drained; reset, then a simple StreamHandler reinstalled. 
This allows for cleaner service / tool teardown as we've exited application logic at that point and may have atexit handler logic running. --- synapse/lib/base.py | 6 +---- synapse/lib/cmd.py | 4 +--- synapse/lib/logging.py | 50 +++++++++++++++++++++++++++++++++++------- 3 files changed, 44 insertions(+), 16 deletions(-) diff --git a/synapse/lib/base.py b/synapse/lib/base.py index 0e3f983e363..31774ca0651 100644 --- a/synapse/lib/base.py +++ b/synapse/lib/base.py @@ -40,17 +40,13 @@ def _fini_atexit(): # pragma: no cover if not item._fini_atexit and not OMIT_FINI_WARNS: if __debug__: logger.debug(f'At exit: Missing fini for {item}') - # FIXME - These logs are not emitted during the atexit call?? - print(f'At exit: Missing fini for {item}') for depth, call in enumerate(item.call_stack[:-2]): logger.debug(f'{depth+1:3}: {call.strip()}') - print(f'{depth+1:3}: {call.strip()}') continue try: if __debug__: logger.debug('At exit: Calling fini for %r', item) - print('At exit: Calling fini for %r', item) rv = item.fini() if s_coro.iscoro(rv): # Try to run the fini on its loop @@ -605,7 +601,7 @@ async def main(self, timeout=BASE_MAIN_BG_TASK_TIMEOUT): # pragma: no cover import synapse.lib.logging as s_logging # Reset logging to stop the pump task so we're not awaiting # it when shutting down a service. 
- s_logging.reset() + await s_logging.shutdown() await s_coro.await_bg_tasks(timeout) def waiter(self, count, *names, timeout=None): diff --git a/synapse/lib/cmd.py b/synapse/lib/cmd.py index d273d93533b..0eb6de4e81d 100644 --- a/synapse/lib/cmd.py +++ b/synapse/lib/cmd.py @@ -63,9 +63,7 @@ async def wrapmain(func, logconf=None): # pragma: no cover return 1 finally: - # Shutdown logging so that we aren't awaiting the pumptask - s_logging.reset() - + await s_logging.shutdown() await s_coro.await_bg_tasks(timeout=10) def exitmain(func, logconf=None): # pragma: no cover diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index 351fed03e90..d336c51b3ca 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -19,6 +19,8 @@ _log_wins = weakref.WeakSet() +LOG_PUMP_TASK_TIMEOUT = int(os.environ.get('SYNDEV_LOG_TASK_SHUTDOWN_TIMEOUT', 1)) + # TODO - Handle exception groups def excinfo(e, _seen=None): @@ -137,6 +139,7 @@ class StreamHandler(logging.StreamHandler): _pump_task = None _pump_event = None + _pump_exit_flag = False _glob_handler = None _logs_fifo = collections.deque(maxlen=1000) @@ -176,6 +179,8 @@ async def _pumpLogStream(): if not logstodo and not texttodo: StreamHandler._pump_event.clear() + if StreamHandler._pump_exit_flag is True: + return continue StreamHandler._logs_todo.clear() @@ -189,6 +194,9 @@ async def _pumpLogStream(): await s_coro.executor(_writestderr, fulltext) + if StreamHandler._pump_exit_flag is True: + return + except Exception as e: traceback.print_exc() @@ -253,7 +261,7 @@ def setup(**conf): return conf -def reset(): +def reset(clear_globconf=True): # This may be called by tests to cleanup loop specific objects # ( it does not need to be called by in general by service fini ) @@ -266,19 +274,45 @@ def reset(): StreamHandler._pump_task = None StreamHandler._pump_event = None + StreamHandler._pump_exit_flag = False StreamHandler._glob_handler = None StreamHandler._text_todo.clear() StreamHandler._logs_fifo.clear() 
StreamHandler._logs_todo.clear() - _glob_logconf.clear() - _glob_loginfo.clear() + if clear_globconf: + _glob_logconf.clear() + _glob_loginfo.clear() -# TODO like reset; but reinstall a streamhandler to rootlogger -# so logger.log() calls continue to produce formatted content -# until interpreter shutdown. -def shutdown(): - pass +async def shutdown(): + ''' + Inverse of setup. Gives the pump task the opportunity to exit + before removing it and resetting log attributes. A StreamHandler + is then re-installed on the root logger to allow for messages + from sources like atexit handlers to be logged. + ''' + # Give the pump task a small opportunity to drain its + # queue of items and exit cleanly. + if StreamHandler._pump_task is not None: + StreamHandler._pump_exit_flag = True # Set the task to exit + StreamHandler._pump_event.set() # Wake the task + try: + await asyncio.wait_for(StreamHandler._pump_task, timeout=LOG_PUMP_TASK_TIMEOUT) + except asyncio.TimeoutError: + pass + + # Reset all logging configs except globals since we may need those. 
+ reset(clear_globconf=False) + + fmtclass = JsonFormatter + if not _glob_logconf.get('structlog'): + fmtclass = TextFormatter + + # Reinstall a StreamHandler and the formatter on the root logger + rootlogger = logging.getLogger() + stream = logging.StreamHandler() + stream.setFormatter(fmtclass(datefmt=_glob_logconf.get('datefmt'))) + rootlogger.addHandler(stream) def getLogConf(): logconf = _glob_logconf.copy() From bbf25ed9f2bdb7bb544301756736562dc6533986 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Wed, 28 Jan 2026 15:43:47 -0500 Subject: [PATCH 11/66] Try some different values for the embedded ahasvcname field --- synapse/lib/cell.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/synapse/lib/cell.py b/synapse/lib/cell.py index 18a18f261f0..2de5322adad 100644 --- a/synapse/lib/cell.py +++ b/synapse/lib/cell.py @@ -3878,6 +3878,7 @@ async def getCellApi(self, link, user, path): ''' return await self.cellapi.anit(self, link, user) + # Epiphyte - service to aha:service or servicename? def getLogExtra(self, **kwargs): ''' Get an extra dictionary for structured logging which can be used as a extra argument for loggers. 
@@ -3890,13 +3891,17 @@ def getLogExtra(self, **kwargs): ''' extra = s_logging.getLogExtra(**kwargs) if self.ahasvcname: - extra['loginfo']['service'] = self.ahasvcname + # extra['loginfo']['service'] = self.ahasvcname + extra['loginfo']['aha:service'] = self.ahasvcname + extra['loginfo']['aha_service'] = self.ahasvcname return extra def getLogConf(self): logconf = s_logging.getLogConf() if self.ahasvcname is not None: - logconf['loginfo']['service'] = self.ahasvcname + # logconf['loginfo']['service'] = self.ahasvcname + logconf['loginfo']['aha:service'] = self.ahasvcname + logconf['loginfo']['aha_service'] = self.ahasvcname return logconf def modCellConf(self, conf): From 22d4ae9182f9af84ed5bd2549efba43575baf34b Mon Sep 17 00:00:00 2001 From: epiphyte Date: Wed, 28 Jan 2026 22:31:09 -0500 Subject: [PATCH 12/66] Test with aha:service --- synapse/lib/cell.py | 7 +------ synapse/tests/test_lib_cell.py | 4 ++-- 2 files changed, 3 insertions(+), 8 deletions(-) diff --git a/synapse/lib/cell.py b/synapse/lib/cell.py index 2de5322adad..44b6956349b 100644 --- a/synapse/lib/cell.py +++ b/synapse/lib/cell.py @@ -1242,7 +1242,7 @@ async def __anit__(self, dirn, conf=None, readonly=False, parent=None): ahanetw = self.conf.get('aha:network') if ahaname is not None and ahanetw is not None: self.ahasvcname = f'{ahaname}.{ahanetw}' - s_logging.setLogInfo('service', self.ahasvcname) + s_logging.setLogInfo('aha:service', self.ahasvcname) # each cell has a guid path = s_common.genpath(self.dirn, 'cell.guid') @@ -3878,7 +3878,6 @@ async def getCellApi(self, link, user, path): ''' return await self.cellapi.anit(self, link, user) - # Epiphyte - service to aha:service or servicename? def getLogExtra(self, **kwargs): ''' Get an extra dictionary for structured logging which can be used as a extra argument for loggers. 
@@ -3891,17 +3890,13 @@ def getLogExtra(self, **kwargs): ''' extra = s_logging.getLogExtra(**kwargs) if self.ahasvcname: - # extra['loginfo']['service'] = self.ahasvcname extra['loginfo']['aha:service'] = self.ahasvcname - extra['loginfo']['aha_service'] = self.ahasvcname return extra def getLogConf(self): logconf = s_logging.getLogConf() if self.ahasvcname is not None: - # logconf['loginfo']['service'] = self.ahasvcname logconf['loginfo']['aha:service'] = self.ahasvcname - logconf['loginfo']['aha_service'] = self.ahasvcname return logconf def modCellConf(self, conf): diff --git a/synapse/tests/test_lib_cell.py b/synapse/tests/test_lib_cell.py index 783f1906bd4..3707740aa4c 100644 --- a/synapse/tests/test_lib_cell.py +++ b/synapse/tests/test_lib_cell.py @@ -3667,14 +3667,14 @@ async def test_cell_logging(self): # confirm last-one-wins "service" key is always initialized logger.warning('oh hai there!') mesg = stream.jsonlines()[0] - self.eq(mesg['service'], '00.cell.synapse') + self.eq(mesg['aha:service'], '00.cell.synapse') async with cell00.getLocalProxy() as proxy: logs = await proxy.logs() self.isin('oh hai there!', [m['message'] for m in logs]) - self.isin('00.cell.synapse', [m.get('service') for m in logs]) + self.isin('00.cell.synapse', [m.get('aha:service') for m in logs]) event = asyncio.Event() async def sendlogs(): From a9b125c42b5df9c01fdee8720c5142f2d2a2e084 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Wed, 28 Jan 2026 22:35:10 -0500 Subject: [PATCH 13/66] Fix datefmt issue in structlog test --- synapse/tests/test_lib_structlog.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/synapse/tests/test_lib_structlog.py b/synapse/tests/test_lib_structlog.py index bfb4b0610ca..8c05201bd35 100644 --- a/synapse/tests/test_lib_structlog.py +++ b/synapse/tests/test_lib_structlog.py @@ -94,6 +94,9 @@ def test_structlog_datefmt(self): handler = logging.StreamHandler(stream=stream) datefmt = '%m-%Y-%d' # MMYYYYYDD formatter = 
s_structlog.JsonFormatter(datefmt=datefmt) + # Default convertor for time is time.localtime but that is + # not stable for testing. Use time.gmtime so tests always work. + formatter.converter = time.gmtime handler.setFormatter(formatter) logger.addHandler(handler) From 592e930c33c47799c1345c6e571e2df8241ad3ef Mon Sep 17 00:00:00 2001 From: epiphyte Date: Thu, 29 Jan 2026 08:54:30 -0500 Subject: [PATCH 14/66] user is logged twice in _logStormQuery --- synapse/cortex.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/synapse/cortex.py b/synapse/cortex.py index 43f1714f2fd..efcd4157963 100644 --- a/synapse/cortex.py +++ b/synapse/cortex.py @@ -6490,8 +6490,9 @@ def _logStormQuery(self, text, user, info=None): if info is None: info = {} info['text'] = text - info['username'] = user.name - info['user'] = user.iden + # username and user here are logged twice. + # info['username'] = user.name + # info['user'] = user.iden info['hash'] = s_storm.queryhash(text) # TODO Understand why this is using s_logging.getLogExtra instead of self.getLogExtra extra = s_logging.getLogExtra(**info) From 1cd9aee49030fb2bf5db4a77a9988fb2508abe21 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Thu, 29 Jan 2026 10:33:03 -0500 Subject: [PATCH 15/66] Cleanup some of the direct extra= construction. --- synapse/lib/agenda.py | 26 +++++++++++++------------- synapse/lib/modelrev.py | 4 ++-- synapse/lib/storm.py | 20 ++++++++++---------- 3 files changed, 25 insertions(+), 25 deletions(-) diff --git a/synapse/lib/agenda.py b/synapse/lib/agenda.py index 73aab17e3ab..156030bcd0c 100644 --- a/synapse/lib/agenda.py +++ b/synapse/lib/agenda.py @@ -783,8 +783,7 @@ async def runloop(self): if appt.isrunning: # pragma: no cover mesg = f'Appointment {appt.iden} {appt.name} is still running from previous time when scheduled' \ f' to run. Skipping.' 
- logger.warning(mesg, - extra={'synapse': {'iden': appt.iden, 'name': appt.name}}) + logger.warning(mesg, extra=self.core.getLogExtra(iden=appt.iden, name=appt.name)) else: try: await self._execute(appt) @@ -798,7 +797,7 @@ async def runloop(self): else: # pragma: no cover mesg = str(e) logger.exception(f'Agenda error running appointment {appt.iden} {appt.name}: {mesg}', - extra={'synapse': extra}) + extra=self.core.getLogExtra(**extra)) await self._markfailed(appt, f'error: {e}') async def _execute(self, appt): @@ -808,23 +807,23 @@ async def _execute(self, appt): user = self.core.auth.user(appt.creator) if user is None: logger.warning(f'Unknown user {appt.creator} in stored appointment {appt.iden} {appt.name}', - extra={'synapse': {'iden': appt.iden, 'name': appt.name, 'user': appt.creator}}) + extra = self.core.getLogExtra(iden=appt.iden, name=appt.name, user=appt.creator)) await self._markfailed(appt, 'unknown user') return locked = user.info.get('locked') if locked: logger.warning(f'Cron {appt.iden} {appt.name} failed because creator {user.name} is locked', - extra={'synapse': {'iden': appt.iden, 'name': appt.name, 'user': appt.creator, - 'username': user.name}}) + extra=self.core.getLogExtra(iden=appt.iden, name=appt.name, user=appt.creator, + username=user.name)) await self._markfailed(appt, 'locked user') return view = self.core.getView(iden=appt.view, user=user) if view is None: logger.warning(f'Unknown view {appt.view} in stored appointment {appt.iden} {appt.name}', - extra={'synapse': {'iden': appt.iden, 'name': appt.name, 'user': appt.creator, - 'username': user.name, 'view': appt.view}}) + extra=self.core.getLogExtra(iden=appt.iden, name=appt.name, user=appt.creator, + username=user.name, view=appt.view)) await self._markfailed(appt, 'unknown view') return @@ -863,8 +862,8 @@ async def _runJob(self, user, appt): await self.core.addCronEdits(appt.iden, edits) logger.info(f'Agenda executing for iden={appt.iden}, name={appt.name} user={user.name}, 
view={appt.view}, query={appt.query}', - extra={'synapse': {'iden': appt.iden, 'name': appt.name, 'user': user.iden, 'text': appt.query, - 'username': user.name, 'view': appt.view}}) + extra=self.core.getLogExtra(iden=appt.iden, name=appt.name, user=appt.creator, text=appt.query, + username=user.name, view=appt.iden)) starttime = self._getNowTick() success = False @@ -891,6 +890,7 @@ async def _runJob(self, user, appt): elif mesg[0] == 'warn' and loglevel <= logging.WARNING: text = mesg[1].get('mesg', '') + # FIXME This pattern of mixing known and unkonwn values in a kwargs function can throw a TypeError extra = self.core.getLogExtra(cron=appt.iden, **mesg[1]) logger.warning(f'Cron job {appt.iden} issued warning: {text}', extra=extra) @@ -908,7 +908,7 @@ async def _runJob(self, user, appt): except Exception as e: result = f'raised exception {e}' logger.exception(f'Agenda job {appt.iden} {appt.name} raised exception', - extra={'synapse': {'iden': appt.iden, 'name': appt.name}} + extra = self.core.getLogInfo(iden=appt.iden, name=appt.name), ) else: success = True @@ -932,8 +932,8 @@ async def _runJob(self, user, appt): f'took {took:.3f}s' if not self.core.isactive: mesg = mesg + ' Agenda status will not be saved since the Cortex is no longer the leader.' 
- logger.info(mesg, extra={'synapse': {'iden': appt.iden, 'name': appt.name, 'user': user.iden, - 'result': result, 'username': user.name, 'took': took}}) + logger.info(mesg, extra=self.core.getLogExtra(iden=appt.iden, name=appt.name, user=user.iden, + result=result, username=user.name, took=took)) edits = { 'lastfinishtime': finishtime, 'isrunning': False, diff --git a/synapse/lib/modelrev.py b/synapse/lib/modelrev.py index 8d35ebd9cbf..1e3f6dc4b35 100644 --- a/synapse/lib/modelrev.py +++ b/synapse/lib/modelrev.py @@ -932,7 +932,7 @@ async def save(): oldm = e.errinfo.get('mesg') iden = s_common.ehex(buid) logger.warning(f'error re-norming {prop.form.name}:{prop.name}={propvalu} (layer: {layr.iden}, node: {iden}): {oldm}', - extra={'synapse': {'node': iden, 'layer': layr.iden}}) + extra=self.core.getLogExtra(node=iden, layer=layr.iden)) continue if norm == propvalu: @@ -1001,7 +1001,7 @@ async def save(): oldm = e.errinfo.get('mesg') iden = s_common.ehex(buid) logger.warning(f'error re-norming {prop.full}={propvalu} (layer: {layr.iden}, node: {iden}): {oldm}', - extra={'synapse': {'node': iden, 'layer': layr.iden}}) + extra=self.core.getLogExtra(node=iden, layer=layr.iden)) continue nodeedits.append( diff --git a/synapse/lib/storm.py b/synapse/lib/storm.py index b45935b9452..a8109ac1d66 100644 --- a/synapse/lib/storm.py +++ b/synapse/lib/storm.py @@ -1231,16 +1231,16 @@ async def __anit__(self, core, iden, ddef): self.onfini(self.stop) async def stop(self): - logger.debug(f'Stopping Dmon {self.iden}', extra={'synapse': {'iden': self.iden}}) + logger.debug(f'Stopping Dmon {self.iden}', extra=self.core.getLogExtra(iden=self.iden)) if self.task is not None: self.task.cancel() self.task = None - logger.debug(f'Stopped Dmon {self.iden}', extra={'synapse': {'iden': self.iden}}) + logger.debug(f'Stopped Dmon {self.iden}', extra=self.core.getLogExtra(iden=self.iden)) async def run(self): if self.task: # pragma: no cover raise s_exc.SynErr(mesg=f'Dmon - {self.iden} - has a 
current task and cannot start a new one.', - iden=self.iden) + extra=self.core.getLogExtra(iden=self.iden)) self.task = self.schedCoro(self.dmonloop()) async def bump(self): @@ -1262,7 +1262,7 @@ def _getRunLog(self): async def dmonloop(self): - logger.debug(f'Starting Dmon {self.iden}', extra={'synapse': {'iden': self.iden}}) + logger.debug(f'Starting Dmon {self.iden}', extra=self.core.getLogExtra(iden=self.iden)) s_scope.set('user', self.user) s_scope.set('storm:dmon', self.iden) @@ -1283,26 +1283,26 @@ async def dmonloop(self): def dmonPrint(evnt): self._runLogAdd(evnt) mesg = evnt[1].get('mesg', '') - logger.info(f'Dmon - {self.iden} - {mesg}', extra={'synapse': {'iden': self.iden}}) + logger.info(f'Dmon - {self.iden} - {mesg}', extra=self.core.getLogExtra(iden=self.iden)) def dmonWarn(evnt): self._runLogAdd(evnt) mesg = evnt[1].get('mesg', '') - logger.warning(f'Dmon - {self.iden} - {mesg}', extra={'synapse': {'iden': self.iden}}) + logger.warning(f'Dmon - {self.iden} - {mesg}', extra=self.core.getLogExtra(iden=self.iden)) while not self.isfini: if self.user.info.get('locked'): self.status = 'fatal error: user locked' logger.warning(f'Dmon user is locked. Stopping Dmon {self.iden}.', - extra={'synapse': {'iden': self.iden}}) + extra=self.core.getLogExtra(iden=self.iden)) return view = self.core.getView(viewiden, user=self.user) if view is None: self.status = 'fatal error: invalid view' logger.warning(f'Dmon View is invalid. 
Stopping Dmon {self.iden}.', - extra={'synapse': {'iden': self.iden}}) + extra=self.core.getLogExtra(iden=self.iden)) return try: @@ -1320,7 +1320,7 @@ def dmonWarn(evnt): self.count += 1 await asyncio.sleep(0) - logger.warning(f'Dmon query exited: {self.iden}', extra={'synapse': {'iden': self.iden}}) + logger.warning(f'Dmon query exited: {self.iden}', extra=self.core.getLogExtra(iden=self.iden)) self.status = 'sleeping' @@ -1333,7 +1333,7 @@ def dmonWarn(evnt): except Exception as e: self._runLogAdd(('err', s_common.excinfo(e))) - logger.exception(f'Dmon error ({self.iden})', extra={'synapse': {'iden': self.iden}}) + logger.exception(f'Dmon error ({self.iden})', extra=self.core.getLogExtra(iden=self.iden)) self.status = f'error: {e}' self.err_evnt.set() From a7374f19ce2897a40014a2a8b88eb19de142b9c9 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Thu, 29 Jan 2026 10:33:10 -0500 Subject: [PATCH 16/66] Remove unused imports --- synapse/lib/logging.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index d336c51b3ca..7bbf6344d45 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -7,13 +7,11 @@ import collections import synapse.exc as s_exc -import synapse.common as s_common import synapse.lib.coro as s_coro import synapse.lib.json as s_json import synapse.lib.const as s_const import synapse.lib.scope as s_scope -import synapse.lib.version as s_version logger = logging.getLogger(__name__) From 30fb9808d2d5a87cb06fbb6f28d6a0f109b90c0b Mon Sep 17 00:00:00 2001 From: epiphyte Date: Thu, 29 Jan 2026 10:53:28 -0500 Subject: [PATCH 17/66] Additional extra cleanup --- synapse/datamodel.py | 8 +++++++- synapse/lib/auth.py | 14 +++++++------- synapse/lib/json.py | 10 ++++++---- 3 files changed, 20 insertions(+), 12 deletions(-) diff --git a/synapse/datamodel.py b/synapse/datamodel.py index 582a2127e53..d530552582c 100644 --- a/synapse/datamodel.py +++ b/synapse/datamodel.py @@ -936,9 +936,15 @@ def addType(self, 
typename, basename, typeopts, typeinfo, checks=True): self._modeldef['types'].append(newtype.getTypeDef()) def _checkTypeDef(self, typ): + if self.core is not None: + efunc = self.core.getLogExtra + else: + import synapse.lib.logging as s_logging + efunc = s_logging.getLogExtra + if 'comp' in typ.info.get('bases', ()): for fname, ftypename in typ.opts.get('fields', ()): - extra = {'synapse': {'type': typ.name, 'field': fname}} + extra = efunc(type=typ.name, field=fname) if isinstance(ftypename, (list, tuple)): ftypename = ftypename[0] diff --git a/synapse/lib/auth.py b/synapse/lib/auth.py index 86f8ef411d1..379449d8cc0 100644 --- a/synapse/lib/auth.py +++ b/synapse/lib/auth.py @@ -1459,7 +1459,7 @@ async def tryPasswd(self, passwd, nexs=True, enforce_policy=True): if await s_passwd.checkShadowV2(passwd=passwd, shadow=shadow): await self.auth.setUserInfo(self.iden, 'onepass', None) logger.debug(f'Used one time password for {self.name}', - extra={'synapse': {'user': self.iden, 'username': self.name}}) + extra=self.auth.nexroot.cell.getLogExtra(user=self.iden, username=self.name)) return True else: # Backwards compatible password handling @@ -1468,7 +1468,7 @@ async def tryPasswd(self, passwd, nexs=True, enforce_policy=True): if s_common.guid((params, passwd)) == hashed: await self.auth.setUserInfo(self.iden, 'onepass', None) logger.debug(f'Used one time password for {self.name}', - extra={'synapse': {'user': self.iden, 'username': self.name}}) + extra=self.auth.nexroot.cell.getLogExtra(user=self.iden, username=self.name)) return True shadow = self.info.get('passwd') @@ -1493,15 +1493,15 @@ async def tryPasswd(self, passwd, nexs=True, enforce_policy=True): if self.iden == self.auth.rootuser.iden: mesg = f'User {self.name} has exceeded the number of allowed password attempts ({valu + 1}),. Cannot lock {self.name} user.' 
- extra = {'synapse': {'target_user': self.iden, 'target_username': self.name, }} - logger.error(mesg, extra=extra) + logger.error(mesg, extra=self.auth.nexroot.cell.getLogExtra(target_user=self.iden, target_username=self.name)) return False await self.auth.nexsroot.cell.setUserLocked(self.iden, True) mesg = f'User {self.name} has exceeded the number of allowed password attempts ({valu + 1}), locking their account.' - extra = {'synapse': {'target_user': self.iden, 'target_username': self.name, 'status': 'MODIFY'}} - logger.warning(mesg, extra=extra) + logger.warning(mesg, extra=self.auth.nexroot.cell.getLogExtra(target_user=self.iden, + target_username=self.name, + status='MODIFY')) return False @@ -1511,7 +1511,7 @@ async def tryPasswd(self, passwd, nexs=True, enforce_policy=True): salt, hashed = shadow if s_common.guid((salt, passwd)) == hashed: logger.debug(f'Migrating password to shadowv2 format for user {self.name}', - extra={'synapse': {'user': self.iden, 'username': self.name}}) + extra=self.auth.nexroot.cell.getLogExtra(user=self.iden, username=self.name)) # Update user to new password hashing scheme. We cannot enforce policy # when migrating an existing password. await self.setPasswd(passwd=passwd, nexs=nexs, enforce_policy=False) diff --git a/synapse/lib/json.py b/synapse/lib/json.py index 6c267cc0ff7..9764a8f8342 100644 --- a/synapse/lib/json.py +++ b/synapse/lib/json.py @@ -44,8 +44,9 @@ def loads(s: str | bytes) -> Any: return yyjson.Document(s, flags=yyjson.ReaderFlags.BIGNUM_AS_RAW).as_obj except (ValueError, TypeError) as exc: - extra = {'synapse': {'fn': 'loads', 'reason': str(exc)}} - logger.warning('Using fallback JSON deserialization. Please report this to Vertex.', extra=extra) + import synapse.lib.logging as s_logging + logger.warning('Using fallback JSON deserialization. 
Please report this to Vertex.', + extra=s_logging.getLogExtra(fn='loads', reason=(str(exc)))) return _fallback_loads(s) def load(fp: BinaryIO) -> Any: @@ -125,8 +126,9 @@ def dumps(obj: Any, sort_keys: bool = False, indent: bool = False, default: Opti try: return _dumps(obj, sort_keys=sort_keys, indent=indent, default=default, newline=newline) except UnicodeEncodeError as exc: - extra = {'synapse': {'fn': 'dumps', 'reason': str(exc)}} - logger.warning('Using fallback JSON serialization. Please report this to Vertex.', extra=extra) + import synapse.lib.logging as s_logging + logger.warning('Using fallback JSON serialization. Please report this to Vertex.', + extra=s_logging.getLogExtra(fn='dumps', reason=(str(exc)))) ret = _fallback_dumps(obj, sort_keys=sort_keys, indent=indent, default=default) From ba94eaee12b464b1cce50e8bfd31e375cd42b58f Mon Sep 17 00:00:00 2001 From: epiphyte Date: Thu, 29 Jan 2026 10:55:02 -0500 Subject: [PATCH 18/66] We have deferred imports until we get PEP-810 --- synapse/lib/base.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/synapse/lib/base.py b/synapse/lib/base.py index 31774ca0651..a392ee67265 100644 --- a/synapse/lib/base.py +++ b/synapse/lib/base.py @@ -597,10 +597,9 @@ async def main(self, timeout=BASE_MAIN_BG_TASK_TIMEOUT): # pragma: no cover ''' await self.addSignalHandlers() await self.waitfini() - # TODO Fix this deferred import? - import synapse.lib.logging as s_logging # Reset logging to stop the pump task so we're not awaiting # it when shutting down a service. 
+ import synapse.lib.logging as s_logging await s_logging.shutdown() await s_coro.await_bg_tasks(timeout) From 79e248b531886d501dc594beb721cf36e717d652 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Thu, 29 Jan 2026 11:16:43 -0500 Subject: [PATCH 19/66] Fix lint errors --- synapse/lib/agenda.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/synapse/lib/agenda.py b/synapse/lib/agenda.py index 156030bcd0c..dbd74896e80 100644 --- a/synapse/lib/agenda.py +++ b/synapse/lib/agenda.py @@ -807,7 +807,7 @@ async def _execute(self, appt): user = self.core.auth.user(appt.creator) if user is None: logger.warning(f'Unknown user {appt.creator} in stored appointment {appt.iden} {appt.name}', - extra = self.core.getLogExtra(iden=appt.iden, name=appt.name, user=appt.creator)) + extra=self.core.getLogExtra(iden=appt.iden, name=appt.name, user=appt.creator)) await self._markfailed(appt, 'unknown user') return @@ -908,7 +908,7 @@ async def _runJob(self, user, appt): except Exception as e: result = f'raised exception {e}' logger.exception(f'Agenda job {appt.iden} {appt.name} raised exception', - extra = self.core.getLogInfo(iden=appt.iden, name=appt.name), + extra=self.core.getLogInfo(iden=appt.iden, name=appt.name), ) else: success = True From a3cea821f5bae172e3565923ee0101052d8d15d7 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Thu, 29 Jan 2026 13:46:26 -0500 Subject: [PATCH 20/66] ahs:service to ahaservice --- synapse/lib/cell.py | 6 +++--- synapse/tests/test_lib_cell.py | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/synapse/lib/cell.py b/synapse/lib/cell.py index 44b6956349b..7bb09e6ab5e 100644 --- a/synapse/lib/cell.py +++ b/synapse/lib/cell.py @@ -1242,7 +1242,7 @@ async def __anit__(self, dirn, conf=None, readonly=False, parent=None): ahanetw = self.conf.get('aha:network') if ahaname is not None and ahanetw is not None: self.ahasvcname = f'{ahaname}.{ahanetw}' - s_logging.setLogInfo('aha:service', self.ahasvcname) + 
s_logging.setLogInfo('ahaservice', self.ahasvcname) # each cell has a guid path = s_common.genpath(self.dirn, 'cell.guid') @@ -3890,13 +3890,13 @@ def getLogExtra(self, **kwargs): ''' extra = s_logging.getLogExtra(**kwargs) if self.ahasvcname: - extra['loginfo']['aha:service'] = self.ahasvcname + extra['loginfo']['ahaservice'] = self.ahasvcname return extra def getLogConf(self): logconf = s_logging.getLogConf() if self.ahasvcname is not None: - logconf['loginfo']['aha:service'] = self.ahasvcname + logconf['loginfo']['ahaservice'] = self.ahasvcname return logconf def modCellConf(self, conf): diff --git a/synapse/tests/test_lib_cell.py b/synapse/tests/test_lib_cell.py index 3707740aa4c..8ee6a7f98ca 100644 --- a/synapse/tests/test_lib_cell.py +++ b/synapse/tests/test_lib_cell.py @@ -3667,14 +3667,14 @@ async def test_cell_logging(self): # confirm last-one-wins "service" key is always initialized logger.warning('oh hai there!') mesg = stream.jsonlines()[0] - self.eq(mesg['aha:service'], '00.cell.synapse') + self.eq(mesg['ahaservice'], '00.cell.synapse') async with cell00.getLocalProxy() as proxy: logs = await proxy.logs() self.isin('oh hai there!', [m['message'] for m in logs]) - self.isin('00.cell.synapse', [m.get('aha:service') for m in logs]) + self.isin('00.cell.synapse', [m.get('ahaservice') for m in logs]) event = asyncio.Event() async def sendlogs(): From 9e87d787dd354b566acb9e4ec5b0cff591c90770 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Thu, 29 Jan 2026 14:41:24 -0500 Subject: [PATCH 21/66] Fix ref error --- synapse/lib/auth.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/synapse/lib/auth.py b/synapse/lib/auth.py index 379449d8cc0..542b18ee58b 100644 --- a/synapse/lib/auth.py +++ b/synapse/lib/auth.py @@ -1459,7 +1459,7 @@ async def tryPasswd(self, passwd, nexs=True, enforce_policy=True): if await s_passwd.checkShadowV2(passwd=passwd, shadow=shadow): await self.auth.setUserInfo(self.iden, 'onepass', None) logger.debug(f'Used 
one time password for {self.name}', - extra=self.auth.nexroot.cell.getLogExtra(user=self.iden, username=self.name)) + extra=self.auth.nexsroot.cell.getLogExtra(user=self.iden, username=self.name)) return True else: # Backwards compatible password handling @@ -1468,7 +1468,7 @@ async def tryPasswd(self, passwd, nexs=True, enforce_policy=True): if s_common.guid((params, passwd)) == hashed: await self.auth.setUserInfo(self.iden, 'onepass', None) logger.debug(f'Used one time password for {self.name}', - extra=self.auth.nexroot.cell.getLogExtra(user=self.iden, username=self.name)) + extra=self.auth.nexsroot.cell.getLogExtra(user=self.iden, username=self.name)) return True shadow = self.info.get('passwd') @@ -1493,13 +1493,13 @@ async def tryPasswd(self, passwd, nexs=True, enforce_policy=True): if self.iden == self.auth.rootuser.iden: mesg = f'User {self.name} has exceeded the number of allowed password attempts ({valu + 1}),. Cannot lock {self.name} user.' - logger.error(mesg, extra=self.auth.nexroot.cell.getLogExtra(target_user=self.iden, target_username=self.name)) + logger.error(mesg, extra=self.auth.nexsroot.cell.getLogExtra(target_user=self.iden, target_username=self.name)) return False await self.auth.nexsroot.cell.setUserLocked(self.iden, True) mesg = f'User {self.name} has exceeded the number of allowed password attempts ({valu + 1}), locking their account.' 
- logger.warning(mesg, extra=self.auth.nexroot.cell.getLogExtra(target_user=self.iden, + logger.warning(mesg, extra=self.auth.nexsroot.cell.getLogExtra(target_user=self.iden, target_username=self.name, status='MODIFY')) @@ -1511,7 +1511,7 @@ async def tryPasswd(self, passwd, nexs=True, enforce_policy=True): salt, hashed = shadow if s_common.guid((salt, passwd)) == hashed: logger.debug(f'Migrating password to shadowv2 format for user {self.name}', - extra=self.auth.nexroot.cell.getLogExtra(user=self.iden, username=self.name)) + extra=self.auth.nexsroot.cell.getLogExtra(user=self.iden, username=self.name)) # Update user to new password hashing scheme. We cannot enforce policy # when migrating an existing password. await self.setPasswd(passwd=passwd, nexs=nexs, enforce_policy=False) From a4625cf88fba2a83f4122ebce90d1e4577757b48 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Thu, 29 Jan 2026 14:56:07 -0500 Subject: [PATCH 22/66] Fix test_model_invalid_comp_types --- synapse/datamodel.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/synapse/datamodel.py b/synapse/datamodel.py index d530552582c..bcf63c43130 100644 --- a/synapse/datamodel.py +++ b/synapse/datamodel.py @@ -944,7 +944,6 @@ def _checkTypeDef(self, typ): if 'comp' in typ.info.get('bases', ()): for fname, ftypename in typ.opts.get('fields', ()): - extra = efunc(type=typ.name, field=fname) if isinstance(ftypename, (list, tuple)): ftypename = ftypename[0] @@ -953,7 +952,7 @@ def _checkTypeDef(self, typ): ftype = typ.tcache[fname] except s_exc.BadTypeDef: mesg = f'The {typ.name} field {fname} is declared as a type ({ftypename}) that does not exist.' - logger.warning(mesg, extra=extra) + logger.warning(mesg, efunc(type=typ.name, field=fname)) continue # We're only interested in extended model comp types @@ -962,12 +961,11 @@ def _checkTypeDef(self, typ): if ftype.ismutable: mesg = f'Comp types with mutable fields ({typ.name}:{fname}) are deprecated and will be removed in 3.0.0.' 
- logger.warning(mesg, extra=extra) + logger.warning(mesg, efunc(type=typ.name, field=fname)) if ftype.deprecated: mesg = f'The type {typ.name} field {fname} uses a deprecated type {ftype.name}.' - extra['synapse']['field:type'] = ftype.name - logger.warning(mesg, extra=extra) + logger.warning(mesg, efunc(type=typ.name, field=fname, field_type=ftype.name)) def addForm(self, formname, forminfo, propdefs, checks=True): From d74d237cfc5a2257bb56964e0b54c9db87e2c1f3 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Thu, 29 Jan 2026 15:07:39 -0500 Subject: [PATCH 23/66] Remove stray_base. It was only used for testing. --- synapse/cortex.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/synapse/cortex.py b/synapse/cortex.py index 436977245ef..99c068ad76b 100644 --- a/synapse/cortex.py +++ b/synapse/cortex.py @@ -1654,8 +1654,6 @@ async def _addAllLayrRead(self): async def initServiceRuntime(self): - self.stray_base = await s_base.Base.anit() - # do any post-nexus initialization here... if self.isactive: await self._checkNexsIndx() From e8612f29a5579fe07567df0a5526a20f818be04c Mon Sep 17 00:00:00 2001 From: epiphyte Date: Thu, 29 Jan 2026 15:09:57 -0500 Subject: [PATCH 24/66] Restore the double logging for username and user atm --- synapse/cortex.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/synapse/cortex.py b/synapse/cortex.py index 99c068ad76b..0908d7bdc86 100644 --- a/synapse/cortex.py +++ b/synapse/cortex.py @@ -6503,10 +6503,10 @@ def _logStormQuery(self, text, user, info=None): info = {} info['text'] = text # username and user here are logged twice. 
- # info['username'] = user.name - # info['user'] = user.iden + info['username'] = user.name + info['user'] = user.iden info['hash'] = s_storm.queryhash(text) - # TODO Understand why this is using s_logging.getLogExtra instead of self.getLogExtra + # TODO Understand why this is using s_logging.getLogExtra instead of self.getLogExtra extra = s_logging.getLogExtra(**info) stormlogger.log(self.stormloglvl, 'Executing storm query {%s} as [%s]', text, user.name, extra=extra) From c546e1d61d212622567495009e1e0f4946b14bd0 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Thu, 29 Jan 2026 15:11:18 -0500 Subject: [PATCH 25/66] Remove super() comment --- synapse/tests/utils.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/synapse/tests/utils.py b/synapse/tests/utils.py index 205a93720ec..fac1e300035 100644 --- a/synapse/tests/utils.py +++ b/synapse/tests/utils.py @@ -1045,9 +1045,6 @@ def __init__(self, *args, **kwargs): def tearDown(self): s_logging.reset() - # We should not need to call super here; unittest.TestCase does not have a tearDown(). - # Implementers have the burden of calling super().teardown() - # return super().tearDown() def checkNode(self, node, expected): ex_ndef, ex_props = expected From 46580a5361c1088c231b3bb2aef576124608d908 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Thu, 29 Jan 2026 15:12:47 -0500 Subject: [PATCH 26/66] Mark structlog.JsonFormatter as deprecated. 
--- synapse/lib/structlog.py | 1 + 1 file changed, 1 insertion(+) diff --git a/synapse/lib/structlog.py b/synapse/lib/structlog.py index 8be80867c02..77c04d1d931 100644 --- a/synapse/lib/structlog.py +++ b/synapse/lib/structlog.py @@ -7,6 +7,7 @@ class JsonFormatter(logging.Formatter): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) + s_common.deprecated('synapse.lib.structlog.JsonFormatter') def format(self, record: logging.LogRecord): From 30d2d2dcef040ccb28471c607071a14357618ecf Mon Sep 17 00:00:00 2001 From: epiphyte Date: Thu, 29 Jan 2026 15:23:32 -0500 Subject: [PATCH 27/66] Fix lint error --- synapse/cortex.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/cortex.py b/synapse/cortex.py index 0908d7bdc86..832c632a53e 100644 --- a/synapse/cortex.py +++ b/synapse/cortex.py @@ -6506,7 +6506,7 @@ def _logStormQuery(self, text, user, info=None): info['username'] = user.name info['user'] = user.iden info['hash'] = s_storm.queryhash(text) - # TODO Understand why this is using s_logging.getLogExtra instead of self.getLogExtra + # TODO Understand why this is using s_logging.getLogExtra instead of self.getLogExtra extra = s_logging.getLogExtra(**info) stormlogger.log(self.stormloglvl, 'Executing storm query {%s} as [%s]', text, user.name, extra=extra) From 2f36165add81e93065f29728687ab9ff8551d41a Mon Sep 17 00:00:00 2001 From: epiphyte Date: Fri, 30 Jan 2026 09:22:09 -0500 Subject: [PATCH 28/66] Do not log the telepath session id --- synapse/lib/logging.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index 7bbf6344d45..3f806848cd6 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -99,7 +99,7 @@ def genLogInfo(self, record): loginfo['username'] = user.name elif (sess := s_scope.get('sess')) is not None: - loginfo['sess'] = sess.iden + # loginfo['sess'] = sess.iden if sess.user is not None: loginfo['user'] = sess.user.iden 
loginfo['username'] = sess.user.name From 78b380c4f5881c8cbc2dd17bd2c5191388c128de Mon Sep 17 00:00:00 2001 From: epiphyte Date: Fri, 30 Jan 2026 09:51:42 -0500 Subject: [PATCH 29/66] Test - Give preference to runtime user scope -> then explicit user -> then session scope. --- synapse/lib/logging.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index 3f806848cd6..7d6d62635d2 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -94,7 +94,15 @@ def genLogInfo(self, record): if hasattr(record, 'loginfo'): loginfo.update(record.loginfo) - if (user := s_scope.get('user')) is not None: + user = None + if (runt := s_scope.get('runt')) is not None: + # Give preference to runtime user? + if (user := runt.user ) is not None: + loginfo['user'] = user.iden + loginfo['username'] = user.name + + elif user is not None and (user := s_scope.get('user')) is not None: + # if (user := s_scope.get('user')) is not None: loginfo['user'] = user.iden loginfo['username'] = user.name From 50b092db025c3f0d4040393a1f9368bb6f97212d Mon Sep 17 00:00:00 2001 From: epiphyte Date: Fri, 30 Jan 2026 10:07:32 -0500 Subject: [PATCH 30/66] Fix lint error --- synapse/lib/logging.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index 7d6d62635d2..15ec7e667a8 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -97,7 +97,7 @@ def genLogInfo(self, record): user = None if (runt := s_scope.get('runt')) is not None: # Give preference to runtime user? 
- if (user := runt.user ) is not None: + if (user := runt.user) is not None: loginfo['user'] = user.iden loginfo['username'] = user.name From 42e28a13711b30b54214352aa452ef85b9ddcad5 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Fri, 30 Jan 2026 11:16:33 -0500 Subject: [PATCH 31/66] revert runt change --- synapse/lib/logging.py | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index 15ec7e667a8..10cb5732040 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -94,20 +94,11 @@ def genLogInfo(self, record): if hasattr(record, 'loginfo'): loginfo.update(record.loginfo) - user = None - if (runt := s_scope.get('runt')) is not None: - # Give preference to runtime user? - if (user := runt.user) is not None: - loginfo['user'] = user.iden - loginfo['username'] = user.name - - elif user is not None and (user := s_scope.get('user')) is not None: - # if (user := s_scope.get('user')) is not None: + if (user := s_scope.get('user')) is not None: loginfo['user'] = user.iden loginfo['username'] = user.name elif (sess := s_scope.get('sess')) is not None: - # loginfo['sess'] = sess.iden if sess.user is not None: loginfo['user'] = sess.user.iden loginfo['username'] = sess.user.name From 141eda6ca532052fc94a276cbe02f7e955bb5879 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Mon, 2 Feb 2026 22:03:16 -0500 Subject: [PATCH 32/66] docstrings for expect() --- synapse/tests/utils.py | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/synapse/tests/utils.py b/synapse/tests/utils.py index fac1e300035..4baeb8a7a81 100644 --- a/synapse/tests/utils.py +++ b/synapse/tests/utils.py @@ -817,21 +817,33 @@ def clear(self): self._lines.clear() self.seek(0) self.truncate() + self._event.clear() def write(self, s): - retn = io.StringIO.write(self, s) + io.StringIO.write(self, s) self._lines.append(s) self._event.set() - async def expect(self, text, count=1, timeout=5, escape=True): 
+ async def expect(self, text, count=1, timeout=6, escape=True): + ''' + Expect a string to be present in the logged text. + + Args: + text: String to search the logs for. + count: Number of occurances of the entry. + timeout: Amount of time to wait for the text to be logged. + escape: re.escape() the provided text; to to false is text is a regular expression. + Returns: + True if the text is found. + ''' try: coro = self._expect(text, count=count, escape=escape) await s_common.wait_for(coro, timeout=timeout) except TimeoutError: logger.warning(f'Pattern [{text}] not found in...') [logger.warning(f' {line}') for line in self._lines] - raise s_exc.SynErr(mesg=f'Pattern [{text}] not found!') + raise AssertionError(f'Pattern [{text}] not found!') async def _expect(self, text, count=1, escape=True): From d7fb94fe24dc0cbc0c1ed6e7778bf37c537548d6 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Mon, 2 Feb 2026 22:23:03 -0500 Subject: [PATCH 33/66] Fix tests --- synapse/tests/test_lib_cell.py | 2 +- synapse/tests/test_telepath.py | 2 +- synapse/tests/test_utils.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/synapse/tests/test_lib_cell.py b/synapse/tests/test_lib_cell.py index 14caa1acc64..d4448cca260 100644 --- a/synapse/tests/test_lib_cell.py +++ b/synapse/tests/test_lib_cell.py @@ -2627,7 +2627,7 @@ async def wrapDelWriteHold(root, reason): log_enable_writes = f'Free space on {core.dirn} above minimum threshold' with self.getLoggerStream('synapse.lib.cell') as stream: await core.nexsroot.addWriteHold(tmp_reason := 'something else') - with self.raises(s_exc.SynErr): + with self.raises(AssertionError): await stream.expect(log_enable_writes, timeout=1) stream.seek(0) self.eq(stream.read(), '') diff --git a/synapse/tests/test_telepath.py b/synapse/tests/test_telepath.py index ce04cf18405..8afc9f4aad1 100644 --- a/synapse/tests/test_telepath.py +++ b/synapse/tests/test_telepath.py @@ -1417,7 +1417,7 @@ async def test_telepath_exception_logging(self): 
except asyncio.CancelledError: pass - with self.raises(s_exc.SynErr): + with self.raises(AssertionError): await stream.expect('error during task: callStorm', timeout=0.5) with self.getLoggerStream('synapse.daemon') as stream: diff --git a/synapse/tests/test_utils.py b/synapse/tests/test_utils.py index 4fe46eb2556..7d24bec5f82 100644 --- a/synapse/tests/test_utils.py +++ b/synapse/tests/test_utils.py @@ -106,7 +106,7 @@ async def test_syntest_logstream(self): logger.error('ruh roh i am a error message') await stream.expect('ruh roh i am a error message', timeout=1) - with self.raises(s_exc.SynErr): + with self.raises(AssertionError): await stream.expect('does not exist', timeout=0.01) self.notin('newp', stream.getvalue()) From 72ac941a0b646462a00b4ce76e96c4ce65224251 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Tue, 3 Feb 2026 09:19:24 -0500 Subject: [PATCH 34/66] Run a CI suite with pytest.ini --- pytest.ini | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 pytest.ini diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 00000000000..f7f02e55fc4 --- /dev/null +++ b/pytest.ini @@ -0,0 +1,8 @@ +[pytest] +log_cli = true +log_cli_format = %(asctime)s [%(levelname)s] %(message)s [%(filename)s:%(funcName)s:%(threadName)s:%(processName)s] +log_cli_date_format = %Y-%m-%d %H:%M:%S +log_cli_level=debug + +;addopts = -s -v --disable-warnings +addopts = -s -v \ No newline at end of file From aae8d2d257491903eb424afc4473a9182af88351 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Tue, 3 Feb 2026 09:19:32 -0500 Subject: [PATCH 35/66] Revert "Run a CI suite with pytest.ini" This reverts commit 72ac941a0b646462a00b4ce76e96c4ce65224251. 
--- pytest.ini | 8 -------- 1 file changed, 8 deletions(-) delete mode 100644 pytest.ini diff --git a/pytest.ini b/pytest.ini deleted file mode 100644 index f7f02e55fc4..00000000000 --- a/pytest.ini +++ /dev/null @@ -1,8 +0,0 @@ -[pytest] -log_cli = true -log_cli_format = %(asctime)s [%(levelname)s] %(message)s [%(filename)s:%(funcName)s:%(threadName)s:%(processName)s] -log_cli_date_format = %Y-%m-%d %H:%M:%S -log_cli_level=debug - -;addopts = -s -v --disable-warnings -addopts = -s -v \ No newline at end of file From b1e54162cb5b42dfcfdec1b378bc0dbb1f43fe11 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Tue, 3 Feb 2026 10:10:37 -0500 Subject: [PATCH 36/66] Clarify a comment --- synapse/cortex.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/synapse/cortex.py b/synapse/cortex.py index d62245d7242..39e66906be5 100644 --- a/synapse/cortex.py +++ b/synapse/cortex.py @@ -6505,7 +6505,9 @@ def _logStormQuery(self, text, user, info=None): if info is None: info = {} info['text'] = text - # username and user here are logged twice. + # username and user here are logged twice; which represents a + # difference between the auth user who initiated the action + # and the target user who is executing the runtime. 
info['username'] = user.name info['user'] = user.iden info['hash'] = s_storm.queryhash(text) From d8d4fa94e41a49e7f448bc2b68715de7b0d89187 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Tue, 3 Feb 2026 10:20:26 -0500 Subject: [PATCH 37/66] log the api name in adminapi() message --- synapse/lib/cell.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/lib/cell.py b/synapse/lib/cell.py index 332199eb22e..97fe0c1953a 100644 --- a/synapse/lib/cell.py +++ b/synapse/lib/cell.py @@ -106,7 +106,7 @@ def wrapped(self, *args, **kwargs): user=self.user.iden, username=self.user.name) if log: extra = s_logging.getLogExtra(func=func.__qualname__, args=args, kwargs=kwargs) - logger.info(f'Admin API invoked', extra=extra) + logger.info(f'Admin API invoked api={func.__qualname__}', extra=extra) return func(self, *args, **kwargs) From ea08d8dca3c8f1af69427227a1444c2420f94caa Mon Sep 17 00:00:00 2001 From: epiphyte Date: Tue, 3 Feb 2026 12:49:41 -0500 Subject: [PATCH 38/66] Remove obe import --- synapse/common.py | 1 - 1 file changed, 1 deletion(-) diff --git a/synapse/common.py b/synapse/common.py index ee2714cc092..3f04f20689f 100644 --- a/synapse/common.py +++ b/synapse/common.py @@ -42,7 +42,6 @@ import synapse.lib.const as s_const import synapse.lib.logging as s_logging import synapse.lib.msgpack as s_msgpack -import synapse.lib.structlog as s_structlog import synapse.vendor.cpython.lib.ipaddress as ipaddress import synapse.vendor.cpython.lib.http.cookies as v_cookies From b73c35250671e804fb06bb654c38220b04f5a095 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Tue, 3 Feb 2026 15:15:34 -0500 Subject: [PATCH 39/66] Thought shall not make synapse/common have import side effects. 
--- synapse/common.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/synapse/common.py b/synapse/common.py index 3f04f20689f..d1b99c35048 100644 --- a/synapse/common.py +++ b/synapse/common.py @@ -7,9 +7,7 @@ import stat import time import heapq -import queue import types -import atexit import base64 import shutil import struct @@ -18,12 +16,10 @@ import decimal import fnmatch import hashlib -import logging import tarfile import binascii import builtins import tempfile -import warnings import functools import itertools import threading @@ -39,8 +35,6 @@ import regex import synapse.exc as s_exc -import synapse.lib.const as s_const -import synapse.lib.logging as s_logging import synapse.lib.msgpack as s_msgpack import synapse.vendor.cpython.lib.ipaddress as ipaddress @@ -859,12 +853,14 @@ def config(conf, confdefs): @functools.lru_cache(maxsize=1024) def deprecated(name, curv='2.x', eolv='3.0.0'): mesg = f'{name} is deprecated in {curv} and will be removed in {eolv}' + import synapse.lib.logging as s_logging logger.warning(mesg, extra=s_logging.getLogExtra(curv=curv, eolv=eolv)) return mesg @functools.lru_cache(maxsize=1024) def deprdate(name, date): # pragma: no cover mesg = f'{name} is deprecated and will be removed on {date}.' + import synapse.lib.logging as s_logging logger.warning(mesg, extra=s_logging.getLogExtra(eold=date)) return mesg From a30fbfcf7ede2a7c2d6619b11135a7dcb1db1c83 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Wed, 4 Feb 2026 09:02:38 -0500 Subject: [PATCH 40/66] add a few test assertions along logging --- synapse/lib/logging.py | 2 ++ synapse/tests/test_lib_logging.py | 6 ++++++ 2 files changed, 8 insertions(+) diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index 10cb5732040..139a581aba7 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -287,6 +287,8 @@ async def shutdown(): before removing it and resetting log attributes. 
A StreamHandler is then re-installed on the root logger to allow for messages from sources like atexit handlers to be logged. + + This should be called at service or tool teardown. ''' # Give the pump task a small opportunity to drain its # queue of items and exit cleanly. diff --git a/synapse/tests/test_lib_logging.py b/synapse/tests/test_lib_logging.py index eb8d14fefa3..c53ec43f6c7 100644 --- a/synapse/tests/test_lib_logging.py +++ b/synapse/tests/test_lib_logging.py @@ -14,7 +14,10 @@ class LoggingTest(s_test.SynTest): async def test_lib_logging(self): + # Installs the logginghandlers + self.none(s_logging.StreamHandler._pump_task) s_logging.setup() + self.nn(s_logging.StreamHandler._pump_task) self.eq(10, s_logging.normLogLevel(' 10 ')) self.eq(10, s_logging.normLogLevel(10)) @@ -77,3 +80,6 @@ async def test_lib_logging(self): self.eq(mesg['error']['notes'], ('outer note',)) self.eq(mesg['error']['context']['code'], 'SynErr') self.eq(mesg['error']['context']['notes'], ('inner note',)) + + s_logging.reset() + self.none(s_logging.StreamHandler._pump_task) From b3902a68ef321ce1f53142fe35d664931b550f24 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Thu, 5 Feb 2026 09:22:49 -0500 Subject: [PATCH 41/66] Revert "Thought shall not make synapse/common have import side effects." This reverts commit b73c35250671e804fb06bb654c38220b04f5a095. 
--- synapse/common.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/synapse/common.py b/synapse/common.py index d1b99c35048..3f04f20689f 100644 --- a/synapse/common.py +++ b/synapse/common.py @@ -7,7 +7,9 @@ import stat import time import heapq +import queue import types +import atexit import base64 import shutil import struct @@ -16,10 +18,12 @@ import decimal import fnmatch import hashlib +import logging import tarfile import binascii import builtins import tempfile +import warnings import functools import itertools import threading @@ -35,6 +39,8 @@ import regex import synapse.exc as s_exc +import synapse.lib.const as s_const +import synapse.lib.logging as s_logging import synapse.lib.msgpack as s_msgpack import synapse.vendor.cpython.lib.ipaddress as ipaddress @@ -853,14 +859,12 @@ def config(conf, confdefs): @functools.lru_cache(maxsize=1024) def deprecated(name, curv='2.x', eolv='3.0.0'): mesg = f'{name} is deprecated in {curv} and will be removed in {eolv}' - import synapse.lib.logging as s_logging logger.warning(mesg, extra=s_logging.getLogExtra(curv=curv, eolv=eolv)) return mesg @functools.lru_cache(maxsize=1024) def deprdate(name, date): # pragma: no cover mesg = f'{name} is deprecated and will be removed on {date}.' - import synapse.lib.logging as s_logging logger.warning(mesg, extra=s_logging.getLogExtra(eold=date)) return mesg From b949b616268765208ea4b1f9a4895d1eb7a21ba4 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Thu, 5 Feb 2026 09:31:39 -0500 Subject: [PATCH 42/66] We're now free to import synapse.lib.logging much easiser. 
--- synapse/datamodel.py | 13 ++++--------- synapse/lib/base.py | 6 +++--- synapse/lib/cmd.py | 3 +-- synapse/lib/coro.py | 2 -- synapse/lib/json.py | 3 +-- synapse/lib/processpool.py | 1 - 6 files changed, 9 insertions(+), 19 deletions(-) diff --git a/synapse/datamodel.py b/synapse/datamodel.py index e0f29990610..794ded1dde3 100644 --- a/synapse/datamodel.py +++ b/synapse/datamodel.py @@ -16,6 +16,7 @@ import synapse.lib.types as s_types import synapse.lib.dyndeps as s_dyndeps import synapse.lib.grammar as s_grammar +import synapse.lib.logging as s_logging import synapse.lib.msgpack as s_msgpack logger = logging.getLogger(__name__) @@ -946,12 +947,6 @@ def addType(self, typename, basename, typeopts, typeinfo, checks=True): self._modeldef['types'].append(newtype.getTypeDef()) def _checkTypeDef(self, typ): - if self.core is not None: - efunc = self.core.getLogExtra - else: - import synapse.lib.logging as s_logging - efunc = s_logging.getLogExtra - if 'comp' in typ.info.get('bases', ()): for fname, ftypename in typ.opts.get('fields', ()): @@ -962,7 +957,7 @@ def _checkTypeDef(self, typ): ftype = typ.tcache[fname] except s_exc.BadTypeDef: mesg = f'The {typ.name} field {fname} is declared as a type ({ftypename}) that does not exist.' - logger.warning(mesg, efunc(type=typ.name, field=fname)) + logger.warning(mesg, s_logging.getLogExtra(type=typ.name, field=fname)) continue # We're only interested in extended model comp types @@ -971,11 +966,11 @@ def _checkTypeDef(self, typ): if ftype.ismutable: mesg = f'Comp types with mutable fields ({typ.name}:{fname}) are deprecated and will be removed in 3.0.0.' - logger.warning(mesg, efunc(type=typ.name, field=fname)) + logger.warning(mesg, s_logging.getLogExtra(type=typ.name, field=fname)) if ftype.deprecated: mesg = f'The type {typ.name} field {fname} uses a deprecated type {ftype.name}.' 
- logger.warning(mesg, efunc(type=typ.name, field=fname, field_type=ftype.name)) + logger.warning(mesg, s_logging.getLogExtra(type=typ.name, field=fname, field_type=ftype.name)) def addForm(self, formname, forminfo, propdefs, checks=True): diff --git a/synapse/lib/base.py b/synapse/lib/base.py index a392ee67265..64cc216b322 100644 --- a/synapse/lib/base.py +++ b/synapse/lib/base.py @@ -18,6 +18,7 @@ import synapse.lib.coro as s_coro import synapse.lib.scope as s_scope +import synapse.lib.logging as s_logging logger = logging.getLogger(__name__) @@ -597,9 +598,8 @@ async def main(self, timeout=BASE_MAIN_BG_TASK_TIMEOUT): # pragma: no cover ''' await self.addSignalHandlers() await self.waitfini() - # Reset logging to stop the pump task so we're not awaiting - # it when shutting down a service. - import synapse.lib.logging as s_logging + # shutdown logging to allow it to drain any queued messages it has, swapping in a stream handler, + # and then cancellling the task so we do not have to await the pump task in bg tasks. 
await s_logging.shutdown() await s_coro.await_bg_tasks(timeout) diff --git a/synapse/lib/cmd.py b/synapse/lib/cmd.py index 0eb6de4e81d..e72f686c1e2 100644 --- a/synapse/lib/cmd.py +++ b/synapse/lib/cmd.py @@ -6,6 +6,7 @@ import synapse.lib.coro as s_coro import synapse.lib.output as s_output +import synapse.lib.logging as s_logging class Parser(argparse.ArgumentParser): ''' @@ -48,8 +49,6 @@ async def wrapmain(func, logconf=None): # pragma: no cover if logconf is None: logconf = {'level': 'DEBUG'} - import synapse.lib.logging as s_logging - s_logging.setup(**logconf) try: diff --git a/synapse/lib/coro.py b/synapse/lib/coro.py index 8fd6d560eef..689653904c8 100644 --- a/synapse/lib/coro.py +++ b/synapse/lib/coro.py @@ -19,8 +19,6 @@ import synapse.glob as s_glob import synapse.common as s_common -import synapse.lib.logging as s_logging - def iscoro(item): return inspect.iscoroutine(item) diff --git a/synapse/lib/json.py b/synapse/lib/json.py index 9764a8f8342..64b1c723ef6 100644 --- a/synapse/lib/json.py +++ b/synapse/lib/json.py @@ -10,6 +10,7 @@ import yyjson import synapse.exc as s_exc +import synapse.lib.logging as s_logging logger = logging.getLogger(__name__) @@ -44,7 +45,6 @@ def loads(s: str | bytes) -> Any: return yyjson.Document(s, flags=yyjson.ReaderFlags.BIGNUM_AS_RAW).as_obj except (ValueError, TypeError) as exc: - import synapse.lib.logging as s_logging logger.warning('Using fallback JSON deserialization. Please report this to Vertex.', extra=s_logging.getLogExtra(fn='loads', reason=(str(exc)))) return _fallback_loads(s) @@ -126,7 +126,6 @@ def dumps(obj: Any, sort_keys: bool = False, indent: bool = False, default: Opti try: return _dumps(obj, sort_keys=sort_keys, indent=indent, default=default, newline=newline) except UnicodeEncodeError as exc: - import synapse.lib.logging as s_logging logger.warning('Using fallback JSON serialization. 
Please report this to Vertex.', extra=s_logging.getLogExtra(fn='dumps', reason=(str(exc)))) diff --git a/synapse/lib/processpool.py b/synapse/lib/processpool.py index 281e29bbf54..677b8a67c00 100644 --- a/synapse/lib/processpool.py +++ b/synapse/lib/processpool.py @@ -14,7 +14,6 @@ logger = logging.getLogger(__name__) import synapse.exc as s_exc -import synapse.common as s_common import synapse.lib.logging as s_logging import synapse.lib.process as s_process From 705ba8ea4712d8c4bd803684e4ad88da3a9a6863 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Thu, 5 Feb 2026 10:40:20 -0500 Subject: [PATCH 43/66] Queue import is no longer circular --- synapse/lib/logging.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index 139a581aba7..29bc730a9f2 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -12,6 +12,7 @@ import synapse.lib.json as s_json import synapse.lib.const as s_const import synapse.lib.scope as s_scope +import synapse.lib.queue as s_queue logger = logging.getLogger(__name__) @@ -201,8 +202,6 @@ def logs(last=100): return tuple(StreamHandler._logs_fifo)[-last:] async def watch(last=100): - # avoid a circular import... - import synapse.lib.queue as s_queue async with await s_queue.Window.anit(maxsize=10000) as window: await window.puts(logs(last=last)) _log_wins.add(window) From c2ed244260d1e85f93f9efa391571229d8064bff Mon Sep 17 00:00:00 2001 From: epiphyte Date: Thu, 5 Feb 2026 10:56:11 -0500 Subject: [PATCH 44/66] Revert "Queue import is no longer circular" This reverts commit 705ba8ea4712d8c4bd803684e4ad88da3a9a6863. 
--- synapse/lib/logging.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index 29bc730a9f2..139a581aba7 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -12,7 +12,6 @@ import synapse.lib.json as s_json import synapse.lib.const as s_const import synapse.lib.scope as s_scope -import synapse.lib.queue as s_queue logger = logging.getLogger(__name__) @@ -202,6 +201,8 @@ def logs(last=100): return tuple(StreamHandler._logs_fifo)[-last:] async def watch(last=100): + # avoid a circular import... + import synapse.lib.queue as s_queue async with await s_queue.Window.anit(maxsize=10000) as window: await window.puts(logs(last=last)) _log_wins.add(window) From 37fd62ff370e8e2d4dfe713b79d2780eb313212e Mon Sep 17 00:00:00 2001 From: epiphyte Date: Thu, 12 Feb 2026 17:35:09 -0500 Subject: [PATCH 45/66] Refactor logging tests to better isolate pieces of functionality, add popLogInfo, make loq size a parameter instead of a fixed value, fix a GC issue with windows. --- synapse/lib/logging.py | 22 +++++- synapse/tests/test_lib_logging.py | 123 +++++++++++++++++++++++++++--- 2 files changed, 130 insertions(+), 15 deletions(-) diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index 139a581aba7..fcc86a43faf 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -18,6 +18,7 @@ _log_wins = weakref.WeakSet() LOG_PUMP_TASK_TIMEOUT = int(os.environ.get('SYNDEV_LOG_TASK_SHUTDOWN_TIMEOUT', 1)) +LOG_QUEUE_SIZES = int(os.environ.get('SYNDEV_LOG_QUEUE_SIZE', 1000)) # TODO - Handle exception groups def excinfo(e, _seen=None): @@ -66,6 +67,12 @@ def setLogInfo(name, valu): ''' _glob_loginfo[name] = valu +def popLogInfo(name): + ''' + Remove a global value from being added to every log. + ''' + _glob_loginfo.pop(name, None) + def getLogExtra(**kwargs): ''' Construct a properly enveloped log extra dictionary. 
@@ -84,6 +91,8 @@ def genLogInfo(self, record): 'logger': { 'name': record.name, 'func': record.funcName, + 'process': record.processName, + 'thread': record.threadName, }, 'level': record.levelname, 'time': self.formatTime(record, self.datefmt), @@ -139,9 +148,9 @@ class StreamHandler(logging.StreamHandler): _pump_exit_flag = False _glob_handler = None - _logs_fifo = collections.deque(maxlen=1000) - _logs_todo = collections.deque(maxlen=1000) - _text_todo = collections.deque(maxlen=1000) + _logs_fifo = collections.deque(maxlen=LOG_QUEUE_SIZES) + _logs_todo = collections.deque(maxlen=LOG_QUEUE_SIZES) + _text_todo = collections.deque(maxlen=LOG_QUEUE_SIZES) def emit(self, record): @@ -188,13 +197,18 @@ async def _pumpLogStream(): for wind in _log_wins: await wind.puts(logstodo) + # Don't hold onto refs of the Window objects inside of this function after we have used them. + # If we don't clear this ref, then we will hold a reference to the window object longer than neeeded. + # This can lead to the last window object never being GC'd while the pumpLogStream task is running, + # even after its caller has exited the watch() function. 
+ wind = None # NOQA await s_coro.executor(_writestderr, fulltext) if StreamHandler._pump_exit_flag is True: return - except Exception as e: + except Exception: traceback.print_exc() def logs(last=100): return tuple(StreamHandler._logs_fifo)[-last:] diff --git a/synapse/tests/test_lib_logging.py b/synapse/tests/test_lib_logging.py index c53ec43f6c7..83b6688da0e 100644 --- a/synapse/tests/test_lib_logging.py +++ b/synapse/tests/test_lib_logging.py @@ -1,9 +1,10 @@ +import gc import asyncio import logging import synapse.exc as s_exc -import synapse.lib.base as s_base +import synapse.lib.coro as s_coro import synapse.lib.logging as s_logging import synapse.tests.utils as s_test @@ -12,12 +13,7 @@ class LoggingTest(s_test.SynTest): - async def test_lib_logging(self): - - # Installs the logginghandlers - self.none(s_logging.StreamHandler._pump_task) - s_logging.setup() - self.nn(s_logging.StreamHandler._pump_task) + async def test_lib_logging_norm(self): self.eq(10, s_logging.normLogLevel(' 10 ')) self.eq(10, s_logging.normLogLevel(10)) @@ -35,14 +31,122 @@ async def test_lib_logging(self): with self.raises(s_exc.BadArg): s_logging.normLogLevel({'key': 'newp'}) - s_logging.reset() + async def test_lib_logging_base(self): + + # Ensure we're starting the test from a clean slate + self.none(s_logging.StreamHandler._pump_task) + + # Installs the logginghandlers + s_logging.setup() + self.nn(s_logging.StreamHandler._pump_task) + + # Ensure that while we have a running logging task, that windowing of live log events to a consumer works. + + msgs = [] + # Indicates that the function has started + evnt0 = asyncio.Event() + # Indicates the function has entered the window + evnt1 = asyncio.Event() + # Indicates the function has left Window.__aiter__ - this will start the teardown the Window object, + # which will eventually cause __aiter__ to exit and leave the ioloop.
+ evnt2 = asyncio.Event() + async def collector(): + evnt0.set() + async for m in s_logging.watch(last=0): + evnt1.set() + msgs.append(m) + if m.get('params').get('fini'): + break + evnt2.set() + return True + + self.len(0, s_logging._log_wins) + + task = s_coro.create_task(collector()) + await asyncio.wait_for(evnt0.wait(), timeout=12) + + logger.error('window0') + await asyncio.wait_for(evnt1.wait(), timeout=12) + self.len(1, s_logging._log_wins) + + logger.error('window1', extra=s_logging.getLogExtra(fini=True)) + + await asyncio.wait_for(evnt2.wait(), timeout=12) + self.true(await task) + + self.len(2, msgs) + self.eq([m.get('message') for m in msgs], ['window0', 'window1']) + + # Ensure that the ioloop can remove the empty Window.__aiter__ task, + # which will release the Window ref and allow it to be GC'd. + # Test runs gc.collect() as a safety for test stability. + await asyncio.sleep(0) + gc.collect() + self.len(0, s_logging._log_wins) + + s_logging.reset() self.none(s_logging.StreamHandler._pump_task) self.none(s_logging.StreamHandler._pump_event) self.len(0, s_logging.StreamHandler._logs_fifo) self.len(0, s_logging.StreamHandler._text_todo) self.len(0, s_logging.StreamHandler._logs_todo) + # Ensure various log messages are properly captured a structured data in the FIFO and logs() works + s_logging.setLogInfo('someglobal', 'testvalu') + with self.getLoggerStream('synapse.tests.test_lib_logging') as stream: + # Log a few messages + logger.error('test0', extra=s_logging.getLogExtra(foo='bar')) + s_logging.popLogInfo('someglobal') + logger.warning('test1', extra=s_logging.getLogExtra(foo='baz')) + logger.info('test2') + logger.debug('test3') + await stream.expect('test3') + + msgs = s_logging.logs() + self.isinstance(msgs, tuple) + self.len(4, msgs) + + msg0 = msgs[0] + self.eq(msg0.get('message'), 'test0') + self.eq(msg0.get('level'), 'ERROR') + self.eq(msg0.get('someglobal'), 'testvalu') + self.eq(msg0.get('params'), {'foo': 'bar'}) + self.isin('time', 
msg0) + lnfo = msg0.get('logger') + self.eq(lnfo.get('name'), 'synapse.tests.test_lib_logging') + self.eq(lnfo.get('func'), 'test_lib_logging_base') + self.isin('process', lnfo) + self.isin('thread', lnfo) + + msg1 = msgs[1] + self.eq(msg1.get('message'), 'test1') + self.eq(msg1.get('level'), 'WARNING') + self.notin('someglobal', msg1) + self.eq(msg1.get('params'), {'foo': 'baz'}) + self.isin('time', msg0) + + msg2 = msgs[2] + self.eq(msg2.get('message'), 'test2') + self.eq(msg2.get('level'), 'INFO') + self.eq(msg2.get('params'), {}) + + msg3 = msgs[3] + self.eq(msg3.get('message'), 'test3') + self.eq(msg3.get('level'), 'DEBUG') + self.eq(msg3.get('params'), {}) + + s_logging.reset() + + msgs = s_logging.logs() + self.len(0, msgs) + self.isinstance(msgs, tuple) + self.none(s_logging.StreamHandler._pump_task) + + async def test_lib_logging_exception(self): + + # Ensure that various exception information is captured + with self.getLoggerStream('synapse.tests.test_lib_logging') as stream: try: @@ -80,6 +184,3 @@ async def test_lib_logging(self): self.eq(mesg['error']['notes'], ('outer note',)) self.eq(mesg['error']['context']['code'], 'SynErr') self.eq(mesg['error']['context']['notes'], ('inner note',)) - - s_logging.reset() - self.none(s_logging.StreamHandler._pump_task) From 292df9194524781cf30a042546fbf415254401f6 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Fri, 13 Feb 2026 10:33:24 -0500 Subject: [PATCH 46/66] Add tests for shutdown behavor for a running pumptask --- synapse/lib/logging.py | 26 +++++++------ synapse/tests/test_lib_logging.py | 61 +++++++++++++++++++++++++++++++ 2 files changed, 76 insertions(+), 11 deletions(-) diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index fcc86a43faf..8b11ce72fd6 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -198,14 +198,14 @@ async def _pumpLogStream(): for wind in _log_wins: await wind.puts(logstodo) # Don't hold onto refs of the Window objects inside of this function after we have 
used them. - # If we don't clear this ref, then we will hold a reference to the window object longer than neeeded. + # If we don't clear this ref, then we will hold a reference to the window object longer than needed. # This can lead to the last window object never being GC'd while the pumpLogStream task is running, # even after its caller has exited the watch() function. wind = None # NOQA await s_coro.executor(_writestderr, fulltext) - if StreamHandler._pump_exit_flag is True: + if StreamHandler._pump_exit_flag is True and len(StreamHandler._logs_todo) == 0 and len(StreamHandler._text_todo) == 0: return except Exception: @@ -295,15 +295,7 @@ def reset(clear_globconf=True): _glob_logconf.clear() _glob_loginfo.clear() -async def shutdown(): - ''' - Inverse of setup. Gives the pump task the opportunity to exit - before removing it and resetting log attributes. A StreamHandler - is then re-installed on the root logger to allow for messages - from sources like atexit handlers to be logged. - - This should be called at service or tool teardown. - ''' +async def _shutdown_task(): # Give the pump task a small opportunity to drain its # queue of items and exit cleanly. if StreamHandler._pump_task is not None: @@ -314,6 +306,18 @@ async def shutdown(): except asyncio.TimeoutError: pass + +async def shutdown(): # pragma: no cover + ''' + Inverse of setup. Gives the pump task the opportunity to exit + before removing it and resetting log attributes. A StreamHandler + is then re-installed on the root logger to allow for messages + from sources like atexit handlers to be logged. + + This should be called at service or tool teardown. + ''' + await _shutdown_task() + # Reset all logging configs except globals since we may need those. 
reset(clear_globconf=False) diff --git a/synapse/tests/test_lib_logging.py b/synapse/tests/test_lib_logging.py index 83b6688da0e..9d6c3afbb8a 100644 --- a/synapse/tests/test_lib_logging.py +++ b/synapse/tests/test_lib_logging.py @@ -2,9 +2,12 @@ import asyncio import logging +import unittest.mock as mock + import synapse.exc as s_exc import synapse.lib.coro as s_coro +import synapse.lib.json as s_json import synapse.lib.logging as s_logging import synapse.tests.utils as s_test @@ -143,6 +146,64 @@ async def collector(): self.isinstance(msgs, tuple) self.none(s_logging.StreamHandler._pump_task) + async def test_lib_logging_shutdown(self): + # Test the _shutdown_task functionality used in s_logging.shutdown() to ensure it drains the logs + # and exits cleanly + + # Installs the logginghandlers + s_logging.setup(structlog=True) + self.nn(s_logging.StreamHandler._pump_task) + + # Let the task startup + await asyncio.sleep(0) + + msgs = [] + nlines = 3 + evnt = asyncio.Event() + + def writemock(text): + lines = text.split('\n') + for line in lines: + if line: + msgs.append(s_json.loads(line)) + if len(msgs) == nlines: + evnt.set() + + # shutdown with a pending log event + with mock.patch('synapse.lib.logging._writestderr', writemock) as patch: + logger.error('message0') + logger.error('message1') + await asyncio.sleep(0) + logger.error('message2') + # Shutdown before nlines have been accounted for + await s_logging._shutdown_task() + self.true(await asyncio.wait_for(evnt.wait(), timeout=12)) + + self.true(s_logging.StreamHandler._pump_task.done()) + self.eq([m.get('message') for m in msgs], ['message0', 'message1', 'message2']) + + s_logging.reset() + + evnt.clear() + msgs.clear() + + s_logging.setup(structlog=True) + await asyncio.sleep(0) + self.false(s_logging.StreamHandler._pump_task.done()) + + # shutdown without a pending log event + with mock.patch('synapse.lib.logging._writestderr', writemock) as patch: + logger.error('message0') + logger.error('message1') + 
logger.error('message2') + await asyncio.sleep(0) + # Shutdown after nlines have been accounted for + self.true(await asyncio.wait_for(evnt.wait(), timeout=12)) + await s_logging._shutdown_task() + + self.true(s_logging.StreamHandler._pump_task.done()) + self.eq([m.get('message') for m in msgs], ['message0', 'message1', 'message2']) + async def test_lib_logging_exception(self): # Ensure that various exception information is captured From 47ed37989bae0a81c1de0358f7b0bb81620f4570 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Fri, 13 Feb 2026 11:35:18 -0500 Subject: [PATCH 47/66] Update docs --- docs/synapse/devopsguide.rst | 77 ++++++++++++++++++++++++------------ synapse/lib/cell.py | 1 + synapse/lib/view.py | 4 +- 3 files changed, 55 insertions(+), 27 deletions(-) diff --git a/docs/synapse/devopsguide.rst b/docs/synapse/devopsguide.rst index b925847d721..9f4a4f98493 100644 --- a/docs/synapse/devopsguide.rst +++ b/docs/synapse/devopsguide.rst @@ -346,46 +346,73 @@ These structured logs are designed to be easy to ingest into third party log col message, level, time, and metadata about where the log message came from:: { - "message": "log level set to INFO", + "message": "Executing storm query {[inet:asn=1234]} as [someUsername]", "logger": { - "name": "synapse.lib.cell", + "name": "synapse.storm", + "func": "_logStormQuery", "process": "MainProcess", - "filename": "common.py", - "func": "setlogging" + "thread": "MainThread" }, "level": "INFO", - "time": "2021-06-28 15:47:54,825" + "time": "2026-02-13 10:38:24,545", + "user": "a8b94560fd9b6e1e38245a6f7d659106", + "username": "root", + "params": { # Param + "mode": "storm", + "view": "715a5c9ae37e4045795ea3f3cabb44fb", + "text": "[inet:asn=1234]", + "username": "someUsername", + "user": "e3532bc88fa66afb592e6a1474a98675", + "hash": "ef94e89eb3bc309a40242876f6c5f296" + } } +The ``user`` and ``username`` fields at the top level of a log correspond to the currently active / authorized API user +which has caused a log 
event to occur. These may differ from the user associated with the event, + When exceptions are logged with structured logging, we capture additional information about the exception, including the -entire traceback. In the event that the error is a Synapse Err class, we also capture additional metadata which was -attached to the error. In the following example, we also have the query text, username and user iden available in the -log message pretty-printed log message:: +traceback as structured data. In the event that the error is a Synapse Err class, we also capture additional metadata +which was attached to the error. In the following example, we also have the query text, username and user iden available +in the pretty-printed log message. The ``key=valu`` data that was raised by the user is also included in the +``error.info.`` dictionary. Cause and Context information may be present, corresponding to the Python ``__cause__`` and +``__context__`` attributes on Exception classes.:: { - "message": "Error during storm execution for { || }", + "message": "Error during storm execution for { $lib.raise(Newp, 'ruh roh', key=valu) }", "logger": { "name": "synapse.lib.view", + "func": "runStorm", "process": "MainProcess", - "filename": "view.py", - "func": "runStorm" + "thread": "MainThread" }, "level": "ERROR", - "time": "2021-06-28 15:49:34,401", - "err": { - "efile": "coro.py", - "eline": 233, - "esrc": "return await asyncio.get_running_loop().run_in_executor(forkpool, _runtodo, todo)", - "ename": "forked", - "at": 1, - "text": "||", - "mesg": "No terminal defined for '|' at line 1 col 2. Expecting one of: #, $, (, *, + or -, -(, -+>, -->, ->, :, <(, <+-, <-, <--, [, break, command name, continue, fini, for, function, if, init, property name, return, switch, while, whitespace or comment, yield, {", - "etb": ".... 
long traceback ...", - "errname": "BadSyntax" - }, - "text": "||", + "time": "2026-02-13 11:24:06,853", + "user": "e889d295b19fd5c12f861cd6fe564aa8", "username": "root", - "user": "3189065f95d3ab0a6904e604260c0be2" + "error": { + "code": "StormRaise", + "traceback": [ + ... list of traceback frames ... + [ + "/home/epiphyte/PycharmProjects/synapse/synapse/lib/stormtypes.py", + 1751, + "_raise", + "raise s_exc.StormRaise(**info)" + ] + ], + "info": { + "key": "valu", + "errname": "Newp", + ... additional error information ... + } + }, + "mesg": "ruh roh" + }, + "params": { + "text": "$lib.raise(Newp, 'ruh roh', key=valu)", + "user": "e3532bc88fa66afb592e6a1474a98675", + "username": "someUsername" + } } Custom date formatting strings can also be provided by setting the ``SYN_LOG_DATEFORMAT`` string. This is expected to be a diff --git a/synapse/lib/cell.py b/synapse/lib/cell.py index 39b2335cbc5..e6324cb455d 100644 --- a/synapse/lib/cell.py +++ b/synapse/lib/cell.py @@ -228,6 +228,7 @@ async def logs(self, last=100): async def watch(self, last=100): async for item in self.cell.watch(last=last): yield item + await asyncio.sleep(0) async def allowed(self, perm, default=None): ''' diff --git a/synapse/lib/view.py b/synapse/lib/view.py index ced3afcff9c..3454560253c 100644 --- a/synapse/lib/view.py +++ b/synapse/lib/view.py @@ -978,7 +978,7 @@ async def callStorm(self, text, opts=None): else: mesg = f'Generator control statement "{e.statement}" used outside of a generator function.' 
logmesg = f'Error during storm execution for {{ {text} }} - {mesg}' - extra = self.core.getLogExtra(text=text) + extra = self.core.getLogExtra(text=text, user=user.iden, username=user.name) logger.exception(logmesg, extra=extra) raise s_exc.StormRuntimeError(mesg=mesg, statement=e.statement, highlight=e.get('highlight')) from e @@ -1099,7 +1099,7 @@ async def runStorm(): if mesg: logmesg = f'{logmesg} - {mesg}' - extra = self.core.getLogExtra(text=text) + extra = self.core.getLogExtra(text=text, user=user.iden, username=user.name) logger.exception(logmesg, extra=extra) enfo = s_common.err(e) From 8e5095e0f448c397272a3fac0ea2d3b336dd0ee4 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Fri, 13 Feb 2026 13:07:09 -0500 Subject: [PATCH 48/66] Coverage for error handling inside of _pumpLogStream --- synapse/lib/logging.py | 3 +-- synapse/tests/test_lib_logging.py | 32 +++++++++++++++++++++++++++++++ 2 files changed, 33 insertions(+), 2 deletions(-) diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index 8b11ce72fd6..16be27a33bb 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -202,14 +202,13 @@ async def _pumpLogStream(): # This can lead to the last window object never being GC'd while the pumpLogStream task is running, # even after its caller has exited the watch() function. 
wind = None # NOQA - await s_coro.executor(_writestderr, fulltext) if StreamHandler._pump_exit_flag is True and len(StreamHandler._logs_todo) == 0 and len(StreamHandler._text_todo) == 0: return except Exception: - traceback.print_exc() + _writestderr('Error during log handling\n' + traceback.format_exc()) def logs(last=100): return tuple(StreamHandler._logs_fifo)[-last:] diff --git a/synapse/tests/test_lib_logging.py b/synapse/tests/test_lib_logging.py index 9d6c3afbb8a..bd0ff7dd02f 100644 --- a/synapse/tests/test_lib_logging.py +++ b/synapse/tests/test_lib_logging.py @@ -204,6 +204,38 @@ def writemock(text): self.true(s_logging.StreamHandler._pump_task.done()) self.eq([m.get('message') for m in msgs], ['message0', 'message1', 'message2']) + async def test_lib_logging_pump_error(self): + # Ensure that exceptions are captured and printed to stderr directly + s_logging.setup(structlog=True) + self.nn(s_logging.StreamHandler._pump_task) + + evnt = asyncio.Event() + + data = [] + def writemock(text): + data.append(text) + evnt.set() + + with mock.patch('synapse.lib.logging._writestderr', writemock): + s_logging.StreamHandler._text_todo.append('hehe') + s_logging.StreamHandler._logs_todo.append('hehe') + + s_logging.StreamHandler._text_todo.append(1234) + s_logging.StreamHandler._logs_todo.append('newp') + + # This will cause a type error trying to join hehe and 1234 together + # when the task wakes up and and tries to process the queue + + s_logging.StreamHandler._pump_event.set() + + self.true(await asyncio.wait_for(evnt.wait(), timeout=12)) + + text = '\n'.join(data) + self.isin('Error during log handling', text) + self.isin('Traceback', text) + + s_logging.reset() + async def test_lib_logging_exception(self): # Ensure that various exception information is captured From c8fd04b140e9c4c4bab51f0181320a79e965433a Mon Sep 17 00:00:00 2001 From: epiphyte Date: Fri, 13 Feb 2026 13:10:17 -0500 Subject: [PATCH 49/66] colon --- synapse/lib/logging.py | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index 16be27a33bb..d7e8301d6b8 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -208,7 +208,7 @@ async def _pumpLogStream(): return except Exception: - _writestderr('Error during log handling\n' + traceback.format_exc()) + _writestderr('Error during log handling:\n' + traceback.format_exc()) def logs(last=100): return tuple(StreamHandler._logs_fifo)[-last:] From f6a80ad31a2d53993022693f2dc60617a0b79b9b Mon Sep 17 00:00:00 2001 From: epiphyte Date: Sun, 15 Feb 2026 17:40:13 -0500 Subject: [PATCH 50/66] no cover --- synapse/lib/logging.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index d7e8301d6b8..683c5953cb7 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -302,7 +302,7 @@ async def _shutdown_task(): StreamHandler._pump_event.set() # Wake the task try: await asyncio.wait_for(StreamHandler._pump_task, timeout=LOG_PUMP_TASK_TIMEOUT) - except asyncio.TimeoutError: + except asyncio.TimeoutError: # pragma: no cover pass From c8c23508785ffe9aa4ac48ee085abc4c1642dc95 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Sun, 15 Feb 2026 17:40:36 -0500 Subject: [PATCH 51/66] no cover --- synapse/lib/logging.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index 683c5953cb7..235f7eea7ff 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -187,7 +187,7 @@ async def _pumpLogStream(): StreamHandler._pump_event.clear() if StreamHandler._pump_exit_flag is True: return - continue + continue # pragma: no cover StreamHandler._logs_todo.clear() StreamHandler._text_todo.clear() From 1f647d452923890294bb387989ff51bd7548456f Mon Sep 17 00:00:00 2001 From: epiphyte Date: Mon, 16 Feb 2026 09:41:40 -0500 Subject: [PATCH 52/66] Add seatbelts around SYNDEV_LOG envars, add test to show the message dropping behavior that 
can occur. --- synapse/lib/logging.py | 4 +++ synapse/tests/test_lib_logging.py | 58 +++++++++++++++++++++++++++++++ 2 files changed, 62 insertions(+) diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index 235f7eea7ff..008a3f7cc76 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -19,6 +19,10 @@ LOG_PUMP_TASK_TIMEOUT = int(os.environ.get('SYNDEV_LOG_TASK_SHUTDOWN_TIMEOUT', 1)) LOG_QUEUE_SIZES = int(os.environ.get('SYNDEV_LOG_QUEUE_SIZE', 1000)) +if LOG_PUMP_TASK_TIMEOUT < 0: # pragma: no cover + raise s_exc.BadConfValu(mesg='SYNDEV_LOG_TASK_SHUTDOWN_TIMEOUT value must be greater than or equal to 0.') +if LOG_QUEUE_SIZES < 1: # pragma: no cover + raise s_exc.BadConfValu(mesg='SYNDEV_LOG_QUEUE_SIZE value must be greater than 1.') # TODO - Handle exception groups def excinfo(e, _seen=None): diff --git a/synapse/tests/test_lib_logging.py b/synapse/tests/test_lib_logging.py index bd0ff7dd02f..24b0a0aefaa 100644 --- a/synapse/tests/test_lib_logging.py +++ b/synapse/tests/test_lib_logging.py @@ -146,6 +146,64 @@ async def collector(): self.isinstance(msgs, tuple) self.none(s_logging.StreamHandler._pump_task) + async def test_lib_logging_window_drop(self): + + # Messages can be dropped from the todo queues if the pump task is not allowed + # to wake up & service the queue. + + # Ensure we're starting the test from a clean slate + self.none(s_logging.StreamHandler._pump_task) + + # Installs the logginghandlers + s_logging.setup() + self.nn(s_logging.StreamHandler._pump_task) + + # Ensure that while we have a running logging task, that windowing of live log events to a consumer works. + + msgs = [] + # Indicates that the function has started + evnt0 = asyncio.Event() + # Indicates the function has entered the window + evnt1 = asyncio.Event() + # Indicates the function has left Window.__aiter__ - this will start the teardown the Window object, + # which will eventually cause __aiter__ to exit and leave the ioloop. 
+ evnt2 = asyncio.Event() + + async def collector(): + evnt0.set() + async for m in s_logging.watch(last=0): + evnt1.set() + msgs.append(m) + if m.get('params').get('fini'): + break + evnt2.set() + return True + + self.len(0, s_logging._log_wins) + + task = s_coro.create_task(collector()) + await asyncio.wait_for(evnt0.wait(), timeout=12) + + logger.error('window0') + + await asyncio.wait_for(evnt1.wait(), timeout=12) + self.len(1, s_logging._log_wins) + + # Now log messages which will exceed the buffer size. This will log N+1 messages, + # meaning that buftest-0 will not be present and window1 will be included. + for n in range(s_logging.LOG_QUEUE_SIZES): + logger.error(f'buftest-{n}') + logger.error('window1', extra=s_logging.getLogExtra(fini=True)) + + await asyncio.wait_for(evnt2.wait(), timeout=12) + self.true(await task) + + self.len(2 + s_logging.LOG_QUEUE_SIZES - 1, msgs) + emsgs = ['window0'] + [f'buftest-{n}' for n in range(1, s_logging.LOG_QUEUE_SIZES)] + ['window1'] + self.eq([m.get('message') for m in msgs], emsgs) + + s_logging.reset() + async def test_lib_logging_shutdown(self): # Test the _shutdown_task functionality used in s_logging.shutdown() to ensure it drains the logs # and exits cleanly From 8e0e96c0e5223f1eb49a651861939ba9e3ad4964 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Tue, 17 Feb 2026 00:17:21 -0500 Subject: [PATCH 53/66] Ensure that exceptionGroup is handled. 
--- synapse/lib/logging.py | 6 +++-- synapse/tests/test_lib_logging.py | 44 +++++++++++++++++++++++++++++++ 2 files changed, 48 insertions(+), 2 deletions(-) diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index 008a3f7cc76..f46127cc2aa 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -24,7 +24,6 @@ if LOG_QUEUE_SIZES < 1: # pragma: no cover raise s_exc.BadConfValu(mesg='SYNDEV_LOG_QUEUE_SIZE value must be greater than 1.') -# TODO - Handle exception groups def excinfo(e, _seen=None): if _seen is None: @@ -34,7 +33,7 @@ def excinfo(e, _seen=None): tb = [] for path, line, func, sorc in traceback.extract_tb(e.__traceback__): - # sorc may not be available; enusre that all output is a str + # sorc may not be available; ensure that all output is a str if sorc is None: sorc = '' tb.append((path, line, func, sorc)) @@ -59,6 +58,9 @@ def excinfo(e, _seen=None): ret['info'] = e.errinfo.copy() ret['mesg'] = ret['info'].pop('mesg', None) + if isinstance(e, BaseExceptionGroup) and e.exceptions: + ret['group'] = [excinfo(exc) for exc in e.exceptions] + if ret.get('mesg') is None: ret['mesg'] = str(e) diff --git a/synapse/tests/test_lib_logging.py b/synapse/tests/test_lib_logging.py index 24b0a0aefaa..734997b96ea 100644 --- a/synapse/tests/test_lib_logging.py +++ b/synapse/tests/test_lib_logging.py @@ -335,3 +335,47 @@ async def test_lib_logging_exception(self): self.eq(mesg['error']['notes'], ('outer note',)) self.eq(mesg['error']['context']['code'], 'SynErr') self.eq(mesg['error']['context']['notes'], ('inner note',)) + + stream.clear() + + # This is derived from the cpython Stdlib exception group example here + # https://docs.python.org/3.11/tutorial/errors.html#tut-exception-groups + + def f(): + raise ExceptionGroup( + "group1", + [ + ValueError(1), + SystemError(2), + ExceptionGroup( + "group2", + [ + OSError(3), + RecursionError(4) + ] + ) + ] + ) + + try: + f() + except* OSError as e: + pass + except* SystemError as e: + pass + except* 
Exception as e: + logger.exception('Error encountered in EG') + + mesg = stream.jsonlines()[0] + self.eq(mesg['message'], 'Error encountered in EG') + self.eq(mesg['error']['code'], 'ExceptionGroup') + self.eq(mesg['error']['mesg'], 'group1 (2 sub-exceptions)') + group = mesg['error']['group'] + self.len(2, group) + self.eq(group[0]['code'], 'ValueError') + self.eq(group[0]['mesg'], '1') + + self.eq(group[1]['code'], 'ExceptionGroup') + self.eq(group[1]['mesg'], 'group2 (1 sub-exception)') + self.eq(group[1]['group'][0]['code'], 'RecursionError') + self.eq(group[1]['group'][0]['mesg'], '4') From dbd692aa0c332b75a96cdd1423ebd6ad9f0a930d Mon Sep 17 00:00:00 2001 From: epiphyte Date: Tue, 17 Feb 2026 10:04:42 -0500 Subject: [PATCH 54/66] update docs --- docs/synapse/devopsguide.rst | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/docs/synapse/devopsguide.rst b/docs/synapse/devopsguide.rst index 9f4a4f98493..ccbe06b84c6 100644 --- a/docs/synapse/devopsguide.rst +++ b/docs/synapse/devopsguide.rst @@ -346,6 +346,7 @@ These structured logs are designed to be easy to ingest into third party log col message, level, time, and metadata about where the log message came from:: { + "ahaservice": "00.cortex.synapse", "message": "Executing storm query {[inet:asn=1234]} as [someUsername]", "logger": { "name": "synapse.storm", @@ -368,7 +369,9 @@ message, level, time, and metadata about where the log message came from:: } The ``user`` and ``username`` fields at the top level of a log correspond to the currently active / authorized API user -which has caused a log event to occur. These may differ from the user associated with the event, +which has caused a log event to occur. These may differ from the user associated with the event. That information would +normally be embedded in the ``params`` key. The ``ahaservice`` key, if present, indicates the service that is +associated with the log message. 
When exceptions are logged with structured logging, we capture additional information about the exception, including the traceback as structured data. In the event that the error is a Synapse Err class, we also capture additional metadata @@ -378,6 +381,7 @@ in the pretty-printed log message. The ``key=valu`` data that was raised by the ``__context__`` attributes on Exception classes.:: { + "ahaservice": "00.cortex.synapse", "message": "Error during storm execution for { $lib.raise(Newp, 'ruh roh', key=valu) }", "logger": { "name": "synapse.lib.view", From 74c1d83e7980baa3c12f2ecffa9becd5f9e60ef6 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Tue, 17 Feb 2026 10:34:45 -0500 Subject: [PATCH 55/66] Changelog for deprecation. --- changes/7ef2889edb0c2bfb214207b97f0394db.yaml | 7 +++++++ synapse/lib/structlog.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) create mode 100644 changes/7ef2889edb0c2bfb214207b97f0394db.yaml diff --git a/changes/7ef2889edb0c2bfb214207b97f0394db.yaml b/changes/7ef2889edb0c2bfb214207b97f0394db.yaml new file mode 100644 index 00000000000..7d9522d6a4d --- /dev/null +++ b/changes/7ef2889edb0c2bfb214207b97f0394db.yaml @@ -0,0 +1,7 @@ +--- +desc: Deprecated ``synapse.lib.structlog.JsonFormatter``. It will be removed after + April 1st, 2026. +desc:literal: false +prs: [] +type: deprecation +... 
diff --git a/synapse/lib/structlog.py b/synapse/lib/structlog.py index 77c04d1d931..35333fde38b 100644 --- a/synapse/lib/structlog.py +++ b/synapse/lib/structlog.py @@ -7,7 +7,7 @@ class JsonFormatter(logging.Formatter): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - s_common.deprecated('synapse.lib.structlog.JsonFormatter') + s_common.deprdate('synapse.lib.structlog.JsonFormatter', '2026-04-01') def format(self, record: logging.LogRecord): From 62c2063f2b0c935fc91337ef39da28f11ca5749e Mon Sep 17 00:00:00 2001 From: epiphyte Date: Sun, 1 Mar 2026 10:09:10 -0500 Subject: [PATCH 56/66] ahaservice to service --- synapse/lib/cell.py | 6 +++--- synapse/tests/test_lib_cell.py | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/synapse/lib/cell.py b/synapse/lib/cell.py index e6324cb455d..aafb378a55b 100644 --- a/synapse/lib/cell.py +++ b/synapse/lib/cell.py @@ -1244,7 +1244,7 @@ async def __anit__(self, dirn, conf=None, readonly=False, parent=None): ahanetw = self.conf.get('aha:network') if ahaname is not None and ahanetw is not None: self.ahasvcname = f'{ahaname}.{ahanetw}' - s_logging.setLogInfo('ahaservice', self.ahasvcname) + s_logging.setLogInfo('service', self.ahasvcname) # each cell has a guid path = s_common.genpath(self.dirn, 'cell.guid') @@ -3892,13 +3892,13 @@ def getLogExtra(self, **kwargs): ''' extra = s_logging.getLogExtra(**kwargs) if self.ahasvcname: - extra['loginfo']['ahaservice'] = self.ahasvcname + extra['loginfo']['service'] = self.ahasvcname return extra def getLogConf(self): logconf = s_logging.getLogConf() if self.ahasvcname is not None: - logconf['loginfo']['ahaservice'] = self.ahasvcname + logconf['loginfo']['service'] = self.ahasvcname return logconf def modCellConf(self, conf): diff --git a/synapse/tests/test_lib_cell.py b/synapse/tests/test_lib_cell.py index d4448cca260..4c0a1aa8768 100644 --- a/synapse/tests/test_lib_cell.py +++ b/synapse/tests/test_lib_cell.py @@ -3699,14 +3699,14 @@ async def 
test_cell_logging(self): # confirm last-one-wins "service" key is always initialized logger.warning('oh hai there!') mesg = stream.jsonlines()[0] - self.eq(mesg['ahaservice'], '00.cell.synapse') + self.eq(mesg['service'], '00.cell.synapse') async with cell00.getLocalProxy() as proxy: logs = await proxy.logs() self.isin('oh hai there!', [m['message'] for m in logs]) - self.isin('00.cell.synapse', [m.get('ahaservice') for m in logs]) + self.isin('00.cell.synapse', [m.get('service') for m in logs]) event = asyncio.Event() async def sendlogs(): From 1d2bfd24ee46b5e789d5715eda0bb8fbe427fe5b Mon Sep 17 00:00:00 2001 From: epiphyte Date: Sun, 1 Mar 2026 10:13:31 -0500 Subject: [PATCH 57/66] Don't allow +LOG_PUMP_TASK_TIMEOUT and +LOG_QUEUE_SIZES to be tunable --- synapse/lib/logging.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index f46127cc2aa..5d8a1bf73ce 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -17,12 +17,8 @@ _log_wins = weakref.WeakSet() -LOG_PUMP_TASK_TIMEOUT = int(os.environ.get('SYNDEV_LOG_TASK_SHUTDOWN_TIMEOUT', 1)) -LOG_QUEUE_SIZES = int(os.environ.get('SYNDEV_LOG_QUEUE_SIZE', 1000)) -if LOG_PUMP_TASK_TIMEOUT < 0: # pragma: no cover - raise s_exc.BadConfValu(mesg='SYNDEV_LOG_TASK_SHUTDOWN_TIMEOUT value must be greater than or equal to 0.') -if LOG_QUEUE_SIZES < 1: # pragma: no cover - raise s_exc.BadConfValu(mesg='SYNDEV_LOG_QUEUE_SIZE value must be greater than 1.') +LOG_PUMP_TASK_TIMEOUT = 1 +LOG_QUEUE_SIZES = 1000 def excinfo(e, _seen=None): From b40908c835744e2c0407260bc6dfca68423c3f65 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Thu, 5 Mar 2026 15:42:18 -0500 Subject: [PATCH 58/66] Ensure that user scope is tracked more consistently. 
--- synapse/cortex.py | 6 +- synapse/lib/agenda.py | 28 +++--- synapse/lib/auth.py | 11 +-- synapse/lib/cell.py | 13 +-- synapse/lib/httpapi.py | 7 +- synapse/lib/logging.py | 3 + synapse/lib/nexus.py | 3 + synapse/lib/view.py | 147 +++++++++++++++--------------- synapse/tests/test_cortex.py | 23 +++++ synapse/tests/test_lib_httpapi.py | 24 ++--- 10 files changed, 143 insertions(+), 122 deletions(-) diff --git a/synapse/cortex.py b/synapse/cortex.py index 2c971f74a04..c406db466a2 100644 --- a/synapse/cortex.py +++ b/synapse/cortex.py @@ -1058,7 +1058,7 @@ async def initServiceStorage(self): mesg = f'User {useriden} ({user.name}) has a rule on the "cortex" authgate. This authgate is not used ' \ f'for permission checks and will be removed in Synapse v3.0.0.' - logger.warning(mesg, extra=self.getLogExtra(user=useriden, username=user.name)) + logger.warning(mesg, extra=self.getLogExtra(target_user=useriden, target_username=user.name)) for roleiden in ag.gateroles.keys(): role = self.auth.role(roleiden) if role is None: @@ -1066,7 +1066,7 @@ async def initServiceStorage(self): mesg = f'Role {roleiden} ({role.name}) has a rule on the "cortex" authgate. This authgate is not used ' \ f'for permission checks and will be removed in Synapse v3.0.0.' - logger.warning(mesg, extra=self.getLogExtra(role=roleiden, rolename=role.name)) + logger.warning(mesg, extra=self.getLogExtra(target_role=roleiden, target_rolename=role.name)) self._initVaults() @@ -6512,8 +6512,6 @@ def _logStormQuery(self, text, user, info=None): # username and user here are logged twice; which represents a # difference between the auth user who initiated the action # and the target user who is executing the runtime. 
- info['username'] = user.name - info['user'] = user.iden info['hash'] = s_storm.queryhash(text) # TODO Understand why this is using s_logging.getLogExtra instead of self.getLogExtra extra = s_logging.getLogExtra(**info) diff --git a/synapse/lib/agenda.py b/synapse/lib/agenda.py index 48eacaa4e92..b4c513900d6 100644 --- a/synapse/lib/agenda.py +++ b/synapse/lib/agenda.py @@ -14,6 +14,7 @@ import synapse.lib.base as s_base import synapse.lib.coro as s_coro +import synapse.lib.scope as s_scope import synapse.lib.logging as s_logging # Agenda: manages running one-shot and periodic tasks in the future ("appointments") @@ -837,10 +838,7 @@ async def runloop(self): try: await self._execute(appt) except Exception as e: - extra = {'iden': appt.iden, 'name': appt.name, 'user': appt.creator, 'view': appt.view} - user = self.core.auth.user(appt.creator) - if user is not None: - extra['username'] = user.name + extra = {'iden': appt.iden, 'name': appt.name, 'view': appt.view} if isinstance(e, s_exc.SynErr): mesg = e.get('mesg', str(e)) else: # pragma: no cover @@ -863,16 +861,14 @@ async def _execute(self, appt): locked = user.info.get('locked') if locked: logger.warning(f'Cron {appt.iden} {appt.name} failed because creator {user.name} is locked', - extra=self.core.getLogExtra(iden=appt.iden, name=appt.name, user=appt.creator, - username=user.name)) + extra=self.core.getLogExtra(iden=appt.iden, name=appt.name)) await self._markfailed(appt, 'locked user') return view = self.core.getView(iden=appt.view, user=user) if view is None: logger.warning(f'Unknown view {appt.view} in stored appointment {appt.iden} {appt.name}', - extra=self.core.getLogExtra(iden=appt.iden, name=appt.name, user=appt.creator, - username=user.name, view=appt.view)) + extra=self.core.getLogExtra(iden=appt.iden, name=appt.name, view=appt.view)) await self._markfailed(appt, 'unknown view') return @@ -880,6 +876,7 @@ async def _execute(self, appt): coro = self._runJob(user, appt) task = 
self.core.runActiveTask(coro) + task._syn_scope['user'] = user appt.task = await self.core.boss.promotetask(task, f'Cron {appt.iden}', user, info=info) async def fini(): @@ -911,8 +908,7 @@ async def _runJob(self, user, appt): await self.core.addCronEdits(appt.iden, edits) logger.info(f'Agenda executing for iden={appt.iden}, name={appt.name} user={user.name}, view={appt.view}, query={appt.query}', - extra=self.core.getLogExtra(iden=appt.iden, name=appt.name, user=appt.creator, text=appt.query, - username=user.name, view=appt.iden)) + extra=self.core.getLogExtra(iden=appt.iden, name=appt.name, text=appt.query, view=appt.iden)) starttime = self._getNowTick() success = False @@ -939,9 +935,10 @@ async def _runJob(self, user, appt): elif mesg[0] == 'warn' and loglevel <= logging.WARNING: text = mesg[1].get('mesg', '') - # FIXME This pattern of mixing known and unkonwn values in a kwargs function can throw a TypeError - extra = self.core.getLogExtra(cron=appt.iden, **mesg[1]) - logger.warning(f'Cron job {appt.iden} issued warning: {text}', extra=extra) + _params = mesg[1] + _params['iden'] = appt.iden + logger.warning(f'Cron job {appt.iden} issued warning: {text}', + extra=self.core.getLogExtra(**_params)) elif mesg[0] == 'err': excname, errinfo = mesg[1] @@ -957,7 +954,7 @@ async def _runJob(self, user, appt): except Exception as e: result = f'raised exception {e}' logger.exception(f'Agenda job {appt.iden} {appt.name} raised exception', - extra=self.core.getLogInfo(iden=appt.iden, name=appt.name), + extra=self.core.getLogExtra(iden=appt.iden, name=appt.name), ) else: success = True @@ -981,8 +978,7 @@ async def _runJob(self, user, appt): f'took {took:.3f}s' if not self.core.isactive: mesg = mesg + ' Agenda status will not be saved since the Cortex is no longer the leader.' 
- logger.info(mesg, extra=self.core.getLogExtra(iden=appt.iden, name=appt.name, user=user.iden, - result=result, username=user.name, took=took)) + logger.info(mesg, extra=self.core.getLogExtra(iden=appt.iden, name=appt.name, result=result, took=took)) edits = { 'lastfinishtime': finishtime, 'isrunning': False, diff --git a/synapse/lib/auth.py b/synapse/lib/auth.py index 542b18ee58b..646db6d52de 100644 --- a/synapse/lib/auth.py +++ b/synapse/lib/auth.py @@ -1458,8 +1458,7 @@ async def tryPasswd(self, passwd, nexs=True, enforce_policy=True): if expires >= s_common.now(): if await s_passwd.checkShadowV2(passwd=passwd, shadow=shadow): await self.auth.setUserInfo(self.iden, 'onepass', None) - logger.debug(f'Used one time password for {self.name}', - extra=self.auth.nexsroot.cell.getLogExtra(user=self.iden, username=self.name)) + logger.debug(f'Used one time password for {self.name}') return True else: # Backwards compatible password handling @@ -1467,8 +1466,7 @@ async def tryPasswd(self, passwd, nexs=True, enforce_policy=True): if expires >= s_common.now(): if s_common.guid((params, passwd)) == hashed: await self.auth.setUserInfo(self.iden, 'onepass', None) - logger.debug(f'Used one time password for {self.name}', - extra=self.auth.nexsroot.cell.getLogExtra(user=self.iden, username=self.name)) + logger.debug(f'Used one time password for {self.name}') return True shadow = self.info.get('passwd') @@ -1498,7 +1496,7 @@ async def tryPasswd(self, passwd, nexs=True, enforce_policy=True): await self.auth.nexsroot.cell.setUserLocked(self.iden, True) - mesg = f'User {self.name} has exceeded the number of allowed password attempts ({valu + 1}), locking their account.' + mesg = f'User {self.name} has exceeded the number of allowed password attempts ({valu + 1}), locked their account.' 
logger.warning(mesg, extra=self.auth.nexsroot.cell.getLogExtra(target_user=self.iden, target_username=self.name, status='MODIFY')) @@ -1510,8 +1508,7 @@ async def tryPasswd(self, passwd, nexs=True, enforce_policy=True): # Backwards compatible password handling salt, hashed = shadow if s_common.guid((salt, passwd)) == hashed: - logger.debug(f'Migrating password to shadowv2 format for user {self.name}', - extra=self.auth.nexsroot.cell.getLogExtra(user=self.iden, username=self.name)) + logger.debug(f'Migrating password to shadowv2 format for user {self.name}') # Update user to new password hashing scheme. We cannot enforce policy # when migrating an existing password. await self.setPasswd(passwd=passwd, nexs=nexs, enforce_policy=False) diff --git a/synapse/lib/cell.py b/synapse/lib/cell.py index 193905bbf26..c7d268af177 100644 --- a/synapse/lib/cell.py +++ b/synapse/lib/cell.py @@ -3505,16 +3505,16 @@ def _log_web_request(self, handler: s_httpapi.Handler) -> None: if headers: enfo['headers'] = headers + extra = s_logging.getLogExtra(**enfo) + # It is possible that a Cell implementor may register handlers which # do not derive from our Handler class, so we have to handle that. if hasattr(handler, 'web_useriden') and handler.web_useriden: user = handler.web_useriden - enfo['user'] = user + extra['loginfo'].setdefault('user', user) if hasattr(handler, 'web_username') and handler.web_username: username = handler.web_username - enfo['username'] = username - - extra = s_logging.getLogExtra(**enfo) + extra['loginfo'].setdefault('username', username) if user: mesg = f'{status} {handler.request.method} {uri} ({remote_ip}) user={user} ({username}) {request_time:.2f}ms' @@ -4577,8 +4577,9 @@ async def _getCellUser(self, link, mesg): # passwd None always fails... 
passwd = info.get('passwd') - if not await user.tryPasswd(passwd): - raise s_exc.AuthDeny(mesg='Invalid password', username=user.name, user=user.iden) + with s_scope.enter({'user': user}): + if not await user.tryPasswd(passwd): + raise s_exc.AuthDeny(mesg='Invalid password', username=user.name, user=user.iden) return user diff --git a/synapse/lib/httpapi.py b/synapse/lib/httpapi.py index e24017705da..aeba8f771e7 100644 --- a/synapse/lib/httpapi.py +++ b/synapse/lib/httpapi.py @@ -217,16 +217,17 @@ def logAuthIssue(self, mesg=None, user=None, username=None, level=logging.WARNIN enfo = {'uri': uri, 'remoteip': remote_ip, } + extra = s_logging.getLogExtra(**enfo) errm = f'Failed to authenticate request to {uri} from {remote_ip} ' if mesg: errm = f'{errm}: {mesg}' if user: errm = f'{errm}: user={user}' - enfo['user'] = user + extra['loginfo'].setdefault('user', user) if username: errm = f'{errm} ({username})' - enfo['username'] = username - logger.log(level, msg=errm, extra=s_logging.getLogExtra(**enfo)) + extra['loginfo'].setdefault('username', username) + logger.log(level, msg=errm, extra=extra) def sendAuthRequired(self): self.set_header('WWW-Authenticate', 'Basic realm=synapse') diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py index 5d8a1bf73ce..1aae11b2d45 100644 --- a/synapse/lib/logging.py +++ b/synapse/lib/logging.py @@ -79,6 +79,9 @@ def getLogExtra(**kwargs): ''' Construct a properly enveloped log extra dictionary. ''' + # TODO Remove these asserts before merging! 
+ assert 'user' not in kwargs + assert 'username' not in kwargs extra = {'params': kwargs, 'loginfo': {}} return extra diff --git a/synapse/lib/nexus.py b/synapse/lib/nexus.py index 7a3a8c3fac4..ff8e32850f0 100644 --- a/synapse/lib/nexus.py +++ b/synapse/lib/nexus.py @@ -403,6 +403,9 @@ async def eat(self, nexsiden, event, args, kwargs, meta, wait=True): # Keep a reference to the shielded task to ensure it isn't GC'd self.applytask = asyncio.create_task(self._eat((nexsiden, event, args, kwargs, meta))) + # Clone the current scope to the applytask, so that log events have that scopes context. + # s_scope.clone(self.applytask) + except: self.cell.nexslock.release() raise diff --git a/synapse/lib/view.py b/synapse/lib/view.py index 3454560253c..b0c13aa31a1 100644 --- a/synapse/lib/view.py +++ b/synapse/lib/view.py @@ -957,38 +957,37 @@ async def eval(self, text, opts=None): async def callStorm(self, text, opts=None): user = self.core._userFromOpts(opts) - try: - - async for item in self.eval(text, opts=opts): - await asyncio.sleep(0) # pragma: no cover - - except s_stormctrl.StormReturn as e: - # Catch return( ... ) values and return the - # primitive version of that item. - return await s_stormtypes.toprim(e.item) + with s_scope.enter({'user': user}): + try: - except asyncio.CancelledError: - extra = self.core.getLogExtra(text=text) - logger.warning(f'callStorm cancelled', extra=extra) - raise + async for item in self.eval(text, opts=opts): + await asyncio.sleep(0) # pragma: no cover - except (s_stormctrl.StormLoopCtrl, s_stormctrl.StormGenrCtrl) as e: - if isinstance(e, s_stormctrl.StormLoopCtrl): - mesg = f'Loop control statement "{e.statement}" used outside of a loop.' - else: - mesg = f'Generator control statement "{e.statement}" used outside of a generator function.' 
- logmesg = f'Error during storm execution for {{ {text} }} - {mesg}' - extra = self.core.getLogExtra(text=text, user=user.iden, username=user.name) - logger.exception(logmesg, extra=extra) - raise s_exc.StormRuntimeError(mesg=mesg, statement=e.statement, highlight=e.get('highlight')) from e + except s_stormctrl.StormReturn as e: + # Catch return( ... ) values and return the + # primitive version of that item. + return await s_stormtypes.toprim(e.item) - except Exception: - extra = self.core.getLogExtra(text=text) - logger.exception(f'Error during callStorm execution for {{ {text} }}', extra=extra) - raise + except asyncio.CancelledError: + extra = self.core.getLogExtra(text=text) + logger.warning(f'callStorm cancelled', extra=extra) + raise - # Any other exceptions will be raised to - # callers as expected. + except (s_stormctrl.StormLoopCtrl, s_stormctrl.StormGenrCtrl) as e: + if isinstance(e, s_stormctrl.StormLoopCtrl): + mesg = f'Loop control statement "{e.statement}" used outside of a loop.' + else: + mesg = f'Generator control statement "{e.statement}" used outside of a generator function.' + logmesg = f'Error during storm execution for {{ {text} }} - {mesg}' + logger.exception(logmesg, extra=self.core.getLogExtra(text=text)) + raise s_exc.StormRuntimeError(mesg=mesg, statement=e.statement, highlight=e.get('highlight')) from e + + except Exception: + logger.exception(f'Error during callStorm execution for {{ {text} }}', + extra=self.core.getLogExtra(text=text, view=self.iden)) + # Any other exceptions will be raised to + # callers as expected. + raise async def nodes(self, text, opts=None): ''' @@ -1041,6 +1040,7 @@ async def runStorm(): tick = s_common.now() abstick = s_common.mononow() count = 0 + try: # Always start with an init message. 
@@ -1053,30 +1053,28 @@ async def runStorm(): shownode = (not show or 'node' in show) - with s_scope.enter({'user': user}): + async with await self.snap(user=user) as snap: - async with await self.snap(user=user) as snap: + if keepalive: + snap.schedCoro(snap.keepalive(keepalive)) - if keepalive: - snap.schedCoro(snap.keepalive(keepalive)) + if not show: + snap.link(chan.put) - if not show: - snap.link(chan.put) - - else: - [snap.on(n, chan.put) for n in show] + else: + [snap.on(n, chan.put) for n in show] - if shownode: - async for pode in snap.iterStormPodes(text, opts=opts, user=user): - await chan.put(('node', pode)) - count += 1 + if shownode: + async for pode in snap.iterStormPodes(text, opts=opts, user=user): + await chan.put(('node', pode)) + count += 1 - else: - info = opts.get('_loginfo', {}) - info.update({'mode': opts.get('mode', 'storm'), 'view': self.iden}) - self.core._logStormQuery(text, user, info=info) - async for item in snap.storm(text, opts=opts, user=user): - count += 1 + else: + info = opts.get('_loginfo', {}) + info.update({'mode': opts.get('mode', 'storm'), 'view': self.iden}) + self.core._logStormQuery(text, user, info=info) + async for item in snap.storm(text, opts=opts, user=user): + count += 1 except s_stormctrl.StormExit: pass @@ -1099,8 +1097,7 @@ async def runStorm(): if mesg: logmesg = f'{logmesg} - {mesg}' - extra = self.core.getLogExtra(text=text, user=user.iden, username=user.name) - logger.exception(logmesg, extra=extra) + logger.exception(logmesg, extra=self.core.getLogExtra(text=text, view=self.iden)) enfo = s_common.err(e) enfo[1].pop('esrc', None) @@ -1114,43 +1111,45 @@ async def runStorm(): tock = tick + abstook await chan.put(('fini', {'tock': tock, 'abstock': abstock, 'took': abstook, 'count': count, })) - await synt.worker(runStorm(), name='runstorm') - - editformat = opts.get('editformat', 'nodeedits') + with s_scope.enter({'user': user}): - while True: + await synt.worker(runStorm(), name='runstorm') - mesg = await 
chan.get() - kind = mesg[0] + editformat = opts.get('editformat', 'nodeedits') - if kind == 'node': - yield mesg - continue + while True: - if kind == 'node:edits': - if editformat == 'nodeedits': + mesg = await chan.get() + kind = mesg[0] - nodeedits = s_common.jsonsafe_nodeedits(mesg[1]['edits']) - mesg[1]['edits'] = nodeedits + if kind == 'node': yield mesg - continue - if editformat == 'none': - continue + if kind == 'node:edits': + if editformat == 'nodeedits': - assert editformat == 'count' + nodeedits = s_common.jsonsafe_nodeedits(mesg[1]['edits']) + mesg[1]['edits'] = nodeedits + yield mesg - count = sum(len(edit[2]) for edit in mesg[1].get('edits', ())) - mesg = ('node:edits:count', {'count': count}) - yield mesg - continue + continue - if kind == 'fini': - yield mesg - break + if editformat == 'none': + continue + + assert editformat == 'count' - yield mesg + count = sum(len(edit[2]) for edit in mesg[1].get('edits', ())) + mesg = ('node:edits:count', {'count': count}) + yield mesg + continue + + if kind == 'fini': + yield mesg + break + + yield mesg async def iterStormPodes(self, text, opts=None): diff --git a/synapse/tests/test_cortex.py b/synapse/tests/test_cortex.py index 6d0950254ea..d7d8107f3a1 100644 --- a/synapse/tests/test_cortex.py +++ b/synapse/tests/test_cortex.py @@ -3528,6 +3528,29 @@ async def test_storm_logging(self): mesg = stream.jsonlines()[0] self.eq(mesg['params'].get('view'), view) + self.eq(mesg['params'].get('text'), 'help foo') + self.eq(mesg['username'], 'root') + + udef = await core.addUser('foouser', ) + await core.setUserAdmin(udef.get('iden'), True) + asfoo = {'user': udef.get('iden')} + + with self.getLoggerStream('synapse.storm') as stream: + await alist(core.storm('help ask', opts=asfoo)) + + mesg = stream.jsonlines()[0] + self.eq(mesg['params'].get('view'), view) + self.eq(mesg['params'].get('text'), 'help ask') + self.eq(mesg['username'], 'foouser') + + q = '[test:str=hehe] [test:int=$node.value()]' + with 
self.getLoggerStream('synapse.lib.view') as stream: + await alist(core.storm(q, opts=asfoo)) + msgs = stream.jsonlines() + emsg = [m for m in msgs if 'Error during storm execution' in m.get('message')][0] + self.eq(emsg['params'].get('view'), view) + self.eq(emsg['params'].get('text'), q) + self.eq(emsg['username'], 'foouser') async def test_strict(self): diff --git a/synapse/tests/test_lib_httpapi.py b/synapse/tests/test_lib_httpapi.py index 41c24c40dd6..dd3fd8c30d0 100644 --- a/synapse/tests/test_lib_httpapi.py +++ b/synapse/tests/test_lib_httpapi.py @@ -1947,8 +1947,8 @@ async def test_request_logging(self): mesg = stream.jsonlines()[0] self.eq(mesg['params'].get('uri'), '/api/v1/auth/adduser') - self.eq(mesg['params'].get('username'), 'root') - self.eq(mesg['params'].get('user'), core.auth.rootuser.iden) + self.eq(mesg['username'], 'root') + self.eq(mesg['user'], core.auth.rootuser.iden) self.isin('headers', mesg['params']) self.eq(mesg['params']['headers'].get('user-agent'), 'test_request_logging') self.isin('remoteip', mesg['params']) @@ -1965,8 +1965,8 @@ async def test_request_logging(self): mesg = stream.jsonlines()[0] self.eq(mesg['params'].get('uri'), '/api/v1/active') self.notin('headers', mesg['params']) - self.notin('username', mesg['params']) - self.notin('user', mesg['params']) + self.notin('username', mesg) + self.notin('user', mesg) self.isin('remoteip', mesg['params']) self.isin('200 GET /api/v1/active', mesg.get('message')) @@ -1981,8 +1981,8 @@ async def test_request_logging(self): mesg = stream.jsonlines()[0] self.eq(mesg['params'].get('uri'), '/api/v1/login') - self.eq(mesg['params'].get('username'), 'visi') - self.eq(mesg['params'].get('user'), visiiden) + self.eq(mesg['username'], 'visi') + self.eq(mesg['user'], visiiden) # session cookie loging populates the data upon reuse with self.getLoggerStream(logname) as stream: @@ -1992,8 +1992,8 @@ async def test_request_logging(self): mesg = stream.jsonlines()[0] 
self.eq(mesg['params'].get('uri'), '/api/v1/auth/users') - self.eq(mesg['params'].get('username'), 'visi') - self.eq(mesg['params'].get('user'), visiiden) + self.eq(mesg['username'], 'visi') + self.eq(mesg['user'], visiiden) async with self.getTestCore(conf={'https:parse:proxy:remoteip': True}) as core: @@ -2021,8 +2021,8 @@ async def test_request_logging(self): mesg = stream.jsonlines()[0] self.eq(mesg['params'].get('uri'), '/api/v1/auth/adduser') - self.eq(mesg['params'].get('username'), 'root') - self.eq(mesg['params'].get('user'), core.auth.rootuser.iden) + self.eq(mesg['username'], 'root') + self.eq(mesg['user'], core.auth.rootuser.iden) self.eq(mesg['params'].get('remoteip'), '1.2.3.4') self.isin('(root)', mesg.get('message')) self.isin('200 POST /api/v1/auth/adduser', mesg.get('message')) @@ -2039,8 +2039,8 @@ async def test_request_logging(self): mesg = stream.jsonlines()[0] self.eq(mesg['params'].get('uri'), '/api/v1/auth/adduser') - self.eq(mesg['params'].get('username'), 'root') - self.eq(mesg['params'].get('user'), core.auth.rootuser.iden) + self.eq(mesg['username'], 'root') + self.eq(mesg['user'], core.auth.rootuser.iden) self.eq(mesg['params'].get('remoteip'), '8.8.8.8') self.isin('(root)', mesg.get('message')) self.isin('200 POST /api/v1/auth/adduser', mesg.get('message')) From 2db55de78777ddd4bad0295ef53e1f64fad7ddfe Mon Sep 17 00:00:00 2001 From: epiphyte Date: Thu, 5 Mar 2026 15:43:45 -0500 Subject: [PATCH 59/66] Clone the current scope onto the nexus applytask --- synapse/lib/nexus.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/synapse/lib/nexus.py b/synapse/lib/nexus.py index ff8e32850f0..b93ed2205aa 100644 --- a/synapse/lib/nexus.py +++ b/synapse/lib/nexus.py @@ -403,8 +403,9 @@ async def eat(self, nexsiden, event, args, kwargs, meta, wait=True): # Keep a reference to the shielded task to ensure it isn't GC'd self.applytask = asyncio.create_task(self._eat((nexsiden, event, args, kwargs, meta))) - # Clone the current
scope to the applytask, so that log events have that scopes context. - # s_scope.clone(self.applytask) + # Clone the current scope to the applytask, so that log events in the scope + # would have access to any user / sess values which have been set. + s_scope.clone(self.applytask) except: self.cell.nexslock.release() From 28d79a3490fc3bc6066f78270a8053305068626d Mon Sep 17 00:00:00 2001 From: epiphyte Date: Thu, 5 Mar 2026 19:25:35 -0500 Subject: [PATCH 60/66] Update a getLogExtra call --- synapse/lib/httpapi.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/synapse/lib/httpapi.py b/synapse/lib/httpapi.py index aeba8f771e7..48383ef78b6 100644 --- a/synapse/lib/httpapi.py +++ b/synapse/lib/httpapi.py @@ -214,10 +214,7 @@ def logAuthIssue(self, mesg=None, user=None, username=None, level=logging.WARNIN ''' uri = self.request.uri remote_ip = self.request.remote_ip - enfo = {'uri': uri, - 'remoteip': remote_ip, - } - extra = s_logging.getLogExtra(**enfo) + extra = s_logging.getLogExtra(uri=uri, remote_ip=remote_ip) errm = f'Failed to authenticate request to {uri} from {remote_ip} ' if mesg: errm = f'{errm}: {mesg}' From d649b8c9ddaf7a7da1c0da7427808c6c6b8175dc Mon Sep 17 00:00:00 2001 From: epiphyte Date: Thu, 5 Mar 2026 21:53:11 -0500 Subject: [PATCH 61/66] Remove unused import --- synapse/common.py | 1 - 1 file changed, 1 deletion(-) diff --git a/synapse/common.py b/synapse/common.py index 3f04f20689f..9bad0bdac02 100644 --- a/synapse/common.py +++ b/synapse/common.py @@ -23,7 +23,6 @@ import binascii import builtins import tempfile -import warnings import functools import itertools import threading From b0fc9ce11a205fcb93c5529e81b9b95e1f22bf18 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Thu, 5 Mar 2026 22:17:56 -0500 Subject: [PATCH 62/66] Set the user scope when starting dmon's right away --- synapse/cortex.py | 11 +++-------- synapse/lib/storm.py | 4 ++-- 2 files changed, 5 insertions(+), 10 deletions(-) diff --git a/synapse/cortex.py 
b/synapse/cortex.py index c406db466a2..155115672b7 100644 --- a/synapse/cortex.py +++ b/synapse/cortex.py @@ -1058,7 +1058,7 @@ async def initServiceStorage(self): mesg = f'User {useriden} ({user.name}) has a rule on the "cortex" authgate. This authgate is not used ' \ f'for permission checks and will be removed in Synapse v3.0.0.' - logger.warning(mesg, extra=self.getLogExtra(target_user=useriden, target_username=user.name)) + logger.warning(mesg) for roleiden in ag.gateroles.keys(): role = self.auth.role(roleiden) if role is None: @@ -1066,7 +1066,7 @@ async def initServiceStorage(self): mesg = f'Role {roleiden} ({role.name}) has a rule on the "cortex" authgate. This authgate is not used ' \ f'for permission checks and will be removed in Synapse v3.0.0.' - logger.warning(mesg, extra=self.getLogExtra(target_role=roleiden, target_rolename=role.name)) + logger.warning(mesg) self._initVaults() @@ -6509,14 +6509,9 @@ def _logStormQuery(self, text, user, info=None): if info is None: info = {} info['text'] = text - # username and user here are logged twice; which represents a - # difference between the auth user who initiated the action - # and the target user who is executing the runtime. 
info['hash'] = s_storm.queryhash(text) - # TODO Understand why this is using s_logging.getLogExtra instead of self.getLogExtra - extra = s_logging.getLogExtra(**info) stormlogger.log(self.stormloglvl, 'Executing storm query {%s} as [%s]', text, user.name, - extra=extra) + extra=self.getLogExtra(**info)) async def getNodeByNdef(self, ndef, view=None): ''' diff --git a/synapse/lib/storm.py b/synapse/lib/storm.py index 6010c69392d..93ead1f883a 100644 --- a/synapse/lib/storm.py +++ b/synapse/lib/storm.py @@ -1290,11 +1290,11 @@ def _getRunLog(self): async def dmonloop(self): - logger.debug(f'Starting Dmon {self.iden}', extra=self.core.getLogExtra(iden=self.iden)) - s_scope.set('user', self.user) s_scope.set('storm:dmon', self.iden) + logger.debug(f'Starting Dmon {self.iden}', extra=self.core.getLogExtra(iden=self.iden)) + text = self.ddef.get('storm') opts = self.ddef.get('stormopts', {}) From aef4b6d70f400f6a1fd935ab0738ed1525117a02 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Thu, 5 Mar 2026 22:25:17 -0500 Subject: [PATCH 63/66] http auth failures go to target_user / target_username" --- synapse/lib/httpapi.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/synapse/lib/httpapi.py b/synapse/lib/httpapi.py index 48383ef78b6..5b2186dfaa2 100644 --- a/synapse/lib/httpapi.py +++ b/synapse/lib/httpapi.py @@ -214,17 +214,20 @@ def logAuthIssue(self, mesg=None, user=None, username=None, level=logging.WARNIN ''' uri = self.request.uri remote_ip = self.request.remote_ip - extra = s_logging.getLogExtra(uri=uri, remote_ip=remote_ip) + erfo = { + 'uri': uri, + 'remote_ip': remote_ip, + } errm = f'Failed to authenticate request to {uri} from {remote_ip} ' if mesg: errm = f'{errm}: {mesg}' if user: errm = f'{errm}: user={user}' - extra['loginfo'].setdefault('user', user) + erfo['target_user'] = user if username: errm = f'{errm} ({username})' - extra['loginfo'].setdefault('username', username) - logger.log(level, msg=errm, extra=extra) + 
erfo['target_username'] = username + logger.log(level, msg=errm, extra=s_logging.getLogExtra(**erfo)) def sendAuthRequired(self): self.set_header('WWW-Authenticate', 'Basic realm=synapse') From e9dd62aa6aae31d57cd06660890dfa2aa3e0b75e Mon Sep 17 00:00:00 2001 From: epiphyte Date: Thu, 5 Mar 2026 22:25:29 -0500 Subject: [PATCH 64/66] Revert "Set the user scope when starting dmon's right away" The original caller user is the one starting the dmon, not the other way around. This reverts commit b0fc9ce11a205fcb93c5529e81b9b95e1f22bf18. --- synapse/cortex.py | 11 ++++++++--- synapse/lib/storm.py | 4 ++-- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/synapse/cortex.py b/synapse/cortex.py index 155115672b7..c406db466a2 100644 --- a/synapse/cortex.py +++ b/synapse/cortex.py @@ -1058,7 +1058,7 @@ async def initServiceStorage(self): mesg = f'User {useriden} ({user.name}) has a rule on the "cortex" authgate. This authgate is not used ' \ f'for permission checks and will be removed in Synapse v3.0.0.' - logger.warning(mesg) + logger.warning(mesg, extra=self.getLogExtra(target_user=useriden, target_username=user.name)) for roleiden in ag.gateroles.keys(): role = self.auth.role(roleiden) if role is None: @@ -1066,7 +1066,7 @@ async def initServiceStorage(self): mesg = f'Role {roleiden} ({role.name}) has a rule on the "cortex" authgate. This authgate is not used ' \ f'for permission checks and will be removed in Synapse v3.0.0.' - logger.warning(mesg) + logger.warning(mesg, extra=self.getLogExtra(target_role=roleiden, target_rolename=role.name)) self._initVaults() @@ -6509,9 +6509,14 @@ def _logStormQuery(self, text, user, info=None): if info is None: info = {} info['text'] = text + # username and user here are logged twice; which represents a + # difference between the auth user who initiated the action + # and the target user who is executing the runtime. 
info['hash'] = s_storm.queryhash(text) + # TODO Understand why this is using s_logging.getLogExtra instead of self.getLogExtra + extra = s_logging.getLogExtra(**info) stormlogger.log(self.stormloglvl, 'Executing storm query {%s} as [%s]', text, user.name, - extra=self.getLogExtra(**info)) + extra=extra) async def getNodeByNdef(self, ndef, view=None): ''' diff --git a/synapse/lib/storm.py b/synapse/lib/storm.py index 93ead1f883a..6010c69392d 100644 --- a/synapse/lib/storm.py +++ b/synapse/lib/storm.py @@ -1290,11 +1290,11 @@ def _getRunLog(self): async def dmonloop(self): + logger.debug(f'Starting Dmon {self.iden}', extra=self.core.getLogExtra(iden=self.iden)) + s_scope.set('user', self.user) s_scope.set('storm:dmon', self.iden) - logger.debug(f'Starting Dmon {self.iden}', extra=self.core.getLogExtra(iden=self.iden)) - text = self.ddef.get('storm') opts = self.ddef.get('stormopts', {}) From 9c7a00e96064824b38e645d08f16e4507b7f7481 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Thu, 5 Mar 2026 22:39:19 -0500 Subject: [PATCH 65/66] Tidy --- synapse/cortex.py | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/synapse/cortex.py b/synapse/cortex.py index c406db466a2..155115672b7 100644 --- a/synapse/cortex.py +++ b/synapse/cortex.py @@ -1058,7 +1058,7 @@ async def initServiceStorage(self): mesg = f'User {useriden} ({user.name}) has a rule on the "cortex" authgate. This authgate is not used ' \ f'for permission checks and will be removed in Synapse v3.0.0.' - logger.warning(mesg, extra=self.getLogExtra(target_user=useriden, target_username=user.name)) + logger.warning(mesg) for roleiden in ag.gateroles.keys(): role = self.auth.role(roleiden) if role is None: @@ -1066,7 +1066,7 @@ async def initServiceStorage(self): mesg = f'Role {roleiden} ({role.name}) has a rule on the "cortex" authgate. This authgate is not used ' \ f'for permission checks and will be removed in Synapse v3.0.0.' 
- logger.warning(mesg, extra=self.getLogExtra(target_role=roleiden, target_rolename=role.name)) + logger.warning(mesg) self._initVaults() @@ -6509,14 +6509,9 @@ def _logStormQuery(self, text, user, info=None): if info is None: info = {} info['text'] = text - # username and user here are logged twice; which represents a - # difference between the auth user who initiated the action - # and the target user who is executing the runtime. info['hash'] = s_storm.queryhash(text) - # TODO Understand why this is using s_logging.getLogExtra instead of self.getLogExtra - extra = s_logging.getLogExtra(**info) stormlogger.log(self.stormloglvl, 'Executing storm query {%s} as [%s]', text, user.name, - extra=extra) + extra=self.getLogExtra(**info)) async def getNodeByNdef(self, ndef, view=None): ''' From e8a1228ffd1fcc8f6e47243e6c8907daa18910f1 Mon Sep 17 00:00:00 2001 From: epiphyte Date: Fri, 6 Mar 2026 08:04:08 -0500 Subject: [PATCH 66/66] Update devops docs --- docs/synapse/devopsguide.rst | 17 ++++++----------- 1 file changed, 6 insertions(+), 11 deletions(-) diff --git a/docs/synapse/devopsguide.rst b/docs/synapse/devopsguide.rst index ccbe06b84c6..8096d2d6de8 100644 --- a/docs/synapse/devopsguide.rst +++ b/docs/synapse/devopsguide.rst @@ -356,22 +356,19 @@ message, level, time, and metadata about where the log message came from:: }, "level": "INFO", "time": "2026-02-13 10:38:24,545", - "user": "a8b94560fd9b6e1e38245a6f7d659106", - "username": "root", + "user": "e3532bc88fa66afb592e6a1474a98675", + "username": "someUsername", "params": { # Param "mode": "storm", "view": "715a5c9ae37e4045795ea3f3cabb44fb", "text": "[inet:asn=1234]", - "username": "someUsername", - "user": "e3532bc88fa66afb592e6a1474a98675", "hash": "ef94e89eb3bc309a40242876f6c5f296" } } The ``user`` and ``username`` fields at the top level of a log correspond to the currently active / authorized API user -which has caused a log event to occur. These may differ from the user associated with the event. 
That information would -normally be embedded in the ``params`` key. The ``ahaservice`` key, if present, indicates the service that is -associated with the log message. +which has caused a log event to occur. The ``service`` key, if present, indicates the AHA service that is associated +with the log message. When exceptions are logged with structured logging, we capture additional information about the exception, including the traceback as structured data. In the event that the error is a Synapse Err class, we also capture additional metadata @@ -391,8 +388,8 @@ in the pretty-printed log message. The ``key=valu`` data that was raised by the }, "level": "ERROR", "time": "2026-02-13 11:24:06,853", - "user": "e889d295b19fd5c12f861cd6fe564aa8", - "username": "root", + "user": "e3532bc88fa66afb592e6a1474a98675", + "username": "someUsername", "error": { "code": "StormRaise", "traceback": [ @@ -414,8 +411,6 @@ in the pretty-printed log message. The ``key=valu`` data that was raised by the }, "params": { "text": "$lib.raise(Newp, 'ruh roh', key=valu)", - "user": "e3532bc88fa66afb592e6a1474a98675", - "username": "someUsername" } }