diff --git a/synapse/axon.py b/synapse/axon.py index a578f79dc89..ce04cbf359f 100644 --- a/synapse/axon.py +++ b/synapse/axon.py @@ -1067,7 +1067,7 @@ async def get(self, sha256, offs=None, size=None): fsize = await self._reqHas(sha256) fhash = s_common.ehex(sha256) - logger.debug(f'Getting blob [{fhash}].', extra=await self.getLogExtra(sha256=fhash)) + logger.debug(f'Getting blob [{fhash}].', extra=self.getLogExtra(sha256=fhash)) if offs is not None or size is not None: @@ -1203,7 +1203,7 @@ async def hashset(self, sha256): await self._reqHas(sha256) fhash = s_common.ehex(sha256) - logger.debug(f'Getting blob [{fhash}].', extra=await self.getLogExtra(sha256=fhash)) + logger.debug(f'Getting blob [{fhash}].', extra=self.getLogExtra(sha256=fhash)) hashset = s_hashset.HashSet() @@ -1251,7 +1251,7 @@ async def _populate(self, sha256, genr, size): return int.from_bytes(byts, 'big') fhash = s_common.ehex(sha256) - logger.debug(f'Saving blob [{fhash}].', extra=await self.getLogExtra(sha256=fhash)) + logger.debug(f'Saving blob [{fhash}].', extra=self.getLogExtra(sha256=fhash)) size = await self._saveFileGenr(sha256, genr, size) @@ -1378,7 +1378,7 @@ async def _axonFileDel(self, sha256): return False fhash = s_common.ehex(sha256) - logger.debug(f'Deleting blob [{fhash}].', extra=await self.getLogExtra(sha256=fhash)) + logger.debug(f'Deleting blob [{fhash}].', extra=self.getLogExtra(sha256=fhash)) size = int.from_bytes(byts, 'big') self.axonmetrics.inc('file:count', valu=-1) @@ -1447,7 +1447,7 @@ async def readlines(self, sha256, errors='ignore'): todo = s_common.todo(_spawn_readlines, sock00, errors=errors) async with await s_base.Base.anit() as scope: - scope.schedCoro(s_process.spawn(todo, log_conf=await self._getSpawnLogConf())) + scope.schedCoro(s_process.spawn(todo, logconf=self.getLogConf())) feedtask = scope.schedCoro(self._sha256ToLink(sha256, link00)) while not self.isfini: @@ -1481,7 +1481,7 @@ async def csvrows(self, sha256, dialect='excel', errors='ignore', 
**fmtparams): todo = s_common.todo(_spawn_readrows, sock00, dialect, fmtparams, errors=errors) async with await s_base.Base.anit() as scope: - scope.schedCoro(s_process.spawn(todo, log_conf=await self._getSpawnLogConf())) + scope.schedCoro(s_process.spawn(todo, logconf=self.getLogConf())) feedtask = scope.schedCoro(self._sha256ToLink(sha256, link00)) while not self.isfini: @@ -1817,7 +1817,7 @@ async def wget(self, url, params=None, headers=None, json=None, body=None, metho Returns: dict: An information dictionary containing the results of the request. ''' - logger.debug(f'Wget called for [{url}].', extra=await self.getLogExtra(url=s_urlhelp.sanitizeUrl(url))) + logger.debug(f'Wget called for [{url}].', extra=self.getLogExtra(url=s_urlhelp.sanitizeUrl(url))) ssl = self.getCachedSslCtx(opts=ssl_opts, verify=ssl) diff --git a/synapse/common.py b/synapse/common.py index 8f941477d20..ff6b5f1a0e4 100644 --- a/synapse/common.py +++ b/synapse/common.py @@ -39,6 +39,7 @@ import synapse.exc as s_exc import synapse.lib.const as s_const +import synapse.lib.logging as s_logging import synapse.lib.msgpack as s_msgpack import synapse.lib.structlog as s_structlog @@ -763,108 +764,6 @@ def makedirs(path, mode=0o777): def iterzip(*args, fillvalue=None): return itertools.zip_longest(*args, fillvalue=fillvalue) -def _getLogConfFromEnv(defval=None, structlog=None, datefmt=None): - if structlog: - structlog = 'true' - else: - structlog = 'false' - defval = os.getenv('SYN_LOG_LEVEL', defval) - datefmt = os.getenv('SYN_LOG_DATEFORMAT', datefmt) - structlog = envbool('SYN_LOG_STRUCT', structlog) - ret = {'defval': defval, 'structlog': structlog, 'datefmt': datefmt} - return ret - -def normLogLevel(valu): - ''' - Norm a log level value to a integer. - - Args: - valu: The value to norm ( a string or integer ). - - Returns: - int: A valid Logging log level. 
- ''' - if isinstance(valu, int): - if valu not in s_const.LOG_LEVEL_INVERSE_CHOICES: - raise s_exc.BadArg(mesg=f'Invalid log level provided: {valu}', valu=valu) - return valu - if isinstance(valu, str): - valu = valu.strip() - try: - valu = int(valu) - except ValueError: - valu = valu.upper() - ret = s_const.LOG_LEVEL_CHOICES.get(valu) - if ret is None: - raise s_exc.BadArg(mesg=f'Invalid log level provided: {valu}', valu=valu) from None - return ret - else: - return normLogLevel(valu) - raise s_exc.BadArg(mesg=f'Unknown log level type: {type(valu)} {valu}', valu=valu) - -def setlogging(mlogger, defval=None, structlog=None, log_setup=True, datefmt=None): - ''' - Configure synapse logging. - - Args: - mlogger (logging.Logger): Reference to a logging.Logger() - defval (str): Default log level. May be an integer. - structlog (bool): Enabled structured (jsonl) logging output. - datefmt (str): Optional strftime format string. - - Notes: - This calls logging.basicConfig and should only be called once per process. - - Returns: - None - ''' - ret = _getLogConfFromEnv(defval, structlog, datefmt) - - datefmt = ret.get('datefmt') - log_level = ret.get('defval') - log_struct = ret.get('structlog') - - if log_level: # pragma: no cover - - log_level = normLogLevel(log_level) - - # See https://docs.python.org/3/howto/logging-cookbook.html#blocking-handlers for info on this configuration - logq = queue.SimpleQueue() - handler = logging.handlers.QueueHandler(logq) - stream = logging.StreamHandler() - - listener = logging.handlers.QueueListener(logq, stream) - listener.start() - - def logfini(): - # On shutdown, stop the QueueListener, remove QueueHandler from the - # logger, and add the StreamHandler so we don't lose messages that - # are logged after the QueueListener has shutdown. Messages already - # in the QueueListener will flush as part of the stop(). This is all - # required because atexit handlers are called in reverse order of - # when they were registered. 
So, we want to make sure to have - # functional logging until the process is completely gone. - listener.stop() - logging.root.removeHandler(handler) - logging.root.addHandler(stream) - - atexit.register(logfini) - - if log_struct: - formatter = s_structlog.JsonFormatter(datefmt=datefmt) - else: - formatter = logging.Formatter(fmt=s_const.LOG_FORMAT, datefmt=datefmt) - - stream.setFormatter(formatter) - - logging.root.setLevel(log_level) - logging.root.addHandler(handler) - - if log_setup: - mlogger.info('log level set to %s', s_const.LOG_LEVEL_INVERSE_CHOICES.get(log_level)) - - return ret - syndir_default = '~/.syn' syndir = os.getenv('SYN_DIR') if syndir is None: @@ -959,14 +858,14 @@ def config(conf, confdefs): @functools.lru_cache(maxsize=1024) def deprecated(name, curv='2.x', eolv='3.0.0'): - mesg = f'"{name}" is deprecated in {curv} and will be removed in {eolv}' - logger.warning(mesg, extra={'synapse': {'curv': curv, 'eolv': eolv}}) + mesg = f'{name} is deprecated in {curv} and will be removed in {eolv}' + logger.warning(mesg, extra=s_logging.getLogExtra(curv=curv, eolv=eolv)) return mesg @functools.lru_cache(maxsize=1024) def deprdate(name, date): # pragma: no cover mesg = f'{name} is deprecated and will be removed on {date}.' 
- logger.warning(mesg, extra={'synapse': {'eold': date}}) + logger.warning(mesg, extra=s_logging.getLogExtra(eold=date)) return mesg def jsonsafe_nodeedits(nodeedits): diff --git a/synapse/cortex.py b/synapse/cortex.py index 18c39289a81..278c47534e9 100644 --- a/synapse/cortex.py +++ b/synapse/cortex.py @@ -35,6 +35,7 @@ import synapse.lib.dyndeps as s_dyndeps import synapse.lib.grammar as s_grammar import synapse.lib.httpapi as s_httpapi +import synapse.lib.logging as s_logging import synapse.lib.msgpack as s_msgpack import synapse.lib.modules as s_modules import synapse.lib.schemas as s_schemas @@ -965,7 +966,7 @@ async def initServiceStorage(self): self._initCorePerms() # Reset the storm:log:level from the config value to an int for internal use. - self.conf['storm:log:level'] = s_common.normLogLevel(self.conf.get('storm:log:level')) + self.conf['storm:log:level'] = s_logging.normLogLevel(self.conf.get('storm:log:level')) self.stormlog = self.conf.get('storm:log') self.stormloglvl = self.conf.get('storm:log:level') @@ -1057,7 +1058,7 @@ async def initServiceStorage(self): mesg = f'User {useriden} ({user.name}) has a rule on the "cortex" authgate. This authgate is not used ' \ f'for permission checks and will be removed in Synapse v3.0.0.' - logger.warning(mesg, extra=await self.getLogExtra(user=useriden, username=user.name)) + logger.warning(mesg, extra=self.getLogExtra(user=useriden, username=user.name)) for roleiden in ag.gateroles.keys(): role = self.auth.role(roleiden) if role is None: @@ -1065,7 +1066,7 @@ async def initServiceStorage(self): mesg = f'Role {roleiden} ({role.name}) has a rule on the "cortex" authgate. This authgate is not used ' \ f'for permission checks and will be removed in Synapse v3.0.0.' 
- logger.warning(mesg, extra=await self.getLogExtra(role=roleiden, rolename=role.name)) + logger.warning(mesg, extra=self.getLogExtra(role=roleiden, rolename=role.name)) self._initVaults() @@ -3025,7 +3026,7 @@ async def _normStormPkg(self, pkgdef, validstorm=True): name = cdef.get('name') mesg = f"Storm command definition 'forms' key is deprecated and will be removed " \ f"in 3.0.0 (command {name} in package {pkgname})" - logger.warning(mesg, extra=await self.getLogExtra(name=name, pkgname=pkgname)) + logger.warning(mesg, extra=self.getLogExtra(name=name, pkgname=pkgname)) for gdef in pkgdef.get('graphs', ()): gdef['iden'] = s_common.guid((pkgname, gdef.get('name'))) @@ -3094,7 +3095,7 @@ async def _onload(): await self.fire('core:pkg:onload:start', pkg=name) - logextra = await self.getLogExtra(pkg=name, vers=pkgvers) + logextra = self.getLogExtra(pkg=name, vers=pkgvers) verskey = 'storage:version' @@ -3128,7 +3129,7 @@ async def _onload(): await self.setStormPkgVar(name, verskey, vers) continue - logextra['synapse']['initvers'] = vers + logextra['params']['initvers'] = vers logger.info(f'{name} starting init vers={vers}: {vname}', extra=logextra) @@ -6158,7 +6159,7 @@ async def count(self, text, opts=None): if proxy is not None: proxname = proxy._ahainfo.get('name') - extra = await self.getLogExtra(mirror=proxname, hash=s_storm.queryhash(text)) + extra = self.getLogExtra(mirror=proxname, hash=s_storm.queryhash(text)) logger.info(f'Offloading Storm query to mirror {proxname}.', extra=extra) mirropts = await self._getMirrorOpts(opts) @@ -6226,21 +6227,21 @@ async def _getMirrorProxy(self, opts): return proxy mesg = f'Pool mirror [{proxyname}] is too far out of sync. Skipping.' 
- logger.warning(mesg, extra=await self.getLogExtra(delta=delta, mirror=proxyname, mirror_offset=miroffs)) + logger.warning(mesg, extra=self.getLogExtra(delta=delta, mirror=proxyname, mirror_offset=miroffs)) except s_exc.ShuttingDown: mesg = f'Proxy for pool mirror [{proxyname}] is shutting down. Skipping.' - logger.warning(mesg, extra=await self.getLogExtra(mirror=proxyname)) + logger.warning(mesg, extra=self.getLogExtra(mirror=proxyname)) except s_exc.IsFini: mesg = f'Proxy for pool mirror [{proxyname}] was shutdown. Skipping.' - logger.warning(mesg, extra=await self.getLogExtra(mirror=proxyname)) + logger.warning(mesg, extra=self.getLogExtra(mirror=proxyname)) except TimeoutError: mesg = f'Timeout waiting for pool mirror [{proxyname}] Nexus offset.' - logger.warning(mesg, extra=await self.getLogExtra(mirror=proxyname)) + logger.warning(mesg, extra=self.getLogExtra(mirror=proxyname)) - logger.warning('Pool members exhausted. Running query locally.', extra=await self.getLogExtra()) + logger.warning('Pool members exhausted. 
Running query locally.', extra=self.getLogExtra()) return None async def storm(self, text, opts=None): @@ -6252,7 +6253,7 @@ async def storm(self, text, opts=None): if proxy is not None: proxname = proxy._ahainfo.get('name') - extra = await self.getLogExtra(mirror=proxname, hash=s_storm.queryhash(text)) + extra = self.getLogExtra(mirror=proxname, hash=s_storm.queryhash(text)) logger.info(f'Offloading Storm query to mirror {proxname}.', extra=extra) mirropts = await self._getMirrorOpts(opts) @@ -6286,7 +6287,7 @@ async def callStorm(self, text, opts=None): if proxy is not None: proxname = proxy._ahainfo.get('name') - extra = await self.getLogExtra(mirror=proxname, hash=s_storm.queryhash(text)) + extra = self.getLogExtra(mirror=proxname, hash=s_storm.queryhash(text)) logger.info(f'Offloading Storm query to mirror {proxname}.', extra=extra) mirropts = await self._getMirrorOpts(opts) @@ -6316,7 +6317,7 @@ async def exportStorm(self, text, opts=None): if proxy is not None: proxname = proxy._ahainfo.get('name') - extra = await self.getLogExtra(mirror=proxname, hash=s_storm.queryhash(text)) + extra = self.getLogExtra(mirror=proxname, hash=s_storm.queryhash(text)) logger.info(f'Offloading Storm query to mirror {proxname}.', extra=extra) mirropts = await self._getMirrorOpts(opts) @@ -6443,8 +6444,9 @@ async def stormlist(self, text, opts=None): async def _getStormEval(self, text): try: astvalu = copy.deepcopy(await s_parser.evalcache.aget(text)) - except s_exc.FatalErr: - logger.exception(f'Fatal error while parsing [{text}]', extra={'synapse': {'text': text}}) + except s_exc.FatalErr: # pragma: no cover + extra = self.getLogExtra(text=text) + logger.exception(f'Fatal error while parsing [{text}]', extra=extra) await self.fini() raise astvalu.init(self) @@ -6453,8 +6455,9 @@ async def _getStormEval(self, text): async def _getStormQuery(self, args): try: query = copy.deepcopy(await s_parser.querycache.aget(args)) - except s_exc.FatalErr: - logger.exception(f'Fatal error 
while parsing [{args}]', extra={'synapse': {'text': args[0]}}) + except s_exc.FatalErr: # pragma: no cover + extra = self.getLogExtra(text=args[0]) + logger.exception(f'Fatal error while parsing [{args}]', extra=extra) await self.fini() raise query.init(self) @@ -6507,8 +6510,9 @@ def _logStormQuery(self, text, user, info=None): info['username'] = user.name info['user'] = user.iden info['hash'] = s_storm.queryhash(text) + extra = s_logging.getLogExtra(**info) stormlogger.log(self.stormloglvl, 'Executing storm query {%s} as [%s]', text, user.name, - extra={'synapse': info}) + extra=extra) async def getNodeByNdef(self, ndef, view=None): ''' @@ -6989,7 +6993,7 @@ async def enableCronJob(self, iden): ''' await self.agenda.enable(iden) await self.feedBeholder('cron:enable', {'iden': iden}, gates=[iden]) - logger.info(f'Enabled cron job {iden}', extra=await self.getLogExtra(iden=iden, status='MODIFY')) + logger.info(f'Enabled cron job {iden}', extra=self.getLogExtra(iden=iden, status='MODIFY')) @s_nexus.Pusher.onPushAuto('cron:disable') async def disableCronJob(self, iden): @@ -7002,7 +7006,7 @@ async def disableCronJob(self, iden): await self.agenda.disable(iden) await self._killCronTask(iden) await self.feedBeholder('cron:disable', {'iden': iden}, gates=[iden]) - logger.info(f'Disabled cron job {iden}', extra=await self.getLogExtra(iden=iden, status='MODIFY')) + logger.info(f'Disabled cron job {iden}', extra=self.getLogExtra(iden=iden, status='MODIFY')) async def killCronTask(self, iden): if self.agenda.appts.get(iden) is None: diff --git a/synapse/lib/agenda.py b/synapse/lib/agenda.py index dd0d72ebda9..9f239832dea 100644 --- a/synapse/lib/agenda.py +++ b/synapse/lib/agenda.py @@ -14,6 +14,7 @@ import synapse.lib.base as s_base import synapse.lib.coro as s_coro +import synapse.lib.logging as s_logging # Agenda: manages running one-shot and periodic tasks in the future ("appointments") @@ -424,7 +425,7 @@ def updateNexttime(self, now): async def edits(self, edits): for 
name, valu in edits.items(): if name not in self.__class__._synced_attrs: - extra = await self.stor.core.getLogExtra(name=name, valu=valu) + extra = self.stor.core.getLogExtra(name=name, valu=valu) logger.warning('_Appt.edits() Invalid attribute received: %s = %r', name, valu, extra=extra) continue @@ -916,7 +917,7 @@ async def _runJob(self, user, appt): starttime = self._getNowTick() success = False - loglevel = s_common.normLogLevel(appt.loglevel) + loglevel = s_logging.normLogLevel(appt.loglevel) try: opts = { @@ -939,7 +940,7 @@ async def _runJob(self, user, appt): elif mesg[0] == 'warn' and loglevel <= logging.WARNING: text = mesg[1].get('mesg', '') - extra = await self.core.getLogExtra(cron=appt.iden, **mesg[1]) + extra = self.core.getLogExtra(cron=appt.iden, **mesg[1]) logger.warning(f'Cron job {appt.iden} issued warning: {text}', extra=extra) elif mesg[0] == 'err': diff --git a/synapse/lib/aha.py b/synapse/lib/aha.py index e64628e4b44..d62c44912b6 100644 --- a/synapse/lib/aha.py +++ b/synapse/lib/aha.py @@ -209,11 +209,11 @@ async def addAhaSvc(self, name, info, network=None): async def fini(): if self.cell.isfini: # pragma: no cover mesg = f'{self.cell.__class__.__name__} is fini. Unable to set {name}@{network} as down.' - logger.warning(mesg, await self.cell.getLogExtra(name=svcname, netw=svcnetw)) + logger.warning(mesg, extra=self.cell.getLogExtra(name=svcname, netw=svcnetw)) return logger.info(f'AhaCellApi fini, setting service offline [{name}]', - extra=await self.cell.getLogExtra(name=svcname, netw=svcnetw)) + extra=self.cell.getLogExtra(name=svcname, netw=svcnetw)) coro = self.cell.setAhaSvcDown(name, sess, network=network) self.cell.schedCoro(coro) # this will eventually execute or get cancelled. 
@@ -400,14 +400,14 @@ async def _getSharedItem(self, name): anam = conf.get('aha:name') anet = conf.get('aha:network') mesg = f'Retrieved service provisioning info for {anam}.{anet} iden {name}' - logger.info(mesg, extra=await self.aha.getLogExtra(iden=name, name=anam, netw=anet)) + logger.info(mesg, extra=self.aha.getLogExtra(iden=name, name=anam, netw=anet)) return ProvApi(self.aha, provinfo) userinfo = await self.aha.getAhaUserEnroll(name) if userinfo is not None: unam = userinfo.get('name') mesg = f'Retrieved user provisioning info for {unam} iden {name}' - logger.info(mesg, extra=await self.aha.getLogExtra(iden=name, name=unam)) + logger.info(mesg, extra=self.aha.getLogExtra(iden=name, name=unam)) await self.aha.delAhaUserEnroll(name) return EnrollApi(self.aha, userinfo) @@ -415,7 +415,7 @@ async def _getSharedItem(self, name): if clone is not None: host = clone.get('host') mesg = f'Retrieved AHA clone info for {host} iden {name}' - logger.info(mesg, extra=await self.aha.getLogExtra(iden=name, host=host)) + logger.info(mesg, extra=self.aha.getLogExtra(iden=name, host=host)) return CloneApi(self.aha, clone) mesg = f'Invalid provisioning identifier name={name}. 
This could be' \ @@ -471,7 +471,7 @@ async def signUserCsr(self, byts): raise s_exc.BadArg(mesg=mesg) logger.info(f'Signing user CSR for [{username}], signas={ahanetw}', - extra=await self.aha.getLogExtra(name=username, signas=ahanetw)) + extra=self.aha.getLogExtra(name=username, signas=ahanetw)) pkey, cert = self.aha.certdir.signUserCsr(xcsr, ahanetw, save=False) return self.aha.certdir._certToByts(cert) @@ -503,7 +503,7 @@ async def signHostCsr(self, byts): raise s_exc.BadArg(mesg=mesg) logger.info(f'Signing host CSR for [{hostname}], signas={ahanetw}', - extra=await self.aha.getLogExtra(name=hostname, signas=ahanetw)) + extra=self.aha.getLogExtra(name=hostname, signas=ahanetw)) pkey, cert = self.aha.certdir.signHostCsr(xcsr, ahanetw, save=False) return self.aha.certdir._certToByts(cert) @@ -522,7 +522,7 @@ async def signUserCsr(self, byts): raise s_exc.BadArg(mesg=mesg) logger.info(f'Signing user CSR for [{username}], signas={ahanetw}', - extra=await self.aha.getLogExtra(name=username, signas=ahanetw)) + extra=self.aha.getLogExtra(name=username, signas=ahanetw)) pkey, cert = self.aha.certdir.signUserCsr(xcsr, ahanetw, save=False) return self.aha.certdir._certToByts(cert) @@ -929,7 +929,7 @@ async def _clearInactiveSessions(self): linkiden = svc.get('svcinfo').get('online') if linkiden not in current_sessions: logger.info(f'AhaCell activecoro setting service offline [{svcname}.{network}]', - extra=await self.getLogExtra(name=svcname, netw=network)) + extra=self.getLogExtra(name=svcname, netw=network)) await self.setAhaSvcDown(svcname, linkiden, network=network) # Wait until we are cancelled or the cell is fini. 
@@ -1020,7 +1020,7 @@ async def addAhaSvc(self, name, info, network=None): unfo = info.get('urlinfo') logger.info(f'Adding service [{svcfull}] from [{unfo.get("scheme")}://{unfo.get("host")}:{unfo.get("port")}]', - extra=await self.getLogExtra(name=svcname, netw=svcnetw)) + extra=self.getLogExtra(name=svcname, netw=svcnetw)) svcinfo = { 'name': svcfull, @@ -1185,7 +1185,7 @@ async def delAhaSvc(self, name, network=None): name = self._getAhaName(name) svcname, svcnetw, svcfull = self._nameAndNetwork(name, network) - logger.info(f'Deleting service [{svcfull}].', extra=await self.getLogExtra(name=svcname, netw=svcnetw)) + logger.info(f'Deleting service [{svcfull}].', extra=self.getLogExtra(name=svcname, netw=svcnetw)) full = ('aha', 'svcfull', svcfull) path = ('aha', 'services', svcnetw, svcname) @@ -1228,7 +1228,7 @@ async def _setAhaSvcDown(self, name, linkiden, network=None): await self.fire(f'aha:svcdown:{svcfull}', svcname=svcname, svcnetw=svcnetw) logger.info(f'Set [{svcfull}] offline.', - extra=await self.getLogExtra(name=svcname, netw=svcnetw)) + extra=self.getLogExtra(name=svcname, netw=svcnetw)) client = self.clients.pop(svcfull, None) if client is not None: @@ -1316,7 +1316,7 @@ async def genCaCert(self, network): return fd.read().decode() logger.info(f'Generating CA certificate for {network}', - extra=await self.getLogExtra(netw=network)) + extra=self.getLogExtra(netw=network)) fut = s_coro.executor(self.certdir.genCaCert, network, save=False) pkey, cert = await fut @@ -1395,7 +1395,7 @@ async def signHostCsr(self, csrtext, signas=None, sans=None): signas = hostname.split('.', 1)[1] logger.info(f'Signing host CSR for [{hostname}], signas={signas}, sans={sans}', - extra=await self.getLogExtra(hostname=hostname, signas=signas)) + extra=self.getLogExtra(hostname=hostname, signas=signas)) pkey, cert = self.certdir.signHostCsr(xcsr, signas=signas, sans=sans) @@ -1414,7 +1414,7 @@ async def signUserCsr(self, csrtext, signas=None): signas = username.split('@', 
1)[1] logger.info(f'Signing user CSR for [{username}], signas={signas}', - extra=await self.getLogExtra(name=username, signas=signas)) + extra=self.getLogExtra(name=username, signas=signas)) pkey, cert = self.certdir.signUserCsr(xcsr, signas=signas) @@ -1472,7 +1472,7 @@ async def addAhaClone(self, host, port=27492, conf=None): await self._push('aha:clone:add', clone) logger.info(f'Created AHA clone provisioning for {host} with iden {iden}', - extra=await self.getLogExtra(iden=iden, name=host, netw=network)) + extra=self.getLogExtra(iden=iden, name=host, netw=network)) return self._getProvClientUrl(iden) @@ -1555,7 +1555,7 @@ async def addAhaSvcProv(self, name, provinfo=None): iden = await self._push('aha:svc:prov:add', provinfo) logger.info(f'Created service provisioning for {name}.{netw} with iden {iden}', - extra=await self.getLogExtra(iden=iden, name=name, netw=netw)) + extra=self.getLogExtra(iden=iden, name=name, netw=netw)) return self._getProvClientUrl(iden) @@ -1661,7 +1661,7 @@ async def addAhaUserEnroll(self, name, userinfo=None, again=False): iden = await self._push('aha:enroll:add', userinfo) logger.info(f'Created user provisioning for {name} with iden {iden}', - extra=await self.getLogExtra(iden=iden, name=name)) + extra=self.getLogExtra(iden=iden, name=name)) return self._getProvClientUrl(iden) diff --git a/synapse/lib/cell.py b/synapse/lib/cell.py index d7cda728924..ff164c08636 100644 --- a/synapse/lib/cell.py +++ b/synapse/lib/cell.py @@ -48,6 +48,7 @@ import synapse.lib.certdir as s_certdir import synapse.lib.dyndeps as s_dyndeps import synapse.lib.httpapi as s_httpapi +import synapse.lib.logging as s_logging import synapse.lib.msgpack as s_msgpack import synapse.lib.schemas as s_schemas import synapse.lib.spooled as s_spooled @@ -105,8 +106,8 @@ def wrapped(self, *args, **kwargs): raise s_exc.AuthDeny(mesg=f'User is not an admin [{self.user.name}]', user=self.user.iden, username=self.user.name) if log: - logger.info(f'Executing 
[{func.__qualname__}] as [{self.user.name}] with args [{args}[{kwargs}]', - extra={'synapse': {'wrapped_func': func.__qualname__}}) + extra = s_logging.getLogExtra(func=func.__qualname__, args=args, kwargs=kwargs) + logger.info(f'Admin API invoked', extra=extra) return func(self, *args, **kwargs) @@ -184,14 +185,11 @@ async def _iterBackupWork(path, linkinfo): logger.info(f'Backup streaming for [{path}] completed.') -def _iterBackupProc(path, linkinfo): +def _iterBackupProc(path, linkinfo, logconf): ''' Multiprocessing target for streaming a backup. ''' - # This logging call is okay to run since we're executing in - # our own process space and no logging has been configured. - s_common.setlogging(logger, **linkinfo.get('logconf')) - + s_logging.setup(**logconf) logger.info(f'Backup streaming process for [{path}] starting.') asyncio.run(_iterBackupWork(path, linkinfo)) @@ -222,6 +220,15 @@ async def freeze(self, timeout=30): async def resume(self): return await self.cell.resume() + @adminapi() + async def logs(self, last=100): + return await self.cell.logs(last=last) + + @adminapi() + async def watch(self, last=100): + async for item in self.cell.watch(last=last): + yield item + async def allowed(self, perm, default=None): ''' Check if the user has the requested permission. @@ -1236,6 +1243,7 @@ async def __anit__(self, dirn, conf=None, readonly=False, parent=None): ahanetw = self.conf.get('aha:network') if ahaname is not None and ahanetw is not None: self.ahasvcname = f'{ahaname}.{ahanetw}' + s_logging.setLogInfo('service', self.ahasvcname) # each cell has a guid path = s_common.genpath(self.dirn, 'cell.guid') @@ -1581,7 +1589,7 @@ async def shutdown(self, timeout=None): Execute a graceful shutdown by allowing any promoted boss tasks to complete and prevents the boss from accepting additional tasks. 
''' - extra = await self.getLogExtra() + extra = self.getLogExtra() logger.warning('Graceful shutdown initiated...', extra=extra) # if we're the leader, lets see if we can handoff... @@ -1821,7 +1829,7 @@ async def _runSysctlLoop(self): mesg += 'See https://synapse.docs.vertex.link/en/latest/synapse/devopsguide.html#performance-tuning ' mesg += 'for information about these sysctl parameters.' - extra = await self.getLogExtra(sysctls=fixvals) + extra = self.getLogExtra(sysctls=fixvals) logger.warning(mesg, extra=extra) await self.waitfini(self.SYSCTL_CHECK_FREQ) @@ -2410,7 +2418,7 @@ async def demote(self, peers=None, timeout=None): logger.warning('Service demotion requested. Locating a suitable service for promotion...') - extra = await self.getLogExtra() + extra = self.getLogExtra() if not self.isactive: logger.warning('...service is not the leader. Aborting demotion.', extra=extra) @@ -2766,8 +2774,9 @@ async def _execBackupTask(self, dirn): mypipe, child_pipe = ctx.Pipe() paths = [str(slab.path) for slab in slabs] - logconf = await self._getSpawnLogConf() + proc = None + logconf = self.getLogConf() try: @@ -2825,7 +2834,7 @@ def _backupProc(pipe, srcdir, dstdir, lmdbpaths, logconf): (In a separate process) Actually do the backup ''' # This is a new process: configure logging - s_common.setlogging(logger, **logconf) + s_logging.setup(**logconf) try: with s_t_backup.capturelmdbs(srcdir) as lmdbinfo: @@ -2883,15 +2892,15 @@ async def _streamBackupArchive(self, path, user, name): mesg = 'Link not found in scope. This API must be called via a CellApi.' 
raise s_exc.SynErr(mesg=mesg) + logconf = self.getLogConf() linkinfo = await link.getSpawnInfo() - linkinfo['logconf'] = await self._getSpawnLogConf() await self.boss.promote('backup:stream', user=user, info={'name': name}) ctx = multiprocessing.get_context('spawn') def getproc(): - proc = ctx.Process(target=_iterBackupProc, args=(path, linkinfo)) + proc = ctx.Process(target=_iterBackupProc, args=(path, linkinfo, logconf)) proc.start() return proc @@ -3042,7 +3051,7 @@ async def addUserRule(self, iden, rule, indx=None, gateiden=None): user = await self.auth.reqUser(iden) retn = await user.addRule(rule, indx=indx, gateiden=gateiden) logger.info(f'Added rule={rule} on user {user.name} for gateiden={gateiden}', - extra=await self.getLogExtra(target_user=user.iden, target_username=user.name, + extra=self.getLogExtra(target_user=user.iden, target_username=user.name, rule=rule, gateiden=gateiden, status='MODIFY')) return retn @@ -3050,21 +3059,21 @@ async def addRoleRule(self, iden, rule, indx=None, gateiden=None): role = await self.auth.reqRole(iden) retn = await role.addRule(rule, indx=indx, gateiden=gateiden) logger.info(f'Added rule={rule} on role {role.name} for gateiden={gateiden}', - extra=await self.getLogExtra(target_role=role.iden, target_rolename=role.name, + extra=self.getLogExtra(target_role=role.iden, target_rolename=role.name, rule=rule, gateiden=gateiden, status='MODIFY')) return retn async def delUserRule(self, iden, rule, gateiden=None): user = await self.auth.reqUser(iden) logger.info(f'Removing rule={rule} on user {user.name} for gateiden={gateiden}', - extra=await self.getLogExtra(target_user=user.iden, target_username=user.name, + extra=self.getLogExtra(target_user=user.iden, target_username=user.name, rule=rule, gateiden=gateiden, status='MODIFY')) return await user.delRule(rule, gateiden=gateiden) async def delRoleRule(self, iden, rule, gateiden=None): role = await self.auth.reqRole(iden) logger.info(f'Removing rule={rule} on role {role.name} for 
gateiden={gateiden}', - extra=await self.getLogExtra(target_role=role.iden, target_rolename=role.name, + extra=self.getLogExtra(target_role=role.iden, target_rolename=role.name, rule=rule, gateiden=gateiden, status='MODIFY')) return await role.delRule(rule, gateiden=gateiden) @@ -3072,14 +3081,14 @@ async def setUserRules(self, iden, rules, gateiden=None): user = await self.auth.reqUser(iden) await user.setRules(rules, gateiden=gateiden) logger.info(f'Set user rules = {rules} on user {user.name} for gateiden={gateiden}', - extra=await self.getLogExtra(target_user=user.iden, target_username=user.name, + extra=self.getLogExtra(target_user=user.iden, target_username=user.name, rules=rules, gateiden=gateiden, status='MODIFY')) async def setRoleRules(self, iden, rules, gateiden=None): role = await self.auth.reqRole(iden) await role.setRules(rules, gateiden=gateiden) logger.info(f'Set role rules = {rules} on role {role.name} for gateiden={gateiden}', - extra=await self.getLogExtra(target_role=role.iden, target_rolename=role.name, + extra=self.getLogExtra(target_role=role.iden, target_rolename=role.name, rules=rules, gateiden=gateiden, status='MODIFY')) async def setRoleName(self, iden, name): @@ -3087,14 +3096,14 @@ async def setRoleName(self, iden, name): oname = role.name await role.setName(name) logger.info(f'Set name={name} from {oname} on role iden={role.iden}', - extra=await self.getLogExtra(target_role=role.iden, target_rolename=role.name, + extra=self.getLogExtra(target_role=role.iden, target_rolename=role.name, status='MODIFY')) async def setUserAdmin(self, iden, admin, gateiden=None): user = await self.auth.reqUser(iden) await user.setAdmin(admin, gateiden=gateiden) logger.info(f'Set admin={admin} for {user.name} for gateiden={gateiden}', - extra=await self.getLogExtra(target_user=user.iden, target_username=user.name, + extra=self.getLogExtra(target_user=user.iden, target_username=user.name, gateiden=gateiden, status='MODIFY')) async def addUserRole(self, 
useriden, roleiden, indx=None): @@ -3102,7 +3111,7 @@ async def addUserRole(self, useriden, roleiden, indx=None): role = await self.auth.reqRole(roleiden) await user.grant(roleiden, indx=indx) logger.info(f'Granted role {role.name} to user {user.name}', - extra=await self.getLogExtra(target_user=user.iden, target_username=user.name, + extra=self.getLogExtra(target_user=user.iden, target_username=user.name, target_role=role.iden, target_rolename=role.name, status='MODIFY')) @@ -3110,7 +3119,7 @@ async def setUserRoles(self, useriden, roleidens): user = await self.auth.reqUser(useriden) await user.setRoles(roleidens) logger.info(f'Set roleidens={roleidens} on user {user.name}', - extra=await self.getLogExtra(target_user=user.iden, target_username=user.name, + extra=self.getLogExtra(target_user=user.iden, target_username=user.name, roleidens=roleidens, status='MODIFY')) async def delUserRole(self, useriden, roleiden): @@ -3118,14 +3127,14 @@ async def delUserRole(self, useriden, roleiden): role = await self.auth.reqRole(roleiden) await user.revoke(roleiden) logger.info(f'Revoked role {role.name} from user {user.name}', - extra=await self.getLogExtra(target_user=user.iden, target_username=user.name, + extra=self.getLogExtra(target_user=user.iden, target_username=user.name, target_role=role.iden, target_rolename=role.name, status='MODIFY')) async def addUser(self, name, passwd=None, email=None, iden=None): user = await self.auth.addUser(name, passwd=passwd, email=email, iden=iden) logger.info(f'Added user={name}', - extra=await self.getLogExtra(target_user=user.iden, target_username=user.name, + extra=self.getLogExtra(target_user=user.iden, target_username=user.name, status='CREATE')) return user.pack(packroles=True) @@ -3134,12 +3143,12 @@ async def delUser(self, iden): name = user.name await self.auth.delUser(iden) logger.info(f'Deleted user={name}', - extra=await self.getLogExtra(target_user=iden, target_username=name, status='DELETE')) + 
extra=self.getLogExtra(target_user=iden, target_username=name, status='DELETE')) async def addRole(self, name, iden=None): role = await self.auth.addRole(name, iden=iden) logger.info(f'Added role={name}', - extra=await self.getLogExtra(target_role=role.iden, target_rolename=role.name, status='CREATE')) + extra=self.getLogExtra(target_role=role.iden, target_rolename=role.name, status='CREATE')) return role.pack() async def delRole(self, iden): @@ -3147,26 +3156,26 @@ async def delRole(self, iden): name = role.name await self.auth.delRole(iden) logger.info(f'Deleted role={name}', - extra=await self.getLogExtra(target_role=iden, target_rolename=name, status='DELETE')) + extra=self.getLogExtra(target_role=iden, target_rolename=name, status='DELETE')) async def setUserEmail(self, useriden, email): await self.auth.setUserInfo(useriden, 'email', email) user = await self.auth.reqUser(useriden) logger.info(f'Set email={email} for {user.name}', - extra=await self.getLogExtra(target_user=user.iden, target_username=user.name, status='MODIFY')) + extra=self.getLogExtra(target_user=user.iden, target_username=user.name, status='MODIFY')) async def setUserName(self, useriden, name): user = await self.auth.reqUser(useriden) oname = user.name await user.setName(name) logger.info(f'Set name={name} from {oname} on user iden={user.iden}', - extra=await self.getLogExtra(target_user=user.iden, target_username=user.name, status='MODIFY')) + extra=self.getLogExtra(target_user=user.iden, target_username=user.name, status='MODIFY')) async def setUserPasswd(self, iden, passwd): user = await self.auth.reqUser(iden) await user.setPasswd(passwd) logger.info(f'Set password for {user.name}', - extra=await self.getLogExtra(target_user=user.iden, target_username=user.name, status='MODIFY')) + extra=self.getLogExtra(target_user=user.iden, target_username=user.name, status='MODIFY')) async def genUserOnepass(self, iden, duration=600000): user = await self.auth.reqUser(iden) @@ -3178,7 +3187,7 @@ async 
def genUserOnepass(self, iden, duration=600000): await self.auth.setUserInfo(iden, 'onepass', onepass) logger.info(f'Issued one time password for {user.name}', - extra=await self.getLogExtra(target_user=user.iden, target_username=user.name, status='MODIFY')) + extra=self.getLogExtra(target_user=user.iden, target_username=user.name, status='MODIFY')) return passwd @@ -3186,13 +3195,13 @@ async def setUserLocked(self, iden, locked): user = await self.auth.reqUser(iden) await user.setLocked(locked) logger.info(f'Set lock={locked} for user {user.name}', - extra=await self.getLogExtra(target_user=user.iden, target_username=user.name, status='MODIFY')) + extra=self.getLogExtra(target_user=user.iden, target_username=user.name, status='MODIFY')) async def setUserArchived(self, iden, archived): user = await self.auth.reqUser(iden) await user.setArchived(archived) logger.info(f'Set archive={archived} for user {user.name}', - extra=await self.getLogExtra(target_user=user.iden, target_username=user.name, status='MODIFY')) + extra=self.getLogExtra(target_user=user.iden, target_username=user.name, status='MODIFY')) async def getUserDef(self, iden, packroles=True): user = self.auth.user(iden) @@ -3495,8 +3504,6 @@ def _log_web_request(self, handler: s_httpapi.Handler) -> None: if headers: enfo['headers'] = headers - extra = {'synapse': enfo} - # It is possible that a Cell implementor may register handlers which # do not derive from our Handler class, so we have to handle that. 
if hasattr(handler, 'web_useriden') and handler.web_useriden: @@ -3506,6 +3513,8 @@ def _log_web_request(self, handler: s_httpapi.Handler) -> None: username = handler.web_username enfo['username'] = username + extra = s_logging.getLogExtra(**enfo) + if user: mesg = f'{status} {handler.request.method} {uri} ({remote_ip}) user={user} ({username}) {request_time:.2f}ms' else: @@ -3870,7 +3879,7 @@ async def getCellApi(self, link, user, path): ''' return await self.cellapi.anit(self, link, user) - async def getLogExtra(self, **kwargs): + def getLogExtra(self, **kwargs): ''' Get an extra dictionary for structured logging which can be used as a extra argument for loggers. @@ -3880,25 +3889,16 @@ async def getLogExtra(self, **kwargs): Returns: Dict: A dictionary ''' - extra = {**kwargs} - sess = s_scope.get('sess') # type: s_daemon.Sess - user = s_scope.get('user') # type: s_auth.User - if user: - extra['user'] = user.iden - extra['username'] = user.name - elif sess and sess.user: - extra['user'] = sess.user.iden - extra['username'] = sess.user.name - return {'synapse': extra} - - async def _getSpawnLogConf(self): - conf = self.conf.get('_log_conf') - if conf: - conf = conf.copy() - else: - conf = s_common._getLogConfFromEnv() - conf['log_setup'] = False - return conf + extra = s_logging.getLogExtra(**kwargs) + if self.ahasvcname: + extra['loginfo']['service'] = self.ahasvcname + return extra + + def getLogConf(self): + logconf = s_logging.getLogConf() + if self.ahasvcname is not None: + logconf['loginfo']['service'] = self.ahasvcname + return logconf def modCellConf(self, conf): ''' @@ -4286,7 +4286,6 @@ async def _bootProvConf(self, provconf): ''' # replace our runtime config with the updated config with provconf data new_conf = self.initCellConf(self.conf) - new_conf.setdefault('_log_conf', await self._getSpawnLogConf()) # Load any opts we have and environment variables. 
new_conf.setConfFromOpts() @@ -4447,17 +4446,21 @@ async def initFromArgv(cls, argv, outp=None): path = s_common.genpath(opts.dirn, 'cell.yaml') mods_path = s_common.genpath(opts.dirn, 'cell.mods.yaml') - logconf = s_common.setlogging(logger, defval=opts.log_level, - structlog=opts.structured_logging) + logconf = s_logging.setup() + + levelrepr = s_const.LOG_LEVEL_INVERSE_CHOICES.get(logconf.get('level')) + logger.info(f'log level set to {levelrepr}') - logger.info(f'Starting {cls.getCellType()} version {cls.VERSTRING}, Synapse version: {s_version.verstring}', - extra={'synapse': {'svc_type': cls.getCellType(), 'svc_version': cls.VERSTRING, - 'synapse_version': s_version.verstring}}) + svcvers = cls.VERSTRING + svctype = cls.getCellType() + synvers = s_version.verstring + + extra = s_logging.getLogExtra(svctype=svctype, svcvers=svcvers, synvers=synvers) + logger.info(f'Starting {svctype} version {svcvers}, Synapse version: {synvers}', extra=extra) await cls._initBootRestore(opts.dirn) try: - conf.setdefault('_log_conf', logconf) conf.setConfFromOpts(opts) conf.setConfFromEnvs() conf.setConfFromFile(path) @@ -4466,8 +4469,6 @@ async def initFromArgv(cls, argv, outp=None): logger.exception(f'Error while bootstrapping cell config.') raise - s_processpool.set_pool_logging(logger, logconf=conf['_log_conf']) - try: cell = await cls.anit(opts.dirn, conf=conf) except: @@ -5027,7 +5028,7 @@ async def addUserApiKey(self, useriden, name, duration=None): await self._push('user:apikey:add', kdef) logger.info(f'Created HTTP API key {iden} for {user.name}, {name=}', - extra=await self.getLogExtra(target_user=user.iden, target_username=user.name, iden=iden, + extra=self.getLogExtra(target_user=user.iden, target_username=user.name, iden=iden, status='MODIFY')) kdef.pop('shadow') @@ -5181,7 +5182,7 @@ async def modUserApiKey(self, iden, key, valu): await self._push('user:apikey:edit', kdef.get('user'), iden, vals) logger.info(f'Updated HTTP API key {iden} for {user.name}, set 
{key}={valu}', - extra=await self.getLogExtra(target_user=user.iden, target_username=user.name, iden=iden, + extra=self.getLogExtra(target_user=user.iden, target_username=user.name, iden=iden, status='MODIFY')) kdef.pop('shadow') @@ -5215,7 +5216,7 @@ async def delUserApiKey(self, iden): user = await self.auth.reqUser(useriden) ret = await self._push('user:apikey:del', useriden, iden) logger.info(f'Deleted HTTP API key {iden} for {user.name}', - extra=await self.getLogExtra(target_user=user.iden, target_username=user.name, iden=iden, + extra=self.getLogExtra(target_user=user.iden, target_username=user.name, iden=iden, status='MODIFY')) return ret @@ -5256,7 +5257,7 @@ async def _onUserLockEvnt(self, evnt): username = sess.info.get('username', '') await self._delHttpSess(iden) logger.info(f'Invalidated HTTP session for locked user {username}', - extra=await self.getLogExtra(target_user=useriden)) + extra=self.getLogExtra(target_user=useriden)) def _makeCachedSslCtx(self, opts): @@ -5373,3 +5374,10 @@ async def resume(self): self.paused = False self.nexslock.release() + + async def logs(self, last=100): + return s_logging.logs(last=last) + + async def watch(self, last=100): + async for item in s_logging.watch(last=last): + yield item diff --git a/synapse/lib/cmd.py b/synapse/lib/cmd.py index 17de13bd285..265bfd566d9 100644 --- a/synapse/lib/cmd.py +++ b/synapse/lib/cmd.py @@ -43,7 +43,14 @@ def _print_message(self, text, fd=None): ''' self.outp.printf(text) -async def wrapmain(func): # pragma: no cover +async def wrapmain(func, logconf=None): # pragma: no cover + + if logconf is None: + logconf = {'level': 'DEBUG'} + + import synapse.lib.logging as s_logging + + s_logging.setup(**logconf) try: return await func(sys.argv[1:]) @@ -58,5 +65,5 @@ async def wrapmain(func): # pragma: no cover finally: await s_coro.await_bg_tasks(timeout=10) -def exitmain(func): # pragma: no cover - sys.exit(asyncio.run(wrapmain(func))) +def exitmain(func, logconf=None): # pragma: no cover 
+ sys.exit(asyncio.run(wrapmain(func, logconf=logconf))) diff --git a/synapse/lib/coro.py b/synapse/lib/coro.py index a43a5af725c..b9d013283d7 100644 --- a/synapse/lib/coro.py +++ b/synapse/lib/coro.py @@ -12,6 +12,8 @@ import synapse.glob as s_glob import synapse.common as s_common +import synapse.lib.logging as s_logging + def iscoro(item): return inspect.iscoroutine(item) @@ -155,6 +157,13 @@ async def ornot(func, *args, **kwargs): return await retn return retn +def has_running_loop(): + try: + asyncio.get_running_loop() + return True + except RuntimeError: + return False + bgtasks = set() def create_task(coro): diff --git a/synapse/lib/httpapi.py b/synapse/lib/httpapi.py index 586e8fb995f..e24017705da 100644 --- a/synapse/lib/httpapi.py +++ b/synapse/lib/httpapi.py @@ -13,6 +13,7 @@ import synapse.lib.base as s_base import synapse.lib.json as s_json +import synapse.lib.logging as s_logging import synapse.lib.msgpack as s_msgpack import synapse.lib.schemas as s_schemas @@ -225,7 +226,7 @@ def logAuthIssue(self, mesg=None, user=None, username=None, level=logging.WARNIN if username: errm = f'{errm} ({username})' enfo['username'] = username - logger.log(level, msg=errm, extra={'synapse': enfo}) + logger.log(level, msg=errm, extra=s_logging.getLogExtra(**enfo)) def sendAuthRequired(self): self.set_header('WWW-Authenticate', 'Basic realm=synapse') diff --git a/synapse/lib/lmdbslab.py b/synapse/lib/lmdbslab.py index 145a70c0d86..c41e409c1ba 100644 --- a/synapse/lib/lmdbslab.py +++ b/synapse/lib/lmdbslab.py @@ -18,6 +18,7 @@ import synapse.lib.cache as s_cache import synapse.lib.const as s_const import synapse.lib.nexus as s_nexus +import synapse.lib.logging as s_logging import synapse.lib.msgpack as s_msgpack import synapse.lib.thishost as s_thishost import synapse.lib.thisplat as s_thisplat @@ -1806,7 +1807,7 @@ def forcecommit(self): } mesg = f'Commit with {xactopslen} items in {self!r} took {delta} ms - performance may be degraded.' 
- logger.warning(mesg, extra={'synapse': extra}) + logger.warning(mesg, extra=s_logging.getLogExtra(**extra)) self._initCoXact() return True diff --git a/synapse/lib/logging.py b/synapse/lib/logging.py new file mode 100644 index 00000000000..e4ef475705d --- /dev/null +++ b/synapse/lib/logging.py @@ -0,0 +1,322 @@ +import os +import sys +import asyncio +import logging +import weakref +import traceback +import collections + +import synapse.exc as s_exc +import synapse.common as s_common + +import synapse.lib.coro as s_coro +import synapse.lib.json as s_json +import synapse.lib.const as s_const +import synapse.lib.scope as s_scope +import synapse.lib.version as s_version + +logger = logging.getLogger(__name__) + +_log_wins = weakref.WeakSet() + +def excinfo(e, _seen=None): + + if _seen is None: + _seen = set() + + _seen.add(e) + + tb = [] + for path, line, func, sorc in traceback.extract_tb(e.__traceback__): + tb.append((path, line, func, sorc)) + + ret = { + 'code': e.__class__.__name__, + 'traceback': tb, + } + + if notes := getattr(e, '__notes__', None): + ret['notes'] = tuple(notes) + + if (cause := getattr(e, '__cause__', None)) is not None: + if isinstance(cause, Exception) and cause not in _seen: + ret['cause'] = excinfo(cause, _seen=_seen) + + if (context := getattr(e, '__context__', None)) is not None: + if isinstance(context, Exception) and context not in _seen: + ret['context'] = excinfo(context, _seen=_seen) + + if isinstance(e, s_exc.SynErr): + ret['info'] = e.errinfo.copy() + ret['mesg'] = ret['info'].pop('mesg', None) + + if ret.get('mesg') is None: + ret['mesg'] = str(e) + + return ret + +_glob_loginfo = {} +def setLogInfo(name, valu): + ''' + Configure global values which should be added to every log. + ''' + _glob_loginfo[name] = valu + +def getLogExtra(**kwargs): + ''' + Construct a properly enveloped log extra dictionary. 
+ ''' + extra = {'params': kwargs, 'loginfo': {}} + return extra + +class JsonFormatter(logging.Formatter): + + def genLogInfo(self, record): + + record.message = record.getMessage() + + loginfo = { + 'message': record.message, + 'logger': { + 'name': record.name, + 'func': record.funcName, + }, + 'level': record.levelname, + 'time': self.formatTime(record, self.datefmt), + } + + loginfo.update(_glob_loginfo) + + if hasattr(record, 'loginfo'): + loginfo.update(record.loginfo) + + if (user := s_scope.get('user')) is not None: + loginfo['user'] = user.iden + loginfo['username'] = user.name + + elif (sess := s_scope.get('sess')) is not None: + loginfo['sess'] = sess.iden + if sess.user is not None: + loginfo['user'] = sess.user.iden + loginfo['username'] = sess.user.name + + if record.exc_info: + loginfo['error'] = excinfo(record.exc_info[1]) + + if not hasattr(record, 'params'): + record.params = {} + + loginfo['params'] = record.params + + # the subsequent emit() will set the event + StreamHandler._logs_fifo.append(loginfo) + StreamHandler._logs_todo.append(loginfo) + + return loginfo + + def format(self, record): + loginfo = self.genLogInfo(record) + return s_json.dumps(loginfo, default=str).decode() + +class TextFormatter(JsonFormatter): + + def __init__(self, *args, **kwargs): + kwargs['fmt'] = s_const.LOG_FORMAT + return super().__init__(*args, **kwargs) + + def format(self, record): + # this is required to send the structured data + loginfo = self.genLogInfo(record) + return logging.Formatter.format(self, record) + +class StreamHandler(logging.StreamHandler): + + _pump_task = None + _pump_event = None + _glob_handler = None + + _logs_fifo = collections.deque(maxlen=1000) + _logs_todo = collections.deque(maxlen=1000) + _text_todo = collections.deque(maxlen=1000) + + def emit(self, record): + + if self._pump_task is None: + return logging.StreamHandler.emit(self, record) + + try: + text = self.format(record) + self._text_todo.append(text) + self._pump_event.set() 
+ + # emulating behavior of parent class + except RecursionError: # pragma: no cover + raise + + except Exception as e: # pragma: no cover + self.handleError(record) + +def _writestderr(text): + sys.stderr.write(text) + sys.stderr.flush() + +async def _pumpLogStream(): + + while True: + + try: + await StreamHandler._pump_event.wait() + + logstodo = tuple(StreamHandler._logs_todo) + texttodo = tuple(StreamHandler._text_todo) + + if not logstodo and not texttodo: + StreamHandler._pump_event.clear() + continue + + StreamHandler._logs_todo.clear() + StreamHandler._text_todo.clear() + StreamHandler._pump_event.clear() + + fulltext = '\n'.join(texttodo) + '\n' + + for wind in _log_wins: + await wind.puts(logstodo) + + await s_coro.executor(_writestderr, fulltext) + + except Exception as e: + traceback.print_exc() + +def logs(last=100): + return tuple(StreamHandler._logs_fifo)[-last:] + +async def watch(last=100): + # avoid a circular import... + import synapse.lib.queue as s_queue + async with await s_queue.Window.anit(maxsize=10000) as window: + await window.puts(logs(last=last)) + _log_wins.add(window) + async for item in window: + yield item + +_glob_logconf = {} + +def setup(**conf): + ''' + Configure synapse logging. + + NOTE: If this API is invoked while there is a running + asyncio loop, it will automatically enter async + mode and fire a task to pump log events without + blocking. + ''' + conf.update(getLogConfFromEnv()) + + if conf.get('level') is None: + conf['level'] = logging.INFO + + if conf.get('structlog') is None: + conf['structlog'] = False + + fmtclass = JsonFormatter + if not conf.get('structlog'): + fmtclass = TextFormatter + + if s_coro.has_running_loop() and StreamHandler._pump_task is None: + StreamHandler._pump_event = asyncio.Event() + StreamHandler._pump_task = s_coro.create_task(_pumpLogStream()) + + # this is used to pass things like service name + # to child processes and forked workers... 
+ loginfo = conf.pop('loginfo', None) + if loginfo is not None: + _glob_loginfo.update(loginfo) + + _glob_logconf.clear() + _glob_logconf.update(conf) + + rootlogger = logging.getLogger() + + level = normLogLevel(conf.get('level')) + rootlogger.setLevel(level) + + if StreamHandler._glob_handler is None: + handler = StreamHandler() + handler.setFormatter(fmtclass(datefmt=conf.get('datefmt'))) + StreamHandler._glob_handler = handler + rootlogger.handlers.append(handler) + + return conf + +def reset(): + # This may be called by tests to cleanup loop specific objects + # ( it does not need to be called by in general by service fini ) + + if StreamHandler._glob_handler is not None: + rootlogger = logging.getLogger() + rootlogger.handlers.remove(StreamHandler._glob_handler) + + if StreamHandler._pump_task is not None: + StreamHandler._pump_task.cancel() + + StreamHandler._pump_task = None + StreamHandler._pump_event = None + StreamHandler._glob_handler = None + StreamHandler._text_todo.clear() + StreamHandler._logs_fifo.clear() + StreamHandler._logs_todo.clear() + + _glob_logconf.clear() + _glob_loginfo.clear() + +def getLogConf(): + logconf = _glob_logconf.copy() + logconf['loginfo'] = _glob_loginfo.copy() + return logconf + +def getLogConfFromEnv(): + + conf = {} + + if (level := os.getenv('SYN_LOG_LEVEL')) is not None: + conf['level'] = normLogLevel(level) + + if (datefmt := os.getenv('SYN_LOG_DATEFORMAT')) is not None: + conf['datefmt'] = datefmt + + if (structlog := os.getenv('SYN_LOG_STRUCT')) is not None: + conf['structlog'] = structlog.lower() in ('1', 'true') + + return conf + +def normLogLevel(valu): + ''' + Normalize a log level value to an integer. + + Args: + valu: The value to norm ( a string or integer ). + + Returns: + int: A valid log level integer. 
+ ''' + if isinstance(valu, str): + + valu = valu.strip() + level = s_const.LOG_LEVEL_CHOICES.get(valu.upper()) + if level is not None: + return level + + try: + valu = int(valu) + except ValueError: + raise s_exc.BadArg(mesg=f'Invalid log level provided: {valu}', valu=valu) from None + + if isinstance(valu, int): + + if valu not in s_const.LOG_LEVEL_INVERSE_CHOICES: + raise s_exc.BadArg(mesg=f'Invalid log level provided: {valu}', valu=valu) + + return valu + + raise s_exc.BadArg(mesg=f'Unknown log level type: {type(valu)} {valu}', valu=valu) diff --git a/synapse/lib/process.py b/synapse/lib/process.py index 5d5d69fb985..1367152054f 100644 --- a/synapse/lib/process.py +++ b/synapse/lib/process.py @@ -12,10 +12,11 @@ import synapse.common as s_common import synapse.lib.coro as s_coro +import synapse.lib.logging as s_logging def _exectodo(que, todo, logconf): # This is a new process: configure logging - s_common.setlogging(logger, **logconf) + s_logging.setup(**logconf) func, args, kwargs = todo try: ret = func(*args, **kwargs) @@ -31,7 +32,7 @@ def _exectodo(que, todo, logconf): exc = s_exc.SynErr(mesg=mesg, name=name, info=info) que.put(exc) -async def spawn(todo, timeout=None, ctx=None, log_conf=None): +async def spawn(todo, timeout=None, ctx=None, logconf=None): ''' Run a todo (func, args, kwargs) tuple in a multiprocessing subprocess. @@ -39,7 +40,7 @@ async def spawn(todo, timeout=None, ctx=None, log_conf=None): todo (tuple): A tuple of function, ``*args``, and ``**kwargs``. timeout (int): The timeout to wait for the todo function to finish. ctx (multiprocess.Context): A optional multiprocessing context object. - log_conf (dict): An optional logging configuration for the spawned process. + logconf (dict): An optional logging configuration for the spawned process. Notes: The contents of the todo tuple must be able to be pickled for execution. 
@@ -50,12 +51,12 @@ async def spawn(todo, timeout=None, ctx=None, log_conf=None): ''' if ctx is None: ctx = multiprocessing.get_context('spawn') - if log_conf is None: - log_conf = {} + + if logconf is None: + logconf = {} que = ctx.Queue() - proc = ctx.Process(target=_exectodo, - args=(que, todo, log_conf)) + proc = ctx.Process(target=_exectodo, args=(que, todo, logconf)) def execspawn(): diff --git a/synapse/lib/processpool.py b/synapse/lib/processpool.py index d47e0c06856..281e29bbf54 100644 --- a/synapse/lib/processpool.py +++ b/synapse/lib/processpool.py @@ -15,6 +15,7 @@ import synapse.exc as s_exc import synapse.common as s_common +import synapse.lib.logging as s_logging import synapse.lib.process as s_process forkpool = None @@ -37,20 +38,6 @@ def _runtodo(todo): # pragma: no cover return todo[0](*todo[1], **todo[2]) -def _init_pool_worker(logger_, logconf): # prama: no cover - s_common.setlogging(logger_, **logconf) - p = multiprocessing.current_process() - logger.debug(f'Initialized new forkserver pool worker: name={p.name} pid={p.ident}') - -_pool_logconf = None -def set_pool_logging(logger_, logconf): - # This must be called before any calls to forked() and _parserforked() - global _pool_logconf - _pool_logconf = logconf - todo = s_common.todo(_init_pool_worker, logger_, logconf) - if forkpool is not None: - forkpool._initializer = _runtodo - forkpool._initargs = (todo,) async def forked(func, *args, **kwargs): ''' @@ -74,7 +61,7 @@ async def forked(func, *args, **kwargs): logger.exception(f'Shared forkserver pool is broken, fallback enabled: {func}') logger.debug(f'Forkserver pool using spawn fallback: {func}') - return await s_process.spawn(todo, log_conf=_pool_logconf) + return await s_process.spawn(todo, logconf=s_logging.getLogConf()) async def semafork(func, *args, **kwargs): ''' diff --git a/synapse/lib/scope.py b/synapse/lib/scope.py index a504cd553d6..c30698f3dbb 100644 --- a/synapse/lib/scope.py +++ b/synapse/lib/scope.py @@ -115,7 +115,7 @@ 
def copy(self): globscope = Scope(dict()) -def _task_scope() -> Scope: +def _task_scope(): ''' Get the current task scope. If the _syn_scope is not set, set it to a new scope that inherits from the globscope. @@ -126,7 +126,15 @@ def _task_scope() -> Scope: Returns: Scope: A Scope object. ''' - task = asyncio.current_task() + try: + task = asyncio.current_task() + except RuntimeError: + # no running loop + return None + + if task is None: + return None + scope = getattr(task, '_syn_scope', None) # no need to lock because it's per-task... @@ -140,7 +148,11 @@ def get(name, defval=None): ''' Access this task's scope with default values from glob. ''' - return _task_scope().get(name, defval=defval) + scope = _task_scope() + if scope is None: + return defval + + return scope.get(name, defval=defval) def set(name, valu): ''' diff --git a/synapse/lib/storm.py b/synapse/lib/storm.py index ca5de19c6f7..368fcb46d37 100644 --- a/synapse/lib/storm.py +++ b/synapse/lib/storm.py @@ -1402,8 +1402,6 @@ async def __anit__(self, query, snap, opts=None, user=None, root=None): self.query = query - self.spawn_log_conf = await self.snap.core._getSpawnLogConf() - self.readonly = opts.get('readonly', False) # EXPERIMENTAL: Make it safe to run untrusted queries self.model = snap.core.getDataModel() diff --git a/synapse/lib/stormlib/log.py b/synapse/lib/stormlib/log.py index efaa6f02d52..6e5b80cb07e 100644 --- a/synapse/lib/stormlib/log.py +++ b/synapse/lib/stormlib/log.py @@ -135,15 +135,17 @@ def getObjLocals(self): } async def _getExtra(self, extra=None): + if extra is None: - return extra + extra = {} + extra = await s_stormtypes.toprim(extra) if extra and not isinstance(extra, dict): mesg = f'extra provided to log call must be a dictionary compatible type. Got {extra.__class__.__name__} ' \ f'instead.' 
raise s_exc.BadArg(mesg=mesg, arg='extra') - extra = {'synapse': extra} - return extra + + return self.runt.snap.core.getLogExtra(**extra) @s_stormtypes.stormfunc(readonly=True) async def _logDebug(self, mesg, extra=None): diff --git a/synapse/lib/stormlib/storm.py b/synapse/lib/stormlib/storm.py index 35cfd3df559..447729966e1 100644 --- a/synapse/lib/stormlib/storm.py +++ b/synapse/lib/stormlib/storm.py @@ -60,7 +60,7 @@ async def execStormCmd(self, runt, genr): text = await s_stormtypes.tostr(self.opts.query) query = await runt.getStormQuery(text) - extra = await self.runt.snap.core.getLogExtra(text=text, view=self.runt.snap.view.iden) + extra = self.runt.snap.core.getLogExtra(text=text, view=self.runt.snap.view.iden) stormlogger.info(f'Executing storm query via storm.exec {{{text}}} as [{self.runt.user.name}]', extra=extra) async with runt.getSubRuntime(query) as subr: @@ -76,7 +76,7 @@ async def execStormCmd(self, runt, genr): text = await s_stormtypes.tostr(self.opts.query) query = await runt.getStormQuery(text) - extra = await self.runt.snap.core.getLogExtra(text=text, view=self.runt.snap.view.iden) + extra = self.runt.snap.core.getLogExtra(text=text, view=self.runt.snap.view.iden) stormlogger.info(f'Executing storm query via storm.exec {{{text}}} as [{self.runt.user.name}]', extra=extra) async with runt.getSubRuntime(query) as subr: @@ -91,7 +91,7 @@ async def execStormCmd(self, runt, genr): subr.query = query subr._initRuntVars(query) - extra = await self.runt.snap.core.getLogExtra(text=text, view=self.runt.snap.view.iden) + extra = self.runt.snap.core.getLogExtra(text=text, view=self.runt.snap.view.iden) stormlogger.info(f'Executing storm query via storm.exec {{{text}}} as [{self.runt.user.name}]', extra=extra) async for subp in subr.execute(genr=s_common.agen(item)): @@ -153,7 +153,7 @@ async def _evalStorm(self, text, cast=None): cast = await s_stormtypes.tostr(cast, noneok=True) if self.runt.snap.core.stormlog: - extra = await 
self.runt.snap.core.getLogExtra(text=text, view=self.runt.snap.view.iden) + extra = self.runt.snap.core.getLogExtra(text=text, view=self.runt.snap.view.iden) stormlogger.info(f'Executing storm query via $lib.storm.eval() {{{text}}} as [{self.runt.user.name}]', extra=extra) casttype = None diff --git a/synapse/lib/view.py b/synapse/lib/view.py index 8e8cb8a8f03..ced3afcff9c 100644 --- a/synapse/lib/view.py +++ b/synapse/lib/view.py @@ -968,8 +968,8 @@ async def callStorm(self, text, opts=None): return await s_stormtypes.toprim(e.item) except asyncio.CancelledError: - logger.warning(f'callStorm cancelled', - extra={'synapse': {'text': text, 'username': user.name, 'user': user.iden}}) + extra = self.core.getLogExtra(text=text) + logger.warning(f'callStorm cancelled', extra=extra) raise except (s_stormctrl.StormLoopCtrl, s_stormctrl.StormGenrCtrl) as e: @@ -978,12 +978,13 @@ async def callStorm(self, text, opts=None): else: mesg = f'Generator control statement "{e.statement}" used outside of a generator function.' 
logmesg = f'Error during storm execution for {{ {text} }} - {mesg}' - logger.exception(logmesg, extra={'synapse': {'text': text, 'username': user.name, 'user': user.iden}}) + extra = self.core.getLogExtra(text=text) + logger.exception(logmesg, extra=extra) raise s_exc.StormRuntimeError(mesg=mesg, statement=e.statement, highlight=e.get('highlight')) from e except Exception: - logger.exception(f'Error during callStorm execution for {{ {text} }}', - extra={'synapse': {'text': text, 'username': user.name, 'user': user.iden}}) + extra = self.core.getLogExtra(text=text) + logger.exception(f'Error during callStorm execution for {{ {text} }}', extra=extra) raise # Any other exceptions will be raised to @@ -1081,8 +1082,8 @@ async def runStorm(): pass except asyncio.CancelledError: - logger.warning('Storm runtime cancelled.', - extra={'synapse': {'text': text, 'username': user.name, 'user': user.iden}}) + extra = self.core.getLogExtra(text=text) + logger.warning('Storm runtime cancelled.', extra=extra) cancelled = True raise @@ -1097,7 +1098,10 @@ async def runStorm(): logmesg = f'Error during storm execution for {{ {text} }}' if mesg: logmesg = f'{logmesg} - {mesg}' - logger.exception(logmesg, extra={'synapse': {'text': text, 'username': user.name, 'user': user.iden}}) + + extra = self.core.getLogExtra(text=text) + logger.exception(logmesg, extra=extra) + enfo = s_common.err(e) enfo[1].pop('esrc', None) enfo[1].pop('ename', None) diff --git a/synapse/tests/test_cmds_cortex.py b/synapse/tests/test_cmds_cortex.py index 4fd5fc5b9ee..01faa33a277 100644 --- a/synapse/tests/test_cmds_cortex.py +++ b/synapse/tests/test_cmds_cortex.py @@ -29,7 +29,7 @@ async def test_storm(self): cmdr = await s_cmdr.getItemCmdr(core, outp=outp) await cmdr.runCmdLine('help storm') outp.expect(help_msg) - outp.expect('WARNING: "cmdr" is deprecated in 2.164.0 and will be removed in 3.0.0') + outp.expect('WARNING: cmdr is deprecated in 2.164.0 and will be removed in 3.0.0') outp = self.getTestOutp() 
cmdr = await s_cmdr.getItemCmdr(core, outp=outp) @@ -332,10 +332,9 @@ def check_locs_cleanup(cobj): cmdr = await s_cmdr.getItemCmdr(core, outp=outp) await cmdr.runCmdLine('log --on --nodes-only') cmdr.locs['log:fmt'] = 'newp' - with self.getAsyncLoggerStream('synapse.cmds.cortex', - 'Unknown encoding format: newp') as stream: + with self.getLoggerStream('synapse.cmds.cortex') as stream: await cmdr.runCmdLine('storm test:str') - self.true(await stream.wait(2)) + await stream.expect('Unknown encoding format: newp', timeout=2) await cmdr.fini() diff --git a/synapse/tests/test_common.py b/synapse/tests/test_common.py index 68045993473..81483bc1053 100644 --- a/synapse/tests/test_common.py +++ b/synapse/tests/test_common.py @@ -342,19 +342,6 @@ def test_envbool(self): self.false(s_common.envbool('SYN_FOO')) self.false(s_common.envbool('SYN_BAR')) - def test_normlog(self): - self.eq(10, s_common.normLogLevel(' 10 ')) - self.eq(10, s_common.normLogLevel(10)) - self.eq(20, s_common.normLogLevel(' inFo\n')) - with self.raises(s_exc.BadArg): - s_common.normLogLevel(100) - with self.raises(s_exc.BadArg): - s_common.normLogLevel('BEEP') - with self.raises(s_exc.BadArg): - s_common.normLogLevel('12') - with self.raises(s_exc.BadArg): - s_common.normLogLevel({'key': 'newp'}) - async def test_merggenr(self): async def asyncl(data): for item in data: @@ -416,13 +403,13 @@ async def alist(coro): retn = s_common.merggenr2([asyncl(lt) for lt in (l3, l2, l1)], reverse=True) self.eq((9, 8, 7, 6, 5, 4, 3, 2, 1), await alist(retn)) - def test_sslctx(self): + async def test_sslctx(self): with self.getTestDir(mirror='certdir') as dirn: cadir = s_common.genpath(dirn, 'cas') os.makedirs(s_common.genpath(cadir, 'newp')) - with self.getLoggerStream('synapse.common', f'Error loading {cadir}/ca.key') as stream: + with self.getLoggerStream('synapse.common') as stream: ctx = s_common.getSslCtx(cadir) - self.true(stream.wait(10)) + await stream.expect(f'Error loading {cadir}/ca.key', timeout=10) 
ca_subjects = {cert.get('subject') for cert in ctx.get_ca_certs()} self.isin(((('commonName', 'test'),),), ca_subjects) diff --git a/synapse/tests/test_cortex.py b/synapse/tests/test_cortex.py index 6992e7c53b2..6d0950254ea 100644 --- a/synapse/tests/test_cortex.py +++ b/synapse/tests/test_cortex.py @@ -85,10 +85,10 @@ async def test_cortex_basics(self): await core.nodes('[ inet:ipv4=1.2.3.4 :asn=99 .seen=now +#foo:score=10 ]') conf = {'modules': [('NewpModule', {})]} - warn = '''"'modules' Cortex config value" is deprecated''' - with self.getAsyncLoggerStream('synapse.common', warn) as stream: + warn = ''''modules' Cortex config value is deprecated''' + with self.getLoggerStream('synapse.common') as stream: async with self.getTestCore(dirn=dirn, conf=conf) as core: - self.true(await stream.wait(timeout=12)) + await stream.expect(warn, timeout=12) async def test_cortex_cellguid(self): iden = s_common.guid() @@ -312,12 +312,11 @@ async def testCoreJson(core): async with self.getTestCore(dirn=dirn) as core: await core.callStorm('$lib.jsonstor.set((path,), hehe)') - with self.getAsyncLoggerStream('synapse.lib.nexus') as stream: + with self.getLoggerStream('synapse.lib.nexus') as stream: async with self.getTestCore(dirn=dirn) as core: q = 'return( $lib.jsonstor.get((path,)) )' self.eq('hehe', await core.callStorm(q)) - stream.seek(0) - self.notin('Exception while replaying log', stream.read()) + self.notin('Exception while replaying log', stream.getvalue()) async def test_cortex_layer_mirror(self): @@ -1168,7 +1167,7 @@ async def test_cortex_callstorm(self): self.eq(cm.exception.get('mesg'), 'Generator control statement "stop" used outside of a generator function.') - with self.getAsyncLoggerStream('synapse.lib.view', 'callStorm cancelled') as stream: + with self.getLoggerStream('synapse.lib.view') as stream: async with core.getLocalProxy() as proxy: # async cancellation test @@ -1178,7 +1177,7 @@ async def test_cortex_callstorm(self): except asyncio.TimeoutError: 
logger.exception('Woohoo!') - self.true(await stream.wait(6)) + await stream.expect('callStorm cancelled', timeout=6) host, port = await core.addHttpsPort(0, host='127.0.0.1') @@ -1232,8 +1231,7 @@ async def test_cortex_storm_dmon_log(self): async with self.getTestCore() as core: - with self.getStructuredAsyncLoggerStream('synapse.storm.log', - 'Running dmon') as stream: + with self.getLoggerStream('synapse.storm.log') as stream: iden = await core.callStorm(''' $que = $lib.queue.add(foo) @@ -1249,11 +1247,11 @@ async def test_cortex_storm_dmon_log(self): $que.get() return($ddef.iden) ''') - self.true(await stream.wait(6)) + await stream.expect('Running dmon', timeout=6) mesg = stream.jsonlines()[0] self.eq(mesg.get('message'), f'Running dmon {iden}') - self.eq(mesg.get('iden'), iden) + self.eq(mesg['params'].get('iden'), iden) opts = {'vars': {'iden': iden}} logs = await core.callStorm('return($lib.dmon.log($iden))', opts=opts) @@ -2952,10 +2950,9 @@ async def test_storm_fromtags(self): # Attempt a formpivot from a syn:tag node to a secondary property # which is not valid - with self.getAsyncLoggerStream('synapse.lib.ast', - 'Unknown time format') as stream: + with self.getLoggerStream('synapse.lib.ast') as stream: self.len(0, await core.nodes('syn:tag=foo.bar -> test:str:tick')) - self.true(await stream.wait(4)) + await stream.expect('Unknown time format', timeout=4) async def test_storm_tagtags(self): @@ -3520,22 +3517,17 @@ async def test_storm_logging(self): self.nn(view) # Storm logging - with self.getAsyncLoggerStream('synapse.storm', 'Executing storm query {help ask} as [root]') \ - as stream: + with self.getLoggerStream('synapse.storm') as stream: await alist(core.storm('help ask')) - self.true(await stream.wait(4)) + await stream.expect('Executing storm query {help ask} as [root]', timeout=4) mesg = 'Executing storm query {help foo} as [root]' - with self.getAsyncLoggerStream('synapse.storm', mesg) as stream: - await alist(core.storm('help foo', 
opts={'show': ('init', 'fini', 'print',)})) - self.true(await stream.wait(4)) - - with self.getStructuredAsyncLoggerStream('synapse.storm', mesg) as stream: + with self.getLoggerStream('synapse.storm') as stream: await alist(core.storm('help foo', opts={'show': ('init', 'fini', 'print',)})) - self.true(await stream.wait(4)) + await stream.expect(mesg, timeout=4) mesg = stream.jsonlines()[0] - self.eq(mesg.get('view'), view) + self.eq(mesg['params'].get('view'), view) async def test_strict(self): @@ -5939,9 +5931,9 @@ async def test_cortex_mirror_culled(self): log01 = await alist(core01.nexsroot.nexslog.iter(0)) self.eq(log00, log01) - with self.getAsyncLoggerStream('synapse.lib.nexus', 'offset is out of sync') as stream: + with self.getLoggerStream('synapse.lib.nexus') as stream: async with self.getTestCore(dirn=path02, conf={'mirror': url01}) as core02: - self.true(await stream.wait(6)) + await stream.expect('offset is out of sync', timeout=6) self.true(core02.nexsroot.isfini) # restore mirror @@ -6330,10 +6322,9 @@ async def test_cortex_storm_dmon_view(self): await asyncio.sleep(0) q = '''$q = $lib.queue.get(dmon) $q.puts((1, 3, 5))''' - with self.getAsyncLoggerStream('synapse.lib.storm', - "made ('test:int', 5)") as stream: + with self.getLoggerStream('synapse.lib.storm') as stream: await core.nodes(q) - self.true(await stream.wait(6)) + await stream.expect("made ('test:int', 5)", timeout=6) nodes = await core.nodes('test:int', opts={'view': view2_iden}) self.len(3, nodes) @@ -6360,10 +6351,9 @@ async def test_cortex_storm_dmon_view(self): await core.addStormDmon(ddef) q = '''$q = $lib.queue.get(dmon2) $q.puts((1, 3, 5))''' - with self.getAsyncLoggerStream('synapse.lib.storm', - "made ('test:str', '5')") as stream: + with self.getLoggerStream('synapse.lib.storm') as stream: await core.nodes(q) - self.true(await stream.wait(6)) + await stream.expect("made ('test:str', '5')", timeout=6) nodes = await core.nodes('test:str', opts={'view': view2_iden}) self.len(3, 
nodes) @@ -6377,10 +6367,9 @@ async def test_cortex_storm_dmon_view(self): with self.raises(s_exc.NoSuchView): await core.nodes('test:int', opts={'view': view2_iden}) - with self.getAsyncLoggerStream('synapse.lib.storm', - 'Dmon View is invalid. Stopping Dmon') as stream: + with self.getLoggerStream('synapse.lib.storm') as stream: async with self.getTestCore(dirn=dirn) as core: - self.true(await stream.wait(6)) + await stream.expect('Dmon View is invalid. Stopping Dmon', timeout=6) msgs = await core.stormlist('dmon.list') self.stormIsInPrint('fatal error: invalid view', msgs) @@ -7900,24 +7889,20 @@ async def test_cortex_depr_props_warning(self): await core.callStorm('model.deprecated.lock ".pdep"') # Check that we saw the warnings - stream.seek(0) - data = stream.read() - + data = stream.getvalue() self.eq(1, data.count('deprecated properties unlocked')) self.isin('deprecated properties unlocked and not in use', data) - match = regex.match(r'Detected (?P\d+) deprecated properties', data) + match = regex.search(r'Detected (?P\d+) deprecated properties', data) count = int(match.groupdict().get('count')) - here = stream.tell() + stream.clear() async with self.getTestCore(conf=conf, dirn=dirn) as core: pass # Check that the warnings are gone now - stream.seek(here) - data = stream.read() - + data = stream.getvalue() self.eq(1, data.count('deprecated properties unlocked')) self.isin(f'Detected {count - 4} deprecated properties', data) @@ -8389,7 +8374,7 @@ async def test_cortex_user_scope(self): self.eq('admin', await prox.callStorm('return( $lib.user.name() )', opts=opts)) - with self.getStructuredAsyncLoggerStream('synapse.lib.cell') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: q = 'return( ($lib.user.name(), $lib.auth.users.add(lowuser) ))' (whoami, udef) = await prox.callStorm(q, opts=opts) @@ -8400,9 +8385,9 @@ async def test_cortex_user_scope(self): mesg = [m for m in msgs if 'Added user' in m.get('message')][0] self.eq('Added 
user=lowuser', mesg.get('message')) self.eq('admin', mesg.get('username')) - self.eq('lowuser', mesg.get('target_username')) + self.eq('lowuser', mesg['params'].get('target_username')) - with self.getStructuredAsyncLoggerStream('synapse.lib.cell') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: q = 'auth.user.mod lowuser --admin $lib.true' msgs = [] @@ -8414,7 +8399,7 @@ async def test_cortex_user_scope(self): mesg = [m for m in msgs if 'Set admin' in m.get('message')][0] self.isin('Set admin=True for lowuser', mesg.get('message')) self.eq('admin', mesg.get('username')) - self.eq('lowuser', mesg.get('target_username')) + self.eq('lowuser', mesg['params'].get('target_username')) async def test_cortex_ext_httpapi(self): # Cortex API tests for Extended HttpAPI @@ -8612,7 +8597,7 @@ async def _hang(*args, **kwargs): # storm() q = 'inet:asn=0' qhash = s_storm.queryhash(q) - with self.getStructuredAsyncLoggerStream('synapse') as stream: + with self.getLoggerStream('synapse') as stream: msgs = await alist(core00.storm(q)) self.len(1, [m for m in msgs if m[0] == 'node']) @@ -8622,17 +8607,17 @@ async def _hang(*args, **kwargs): self.len(2, msgs) self.eq(msgs[0].get('message'), f'Offloading Storm query to mirror 01.core.{ahanet}.') - self.eq(msgs[0].get('hash'), qhash) - self.eq(msgs[0].get('mirror'), f'01.core.{ahanet}') + self.eq(msgs[0]['params'].get('hash'), qhash) + self.eq(msgs[0]['params'].get('mirror'), f'01.core.{ahanet}') self.eq(msgs[1].get('message'), f'Executing storm query {{{q}}} as [root]') - self.eq(msgs[1].get('hash'), qhash) - self.eq(msgs[1].get('pool:from'), f'00.core.{ahanet}') + self.eq(msgs[1]['params'].get('hash'), qhash) + self.eq(msgs[1]['params'].get('pool:from'), f'00.core.{ahanet}') # callStorm() q = 'inet:asn=0 return($lib.true)' qhash = s_storm.queryhash(q) - with self.getStructuredAsyncLoggerStream('synapse') as stream: + with self.getLoggerStream('synapse') as stream: self.true(await core00.callStorm(q)) data = 
stream.getvalue() @@ -8641,17 +8626,17 @@ async def _hang(*args, **kwargs): self.len(2, msgs) self.eq(msgs[0].get('message'), f'Offloading Storm query to mirror 01.core.{ahanet}.') - self.eq(msgs[0].get('hash'), qhash) - self.eq(msgs[0].get('mirror'), f'01.core.{ahanet}') + self.eq(msgs[0]['params'].get('hash'), qhash) + self.eq(msgs[0]['params'].get('mirror'), f'01.core.{ahanet}') self.eq(msgs[1].get('message'), f'Executing storm query {{{q}}} as [root]') - self.eq(msgs[1].get('hash'), qhash) - self.eq(msgs[1].get('pool:from'), f'00.core.{ahanet}') + self.eq(msgs[1]['params'].get('hash'), qhash) + self.eq(msgs[1]['params'].get('pool:from'), f'00.core.{ahanet}') # exportStorm() q = 'inet:asn=0' qhash = s_storm.queryhash(q) - with self.getStructuredAsyncLoggerStream('synapse') as stream: + with self.getLoggerStream('synapse') as stream: self.len(1, await alist(core00.exportStorm(q))) data = stream.getvalue() @@ -8660,17 +8645,17 @@ async def _hang(*args, **kwargs): self.len(2, msgs) self.eq(msgs[0].get('message'), f'Offloading Storm query to mirror 01.core.{ahanet}.') - self.eq(msgs[0].get('hash'), qhash) - self.eq(msgs[0].get('mirror'), f'01.core.{ahanet}') + self.eq(msgs[0]['params'].get('hash'), qhash) + self.eq(msgs[0]['params'].get('mirror'), f'01.core.{ahanet}') self.eq(msgs[1].get('message'), f'Executing storm query {{{q}}} as [root]') - self.eq(msgs[1].get('hash'), qhash) - self.eq(msgs[1].get('pool:from'), f'00.core.{ahanet}') + self.eq(msgs[1]['params'].get('hash'), qhash) + self.eq(msgs[1]['params'].get('pool:from'), f'00.core.{ahanet}') # count() q = 'inet:asn=0' qhash = s_storm.queryhash(q) - with self.getStructuredAsyncLoggerStream('synapse') as stream: + with self.getLoggerStream('synapse') as stream: self.eq(1, await core00.count(q)) data = stream.getvalue() @@ -8679,20 +8664,19 @@ async def _hang(*args, **kwargs): self.len(2, msgs) self.eq(msgs[0].get('message'), f'Offloading Storm query to mirror 01.core.{ahanet}.') - self.eq(msgs[0].get('hash'), 
qhash) - self.eq(msgs[0].get('mirror'), f'01.core.{ahanet}') + self.eq(msgs[0]['params'].get('hash'), qhash) + self.eq(msgs[0]['params'].get('mirror'), f'01.core.{ahanet}') self.eq(msgs[1].get('message'), f'Executing storm query {{{q}}} as [root]') - self.eq(msgs[1].get('hash'), qhash) - self.eq(msgs[1].get('pool:from'), f'00.core.{ahanet}') + self.eq(msgs[1]['params'].get('hash'), qhash) + self.eq(msgs[1]['params'].get('pool:from'), f'00.core.{ahanet}') with self.getLoggerStream('synapse') as stream: core01.boss.is_shutdown = True self.stormHasNoWarnErr(await core00.stormlist('inet:asn=0')) core01.boss.is_shutdown = False - stream.seek(0) - self.isin('Proxy for pool mirror [01.core.synapse] is shutting down. Skipping.', stream.read()) + self.isin('Proxy for pool mirror [01.core.synapse] is shutting down. Skipping.', stream.getvalue()) with patch('synapse.cortex.CoreApi.getNexsIndx', _hang): @@ -8700,8 +8684,7 @@ async def _hang(*args, **kwargs): msgs = await alist(core00.storm('inet:asn=0')) self.len(1, [m for m in msgs if m[0] == 'node']) - stream.seek(0) - data = stream.read() + data = stream.getvalue() self.notin('Offloading Storm query', data) self.isin('Timeout waiting for pool mirror [01.core.synapse] Nexus offset', data) self.notin('Timeout waiting for query mirror', data) @@ -8714,8 +8697,7 @@ async def _hang(*args, **kwargs): msgs = await alist(core00.storm('inet:asn=0')) self.len(1, [m for m in msgs if m[0] == 'node']) - stream.seek(0) - data = stream.read() + data = stream.getvalue() self.notin('Offloading Storm query', data) self.isin('Timeout waiting for pool mirror [01.core.synapse] Nexus offset', data) self.notin('Timeout waiting for query mirror', data) @@ -8726,8 +8708,7 @@ async def _hang(*args, **kwargs): msgs = await alist(core00.storm('inet:asn=0')) self.len(1, [m for m in msgs if m[0] == 'node']) - stream.seek(0) - data = stream.read() + data = stream.getvalue() self.isin('Offloading Storm query', data) self.notin('Timeout waiting for pool 
mirror', data) self.notin('Timeout waiting for query mirror', data) @@ -8743,8 +8724,7 @@ async def finidproxy(self, timeout=None): msgs = await alist(core00.storm('inet:asn=0')) self.len(1, [m for m in msgs if m[0] == 'node']) - stream.seek(0) - data = stream.read() + data = stream.getvalue() self.isin('Proxy for pool mirror [01.core.synapse] was shutdown. Skipping.', data) msgs = await core00.stormlist('cortex.storm.pool.set --connection-timeout 1 --sync-timeout 1 aha://pool00...') @@ -8762,8 +8742,7 @@ async def finidproxy(self, timeout=None): msgs = await alist(core00.storm('inet:asn=0')) self.len(1, [m for m in msgs if m[0] == 'node']) - stream.seek(0) - data = stream.read() + data = stream.getvalue() explog = ('Pool mirror [01.core.synapse] is too far out of sync. Skipping.') self.isin(explog, data) self.notin('Offloading Storm query', data) @@ -8772,32 +8751,28 @@ async def finidproxy(self, timeout=None): msgs = await alist(core00.storm('inet:asn=0')) self.len(1, [m for m in msgs if m[0] == 'node']) - stream.seek(0) - data = stream.read() + data = stream.getvalue() self.isin('Offloading Storm query', data) self.isin('Timeout waiting for query mirror', data) with self.getLoggerStream('synapse') as stream: self.true(await core00.callStorm('inet:asn=0 return($lib.true)')) - stream.seek(0) - data = stream.read() + data = stream.getvalue() self.isin('Offloading Storm query', data) self.isin('Timeout waiting for query mirror', data) with self.getLoggerStream('synapse') as stream: self.len(1, await alist(core00.exportStorm('inet:asn=0'))) - stream.seek(0) - data = stream.read() + data = stream.getvalue() self.isin('Offloading Storm query', data) self.isin('Timeout waiting for query mirror', data) with self.getLoggerStream('synapse') as stream: self.eq(1, await core00.count('inet:asn=0')) - stream.seek(0) - data = stream.read() + data = stream.getvalue() self.isin('Offloading Storm query', data) self.isin('Timeout waiting for query mirror', data) @@ -8820,8 +8795,7 
@@ async def finidproxy(self, timeout=None): msgs = await alist(core00.storm('inet:asn=0')) self.len(1, [m for m in msgs if m[0] == 'node']) - stream.seek(0) - data = stream.read() + data = stream.getvalue() self.isin('Timeout waiting for pool mirror proxy.', data) self.isin('Pool members exhausted. Running query locally.', data) @@ -8831,29 +8805,25 @@ async def finidproxy(self, timeout=None): msgs = await alist(core00.storm('inet:asn=0')) self.len(1, [m for m in msgs if m[0] == 'node']) - stream.seek(0) - data = stream.read() + data = stream.getvalue() self.isin('Storm query mirror pool is empty, running query locally.', data) with self.getLoggerStream('synapse') as stream: self.true(await core00.callStorm('inet:asn=0 return($lib.true)')) - stream.seek(0) - data = stream.read() + data = stream.getvalue() self.isin('Storm query mirror pool is empty, running query locally.', data) with self.getLoggerStream('synapse') as stream: self.len(1, await alist(core00.exportStorm('inet:asn=0'))) - stream.seek(0) - data = stream.read() + data = stream.getvalue() self.isin('Storm query mirror pool is empty, running query locally.', data) with self.getLoggerStream('synapse') as stream: self.eq(1, await core00.count('inet:asn=0')) - stream.seek(0) - data = stream.read() + data = stream.getvalue() self.isin('Storm query mirror pool is empty, running query locally.', data) core01 = await base.enter_context(self.getTestCore(dirn=dirn01)) @@ -8868,8 +8838,7 @@ async def finidproxy(self, timeout=None): with self.getLoggerStream('synapse') as stream: self.true(await core01.callStorm('inet:asn=0 return($lib.true)')) - stream.seek(0) - data = stream.read() + data = stream.getvalue() # test that it reverts to local when referencing self self.notin('Offloading Storm query', data) self.notin('Timeout waiting for query mirror', data) @@ -8885,16 +8854,14 @@ async def finidproxy(self, timeout=None): msgs = await alist(core01.storm('inet:asn=0')) self.len(1, [m for m in msgs if m[0] == 
'node']) - stream.seek(0) - data = stream.read() + data = stream.getvalue() self.isin('Storm query mirror pool is empty', data) with self.getLoggerStream('synapse') as stream: msgs = await alist(core01.storm('inet:asn=0', opts={'mirror': False})) self.len(1, [m for m in msgs if m[0] == 'node']) - stream.seek(0) - data = stream.read() + data = stream.getvalue() self.notin('Storm query mirror pool is empty', data) msgs = await core00.stormlist('cortex.storm.pool.get') @@ -8936,15 +8903,15 @@ async def test_cortex_authgate(self): core.auth.stor.set('gate:cortex:user:newp', {'iden': 'newp'}) core.auth.stor.set('gate:cortex:role:newp', {'iden': 'newp'}) - with self.getAsyncLoggerStream('synapse.cortex') as stream: + with self.getLoggerStream('synapse.cortex') as stream: async with self.getTestCore(dirn=dirn) as core: # type: s_cortex.Cortex # The cortex authgate still does nothing with self.raises(s_exc.AuthDeny) as cm: await core.nodes('[test:str=hello]', opts=aslow) - stream.seek(0) - buf = stream.read() - self.isin('(lowuser) has a rule on the "cortex" authgate', buf) - self.isin('(all) has a rule on the "cortex" authgate', buf) + + buf = stream.getvalue() + self.isin(r'(lowuser) has a rule on the \"cortex\" authgate', buf) + self.isin(r'(all) has a rule on the \"cortex\" authgate', buf) async def test_cortex_check_nexus_init(self): # This test is a simple safety net for making sure no nexus events @@ -8991,10 +8958,10 @@ async def test_cortex_safemode(self): # - storm pools # Make sure we're logging the message - with self.getStructuredAsyncLoggerStream('synapse.lib.cell', 'Booting cortex in safe-mode.') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: async with self.getTestCore(conf=safemode) as core: self.true(core.safemode) - self.true(await stream.wait(10)) + await stream.expect('Booting cortex in safe-mode.', timeout=10) msgs = stream.jsonlines() self.len(1, msgs) self.eq(msgs[0].get('message'), 'Booting cortex in safe-mode. 
Some functionality may be disabled.') @@ -9147,10 +9114,10 @@ async def test_cortex_safemode(self): self.len(1, events) self.eq(events, (('core:pkg:onload:skipped', {'pkg': 'foopkg', 'reason': 'safemode'}),)) - with self.getAsyncLoggerStream('synapse.cortex', 'foopkg onload output: foopkg onload') as stream: + with self.getLoggerStream('synapse.cortex') as stream: async with self.getTestCore(dirn=dirn, conf=nosafe) as core: self.false(core.safemode) - await stream.wait(timeout=10) + await stream.expect('foopkg onload output: foopkg onload', timeout=10) # Check merge tasks are not executed with self.getTestDir() as dirn: diff --git a/synapse/tests/test_daemon.py b/synapse/tests/test_daemon.py index ae28479874d..cb09ef7c856 100644 --- a/synapse/tests/test_daemon.py +++ b/synapse/tests/test_daemon.py @@ -30,14 +30,13 @@ async def test_unixsock_longpath(self): extrapath = 108 * 'A' longdirn = s_common.genpath(dirn, extrapath) listpath = f'unix://{s_common.genpath(longdirn, "sock")}' - with self.getAsyncLoggerStream('synapse.daemon', - 'exceeds OS supported UNIX socket path length') as stream: + with self.getLoggerStream('synapse.daemon') as stream: async with await s_daemon.Daemon.anit() as dmon: with self.raises(OSError): await dmon.listen(listpath) - self.true(await stream.wait(1)) + await stream.expect('exceeds OS supported UNIX socket path length', timeout=1) async def test_dmon_ready(self): @@ -84,32 +83,30 @@ async def test_dmon_errors(self): # Throw an exception when trying to handle mesg outright async with await prox.getPoolLink() as link: - with self.getAsyncLoggerStream('synapse.daemon', 'Dmon.onLinkMesg Handler: mesg=') as stream: + with self.getLoggerStream('synapse.daemon') as stream: await link.tx(31337) - self.true(await stream.wait(timeout=6)) + await stream.expect('Dmon.onLinkMesg Handler: mesg=', timeout=6) # Valid format; do not know what the message is. 
async with await prox.getPoolLink() as link: mesg = ('newp', {}) - emsg = "Dmon.onLinkMesg Invalid mesg: mesg=('newp', {})" - with self.getAsyncLoggerStream('synapse.daemon', emsg) as stream: + with self.getLoggerStream('synapse.daemon') as stream: await link.tx(mesg) - self.true(await stream.wait(timeout=6)) + await stream.expect("Dmon.onLinkMesg Invalid mesg: mesg=('newp', {})", timeout=6) # Invalid data casues a link to fail on rx async with await prox.getPoolLink() as link: - with self.getAsyncLoggerStream('synapse.lib.link', 'rx closed unexpectedly') as stream: + with self.getLoggerStream('synapse.lib.link') as stream: byts = b'\x16\x03\x01\x02\x00\x01\x00\x01\xfc\x03\x03\xa6\xa3D\xd5\xdf%\xac\xa9\x92\xc3' await link.send(byts) - self.true(await stream.wait(timeout=6)) + await stream.expect('rx closed unexpectedly', timeout=6) # bad t2:init message async with await prox.getPoolLink() as link: mesg = ('t2:init', {}) - emsg = "Error on t2:init:" - with self.getAsyncLoggerStream('synapse.daemon', emsg) as stream: + with self.getLoggerStream('synapse.daemon') as stream: await link.tx(mesg) - self.true(await stream.wait(timeout=6)) + await stream.expect('Error on t2:init:', timeout=6) class SvcApi(s_cell.CellApi, s_stormsvc.StormSvc): _storm_svc_name = 'foo' diff --git a/synapse/tests/test_datamodel.py b/synapse/tests/test_datamodel.py index 405af7c3091..33e9f76809f 100644 --- a/synapse/tests/test_datamodel.py +++ b/synapse/tests/test_datamodel.py @@ -157,12 +157,11 @@ async def test_datamodel_dynamics(self): modl.addIface('depr:iface', {'deprecated': True}) - with self.getAsyncLoggerStream('synapse.datamodel') as dstream: + with self.getLoggerStream('synapse.datamodel') as stream: modl.addType('foo:bar', 'int', {}, {'interfaces': ('depr:iface',)}) modl.addForm('foo:bar', {}, ()) - dstream.seek(0) - self.isin('Form foo:bar depends on deprecated interface depr:iface', dstream.read()) + self.isin('Form foo:bar depends on deprecated interface depr:iface', 
stream.getvalue()) async def test_datamodel_del_prop(self): @@ -226,38 +225,35 @@ async def test_model_deprecation(self): with self.getTestDir() as dirn: - with self.getAsyncLoggerStream('synapse.lib.types') as tstream, \ - self.getAsyncLoggerStream('synapse.datamodel') as dstream: + with self.getLoggerStream('synapse.lib.types') as tstream, \ + self.getLoggerStream('synapse.datamodel') as dstream: + core = await s_cortex.Cortex.anit(dirn, conf) - dstream.expect('universal property .udep is using a deprecated type') - dstream.expect('type test:dep:easy is based on a deprecated type test:dep:easy') - dstream.noexpect('type test:dep:comp field str uses a deprecated type test:dep:easy') - tstream.expect('Array type test:dep:array is based on a deprecated type test:dep:easy') + await dstream.expect('universal property .udep is using a deprecated type') + await dstream.expect('type test:dep:easy is based on a deprecated type test:dep:easy') + await tstream.expect('Array type test:dep:array is based on a deprecated type test:dep:easy') + self.notin('type test:dep:comp field str uses a deprecated type test:dep:easy', dstream.getvalue()) - # Using deprecated forms and props is warned to the user - msgs = await core.stormlist('[test:dep:easy=test1 :guid=(t1,)] [:guid=(t2,)]') - self.stormIsInWarn('The form test:dep:easy is deprecated', msgs) - self.stormIsInWarn('The property test:dep:easy:guid is deprecated or using a deprecated type', msgs) + # Using deprecated forms and props is warned to the user + msgs = await core.stormlist('[test:dep:easy=test1 :guid=(t1,)] [:guid=(t2,)]') + self.stormIsInWarn('The form test:dep:easy is deprecated', msgs) + self.stormIsInWarn('The property test:dep:easy:guid is deprecated or using a deprecated type', msgs) - msgs = await core.stormlist('[test:str=tehe .pdep=beep]') - self.stormIsInWarn('property test:str.pdep is deprecated', msgs) + msgs = await core.stormlist('[test:str=tehe .pdep=beep]') + self.stormIsInWarn('property test:str.pdep 
is deprecated', msgs) - # Extended props, custom universals and tagprops can all trigger deprecation notices - mesg = 'tag property depr is using a deprecated type test:dep:easy' - with self.getAsyncLoggerStream('synapse.datamodel', mesg) as dstream: + # Extended props, custom universals and tagprops can all trigger deprecation notices await core.addTagProp('depr', ('test:dep:easy', {}), {}) - self.true(await dstream.wait(6)) + await dstream.expect('tag property depr is using a deprecated type test:dep:easy', timeout=6) - mesg = 'universal property ._test is using a deprecated type test:dep:easy' - with self.getAsyncLoggerStream('synapse.datamodel', mesg) as dstream: await core.addUnivProp('_test', ('test:dep:easy', {}), {}) - self.true(await dstream.wait(6)) + await dstream.expect('universal property ._test is using a deprecated type test:dep:easy', timeout=6) mesg = 'extended property test:str:_depr is using a deprecated type test:dep:easy' - with self.getAsyncLoggerStream('synapse.cortex', mesg) as cstream: + with self.getLoggerStream('synapse.cortex') as cstream: await core.addFormProp('test:str', '_depr', ('test:dep:easy', {}), {}) - self.true(await cstream.wait(6)) + await cstream.expect(mesg, timeout=6) # Deprecated ctor information propagates upward to types and forms msgs = await core.stormlist('[test:dep:str=" test" :beep=" boop "]') @@ -268,9 +264,9 @@ async def test_model_deprecation(self): # Restarting the cortex warns again for various items that it loads from the hive # with deprecated types in them. This is a coverage test for extended properties. 
- with self.getAsyncLoggerStream('synapse.cortex', mesg) as cstream: + with self.getLoggerStream('synapse.cortex') as cstream: async with await s_cortex.Cortex.anit(dirn, conf) as core: - self.true(await cstream.wait(6)) + await cstream.expect(mesg, timeout=6) async def test_datamodel_getmodeldefs(self): ''' @@ -322,7 +318,7 @@ async def test_model_invalid_comp_types(self): with self.getLoggerStream('synapse.datamodel') as stream: s_datamodel.Model().addDataModels([badmodel]) - stream.expect(mutmesg) + await stream.expect(mutmesg, timeout=1) # Comp type with an indirect data field (and out of order definitions) badmodel = ('badmodel', { @@ -343,7 +339,7 @@ async def test_model_invalid_comp_types(self): with self.getLoggerStream('synapse.datamodel') as stream: s_datamodel.Model().addDataModels([badmodel]) - stream.expect(mutmesg) + await stream.expect(mutmesg, timeout=1) # Comp type with double indirect data field badmodel = ('badmodel', { @@ -365,7 +361,7 @@ async def test_model_invalid_comp_types(self): with self.getLoggerStream('synapse.datamodel') as stream: s_datamodel.Model().addDataModels([badmodel]) - stream.expect(mutmesg) + await stream.expect(mutmesg, timeout=1) # API direct typeopts = { @@ -377,7 +373,7 @@ async def test_model_invalid_comp_types(self): with self.getLoggerStream('synapse.datamodel') as stream: s_datamodel.Model().addType('_bad:comp', 'comp', typeopts, {}) - stream.expect(mutmesg) + await stream.expect(mutmesg, timeout=1) # Non-existent types typeopts = { @@ -389,7 +385,7 @@ async def test_model_invalid_comp_types(self): with self.getLoggerStream('synapse.datamodel') as stream: s_datamodel.Model().addType('_bad:comp', 'comp', typeopts, {}) - stream.expect('The _bad:comp field hehe is declared as a type (newp) that does not exist.') + await stream.expect('The _bad:comp field hehe is declared as a type (newp) that does not exist.', timeout=1) # deprecated types badmodel = ('badmodel', { @@ -410,7 +406,7 @@ async def 
test_model_invalid_comp_types(self): with self.getLoggerStream('synapse.datamodel') as stream: s_datamodel.Model().addDataModels([badmodel]) - stream.expect('The type _bad:comp field hehe uses a deprecated type depr:type.') + await stream.expect('The type _bad:comp field hehe uses a deprecated type depr:type.', timeout=1) # Comp type not extended does not gen mutable warning badmodel = ('badmodel', { @@ -430,7 +426,8 @@ async def test_model_invalid_comp_types(self): with self.getLoggerStream('synapse.datamodel') as stream: s_datamodel.Model().addDataModels([badmodel]) - stream.noexpect('Comp types with mutable fields') + + self.notin('Comp types with mutable fields', stream.getvalue()) # Comp type not extended does not gen deprecated warning badmodel = ('badmodel', { @@ -451,7 +448,7 @@ async def test_model_invalid_comp_types(self): with self.getLoggerStream('synapse.datamodel') as stream: s_datamodel.Model().addDataModels([badmodel]) - stream.noexpect('uses a deprecated type') + self.notin('uses a deprecated type', stream.getvalue()) async def test_datamodel_edges(self): diff --git a/synapse/tests/test_init.py b/synapse/tests/test_init.py deleted file mode 100644 index 01a8ba75410..00000000000 --- a/synapse/tests/test_init.py +++ /dev/null @@ -1,18 +0,0 @@ - -import synapse.tests.utils as s_t_utils - -class InitTest(s_t_utils.SynTest): - pass - - ''' - def test_init_modules(self): - os.environ['SYN_MODULES'] = 'fakenotrealmod , badnothere,math' - msg = 'SYN_MODULES failed: badnothere (NoSuchDyn: name=\'badnothere\')' - with self.getLoggerStream('synapse', msg) as stream: - imp.reload(synapse) - self.true(stream.wait(10)) - - stream.seek(0) - self.isin(msg, stream.read()) - self.isin(('math', 2.0, None), synapse.lib.modules.call('sqrt', 4)) - ''' diff --git a/synapse/tests/test_lib_agenda.py b/synapse/tests/test_lib_agenda.py index 959b830da85..8779158e29d 100--- a/synapse/tests/test_lib_agenda.py +++ b/synapse/tests/test_lib_agenda.py @@ -395,17 +395,17 @@ def 
looptime(): # Ensure structured logging captures the cron iden value core.stormlog = True - with self.getStructuredAsyncLoggerStream('synapse.storm') as stream: + with self.getLoggerStream('synapse.storm') as stream: unixtime = datetime.datetime(year=2019, month=2, day=13, hour=10, minute=16, tzinfo=tz.utc).timestamp() self.eq((12, 'bar'), await asyncio.wait_for(core.callStorm('return($lib.queue.gen(visi).pop(wait=$lib.true))'), timeout=5)) core.stormlog = False msgs = stream.jsonlines() - msgs = [m for m in msgs if m['text'] == '$lib.queue.gen(visi).put(bar)'] + msgs = [m for m in msgs if m['params']['text'] == '$lib.queue.gen(visi).put(bar)'] self.gt(len(msgs), 0) for m in msgs: - self.eq(m.get('cron'), appt.iden) + self.eq(m['params'].get('cron'), appt.iden) self.eq(1, appt.startcount) @@ -419,14 +419,15 @@ def looptime(): await visi.setLocked(True) - with self.getLoggerStream('synapse.lib.agenda', 'locked') as stream: + with self.getLoggerStream('synapse.lib.agenda') as stream: unixtime = datetime.datetime(year=2019, month=2, day=16, hour=10, minute=16, tzinfo=tz.utc).timestamp() # pump the ioloop via sleep(0) until the log message appears - while not stream.wait(0.1): + while 'locked' not in stream.getvalue(): await asyncio.sleep(0) - await core.nexsroot.waitOffs(strt + 4) + while not await core.nexsroot.waitOffs(strt + 4): + await asyncio.sleep(0) self.eq(2, appt.startcount) @@ -737,16 +738,16 @@ async def test_agenda_fatal_run(self): # Force the cron to run. 
- with self.getAsyncLoggerStream('synapse.lib.agenda', 'Agenda error running appointment ') as stream: + with self.getLoggerStream('synapse.lib.agenda') as stream: core.agenda._addTickOff(55) - self.true(await stream.wait(timeout=12)) + await stream.expect('Agenda error running appointment', timeout=12) await core.addUserRule(user, (True, ('storm',))) await core.addUserRule(user, (True, ('view', 'read')), gateiden=fork) - with self.getAsyncLoggerStream('synapse.storm.log', 'I am a cron job') as stream: + with self.getLoggerStream('synapse.storm.log') as stream: core.agenda._addTickOff(60) - self.true(await stream.wait(timeout=12)) + await stream.expect('I am a cron job', timeout=12) async def test_agenda_mirror_realtime(self): with self.getTestDir() as dirn: @@ -917,16 +918,16 @@ async def test_agenda_promotions(self): tasks01 = await core01.callStorm('return($lib.ps.list())') self.len(0, tasks01) - with self.getLoggerStream('synapse.lib.agenda', mesg='name=CRON99') as stream: + with self.getLoggerStream('synapse.lib.agenda') as stream: # Promote and inspect cortex status await core01.promote(graceful=True) self.false(core00.isactive) self.true(core01.isactive) + await stream.expect('name=CRON99', timeout=6) - stream.seek(0) - data = stream.read() + data = stream.getvalue() for ii in range(NUMJOBS): - self.isin(f' name=CRON{ii} with result "cancelled" took ', data) + self.isin(f' name=CRON{ii} with result \\"cancelled\\" took ', data) # Sync the (now) follower so the isrunning status gets updated to false on both cortexes await core00.sync() @@ -1106,11 +1107,11 @@ async def task(): async def test_agenda_warnings(self): async with self.getTestCore() as core: - with self.getAsyncLoggerStream('synapse.lib.agenda', 'issued warning: oh hai') as stream: + with self.getLoggerStream('synapse.lib.agenda') as stream: q = '$lib.warn("oh hai")' msgs = await core.stormlist('cron.at --now $q', opts={'vars': {'q': q}}) self.stormHasNoWarnErr(msgs) - self.true(await 
stream.wait(timeout=6)) + await stream.expect('issued warning: oh hai', timeout=6) async def test_agenda_graceful_promotion_with_running_cron(self): @@ -1142,8 +1143,8 @@ async def test_agenda_graceful_promotion_with_running_cron(self): async with self.getTestCore(conf=conf01) as core01: - with self.getAsyncLoggerStream('synapse.storm.log', 'I AM A ERROR LOG MESSAGE') as stream: - self.true(await stream.wait(timeout=6)) + with self.getLoggerStream('synapse.storm.log') as stream: + await stream.expect('I AM A ERROR LOG MESSAGE', timeout=6) cron = await core00.callStorm('return($lib.cron.list())') self.len(1, cron) @@ -1260,9 +1261,9 @@ async def test_cron_at_mirror_cleanup(self): msgs = await core00.stormlist('cron.at --minute +1 { $lib.log.info(cronran) }') await core01.sync() - with self.getAsyncLoggerStream('synapse.storm.log', 'cronran') as stream: + with self.getLoggerStream('synapse.storm.log') as stream: core00.agenda._addTickOff(60) - self.true(await stream.wait(timeout=12)) + await stream.expect('cronran', timeout=12) await core01.sync() @@ -1309,19 +1310,19 @@ async def test_cron_at_mirror_cleanup(self): core00.agenda.apptdefs.set(guid, apptdef) core01.agenda.apptdefs.set(guid, apptdef) - with self.getAsyncLoggerStream('synapse.lib.agenda', 'This appointment will be removed') as stream: + with self.getLoggerStream('synapse.lib.agenda') as stream: await core01.fini() core01 = await aha.enter_context(self.getTestCore(dirn=dirn01)) - self.true(await stream.wait(timeout=12)) + await stream.expect('This appointment will be removed', timeout=12) # Mirror warns about the invalid appointment but does not remove it self.nn(core01.agenda.apptdefs.get(guid)) self.nn(await core01.getAuthGate(guid)) - with self.getAsyncLoggerStream('synapse.lib.agenda', 'Removing invalid appointment') as stream: + with self.getLoggerStream('synapse.lib.agenda') as stream: await core00.fini() core00 = await aha.enter_context(self.getTestCore(dirn=dirn00)) - self.true(await 
stream.wait(timeout=12)) + await stream.expect('Removing invalid appointment', timeout=12) await core01.sync() @@ -1348,23 +1349,24 @@ async def test_cron_at_mirror_cleanup(self): core00.agenda.apptdefs.set(guid, apptdef) core01.agenda.apptdefs.set(guid, apptdef) - with self.getAsyncLoggerStream('synapse.lib.agenda', 'Removing invalid appointment') as stream: + with self.getLoggerStream('synapse.lib.agenda') as stream: + await core00.fini() core00 = await aha.enter_context(self.getTestCore(dirn=dirn00)) await core01.fini() core01 = await aha.enter_context(self.getTestCore(dirn=dirn01)) - self.true(await stream.wait(timeout=12)) + await stream.expect('Removing invalid appointment', timeout=12) await core01.sync() self.none(core00.agenda.apptdefs.get(guid)) self.none(core01.agenda.apptdefs.get(guid)) - with self.getAsyncLoggerStream('synapse.storm.log', 'I AM A ERROR') as stream: + with self.getLoggerStream('synapse.storm.log') as stream: q = "cron.at --now ${ while((true)) { $lib.log.error('I AM A ERROR') $lib.time.sleep(6) } }" await core00.nodes(q) - self.true(await stream.wait(timeout=12)) + await stream.expect('I AM A ERROR', timeout=12) await core01.sync() await core01.promote(graceful=True) diff --git a/synapse/tests/test_lib_aha.py b/synapse/tests/test_lib_aha.py index 0697984da65..57b9cb5f5dd 100644 --- a/synapse/tests/test_lib_aha.py +++ b/synapse/tests/test_lib_aha.py @@ -167,9 +167,9 @@ async def test_lib_aha_offon(self): # Tear down the Aha cell. 
await aha.__aexit__(None, None, None) - with self.getAsyncLoggerStream('synapse.lib.aha', f'Set [0.cell.synapse] offline.') as stream: + with self.getLoggerStream('synapse.lib.aha') as stream: async with self.getTestAha(dirn=dirn) as aha: - self.true(await asyncio.wait_for(stream.wait(), timeout=12)) + await stream.expect('Set [0.cell.synapse] offline.', timeout=12) svc = await aha.getAhaSvc('0.cell...') self.notin('online', svc.get('svcinfo')) @@ -691,21 +691,19 @@ async def test_lib_aha_provision(self): s_common.yamlsave(overconf, axonpath, 'cell.mods.yaml') # force a re-provision... (because the providen is different) - with self.getAsyncLoggerStream('synapse.lib.cell', - 'Provisioning axon from AHA service') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: async with await s_axon.Axon.initFromArgv((axonpath,)) as axon: - self.true(await stream.wait(6)) - self.ne(axon.conf.get('dmon:listen'), - 'tcp://0.0.0.0:0') + await stream.expect('Provisioning axon from AHA service', timeout=6) + self.ne(axon.conf.get('dmon:listen'), 'tcp://0.0.0.0:0') + overconf2 = s_common.yamlload(axonpath, 'cell.mods.yaml') self.eq(overconf2, {'nexslog:async': True}) # tests startup logic that recognizes it's already done - with self.getAsyncLoggerStream('synapse.lib.cell', ) as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: async with await s_axon.Axon.initFromArgv((axonpath,)) as axon: pass - stream.seek(0) - self.notin('Provisioning axon from AHA service', stream.read()) + self.notin('Provisioning axon from AHA service', stream.getvalue()) async with await s_axon.Axon.initFromArgv((axonpath,)) as axon: # testing second run... 
diff --git a/synapse/tests/test_lib_boss.py b/synapse/tests/test_lib_boss.py index d3131de9a3c..d0e7cefc985 100644 --- a/synapse/tests/test_lib_boss.py +++ b/synapse/tests/test_lib_boss.py @@ -50,8 +50,7 @@ async def testloop(): self.len(1, boss.ps()) - with self.getAsyncLoggerStream('synapse.lib.boss', - 'Iden specified for existing task') as stream: + with self.getLoggerStream('synapse.lib.boss') as stream: iden = s_common.guid() @@ -60,7 +59,7 @@ async def double_promote(): await boss.promote('double', root, taskiden=iden + iden) coro = boss.schedCoro(double_promote()) - self.true(await stream.wait(timeout=6)) + await stream.expect('Iden specified for existing task', timeout=6) await coro async with boss.shutdown_lock: diff --git a/synapse/tests/test_lib_cell.py b/synapse/tests/test_lib_cell.py index 353d6793695..6690455780d 100644 --- a/synapse/tests/test_lib_cell.py +++ b/synapse/tests/test_lib_cell.py @@ -7,6 +7,7 @@ import signal import socket import asyncio +import logging import tarfile import collections import multiprocessing @@ -32,6 +33,7 @@ import synapse.lib.nexus as s_nexus import synapse.lib.config as s_config import synapse.lib.certdir as s_certdir +import synapse.lib.logging as s_logging import synapse.lib.msgpack as s_msgpack import synapse.lib.version as s_version import synapse.lib.lmdbslab as s_lmdbslab @@ -42,6 +44,8 @@ import synapse.tests.utils as s_t_utils +logger = logging.getLogger(__name__) + # Defective versions of spawned backup processes def _sleeperProc(pipe, srcdir, dstdir, lmdbpaths, logconf): time.sleep(3.0) @@ -65,7 +69,7 @@ async def _iterBackupEOF(path, linkinfo): link.writer.write_eof() await link.fini() -def _backupEOF(path, linkinfo): +def _backupEOF(path, linkinfo, logconf): asyncio.run(_iterBackupEOF(path, linkinfo)) def lock_target(dirn, evt1): # pragma: no cover @@ -516,13 +520,13 @@ async def test_cell_auth(self): # @adminApi methods are allowed self.true(await proxy.adminOnly()) - mesg = "Executing 
[EchoAuthApi.adminOnlyLog] as [root] with args [(1, 2)[{'three': 4}]" - with self.getStructuredAsyncLoggerStream('synapse.lib.cell', mesg) as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: self.eq(await proxy.adminOnlyLog(1, 2, three=4), (1, 2, {'three': 4})) - self.true(await stream.wait(timeout=10)) + mesg = "Executing [EchoAuthApi.adminOnlyLog] as [root] with args [(1, 2)[{'three': 4}]" + await stream.expect('Admin API invoked', timeout=10) msgs = stream.jsonlines() self.len(1, msgs) - self.eq('EchoAuthApi.adminOnlyLog', msgs[0].get('wrapped_func')) + self.eq('EchoAuthApi.adminOnlyLog', msgs[0]['params'].get('func')) visi = await echo.auth.addUser('visi') await visi.setPasswd('foo') @@ -850,10 +854,10 @@ async def test_longpath(self): with self.getTestDir() as dirn: extrapath = 108 * 'A' longdirn = s_common.genpath(dirn, extrapath) - with self.getAsyncLoggerStream('synapse.lib.cell', 'LOCAL UNIX SOCKET WILL BE UNAVAILABLE') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: async with self.getTestCell(s_cell.Cell, dirn=longdirn) as cell: self.none(cell.dmon.addr) - self.true(await stream.wait(1)) + await stream.expect('LOCAL UNIX SOCKET WILL BE UNAVAILABLE', timeout=1) async def test_cell_setuser(self): @@ -1368,7 +1372,7 @@ async def test_cell_confprint(self): } s_common.yamlsave(conf, dirn, 'cell.yaml') - with self.getAsyncLoggerStream('synapse.lib.cell') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: async with await s_cell.Cell.initFromArgv([dirn]): pass stream.seek(0) @@ -1382,7 +1386,7 @@ async def test_cell_confprint(self): } s_common.yamlsave(conf, dirn, 'cell.yaml') - with self.getAsyncLoggerStream('synapse.lib.cell') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: async with await s_cell.Cell.initFromArgv([dirn]): pass stream.seek(0) @@ -1435,23 +1439,21 @@ async def test_initargv_failure(self): self.skip('Test requires /dev/null to exist.') async with 
self.withSetLoggingMock(): - with self.getAsyncLoggerStream('synapse.lib.cell', - 'Error starting cell at /dev/null') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: with self.raises(FileExistsError): async with await s_cell.Cell.initFromArgv(['/dev/null']): pass - self.true(await stream.wait(timeout=6)) + await stream.expect('Error starting cell at /dev/null', timeout=6) # Bad configs can also cause a failure. with self.getTestDir() as dirn: - with self.getAsyncLoggerStream('synapse.lib.cell', - 'Error while bootstrapping cell config') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: with self.raises(s_exc.BadConfValu) as cm: with self.setTstEnvars(SYN_CELL_AUTH_PASSWD="true"): # interpreted as a yaml bool true async with await s_cell.Cell.initFromArgv([dirn, ]): pass self.eq(cm.exception.get('name'), 'auth:passwd') - self.true(await stream.wait(timeout=6)) + await stream.expect('Error while bootstrapping cell config', timeout=6) async def test_cell_backup(self): @@ -2218,10 +2220,9 @@ async def test_mirror_badiden(self): async with self.getTestCell(s_cell.Cell, dirn=path01, conf={'nexslog:en': True}) as cell01: pass - with self.getAsyncLoggerStream('synapse.lib.nexus', - 'has different iden') as stream: + with self.getLoggerStream('synapse.lib.nexus') as stream: async with self.getTestCell(s_cell.Cell, dirn=path01, conf=conf01) as cell01: - await stream.wait(timeout=2) + await stream.expect('has different iden', timeout=2) self.true(await cell01.nexsroot.waitfini(6)) async def test_backup_restore_base(self): @@ -2256,16 +2257,15 @@ async def test_backup_restore_base(self): with self.setTstEnvars(SYN_RESTORE_HTTPS_URL=furl): with self.getTestDir() as cdir: # Restore works - with self.getAsyncLoggerStream('synapse.lib.cell', - 'Restoring cortex from SYN_RESTORE_HTTPS_URL') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: argv = [cdir, '--https', '0', '--telepath', 'tcp://127.0.0.1:0'] async with await 
s_cortex.Cortex.initFromArgv(argv) as core: - self.true(await stream.wait(6)) + await stream.expect('Restoring cortex from SYN_RESTORE_HTTPS_URL', timeout=6) self.len(1, await core.nodes('inet:ipv4=1.2.3.4')) self.true(core.conf.get('storm:log')) # Turning the service back on with the restore URL is fine too. - with self.getAsyncLoggerStream('synapse.lib.cell') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: argv = [cdir, '--https', '0', '--telepath', 'tcp://127.0.0.1:0'] async with await s_cortex.Cortex.initFromArgv(argv) as core: self.len(1, await core.nodes('inet:ipv4=1.2.3.4')) @@ -2293,11 +2293,10 @@ async def test_backup_restore_base(self): # all of the existing content of that directory. Remove the restore.done file # to force the restore from happening again. os.unlink(rpath) - with self.getAsyncLoggerStream('synapse.lib.cell', - 'Removing existing') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: argv = [cdir, '--https', '0', '--telepath', 'tcp://127.0.0.1:0'] async with await s_cortex.Cortex.initFromArgv(argv) as core: - self.true(await stream.wait(6)) + await stream.expect('Removing existing', timeout=6) self.len(1, await core.nodes('inet:ipv4=1.2.3.4')) # Restore a backup which has an existing restore.done file in it - that marker file will get overwritten @@ -2305,11 +2304,10 @@ async def test_backup_restore_base(self): with self.setTstEnvars(SYN_RESTORE_HTTPS_URL=furl2): with self.getTestDir() as cdir: # Restore works - with self.getAsyncLoggerStream('synapse.lib.cell', - 'Restoring cortex from SYN_RESTORE_HTTPS_URL') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: argv = [cdir, '--https', '0', '--telepath', 'tcp://127.0.0.1:0'] async with await s_cortex.Cortex.initFromArgv(argv) as core: - self.true(await stream.wait(6)) + await stream.expect('Restoring cortex from SYN_RESTORE_HTTPS_URL', timeout=6) self.len(1, await core.nodes('inet:ipv4=1.2.3.4')) rpath = s_common.genpath(cdir, 
'restore.done') @@ -2401,11 +2399,10 @@ async def test_backup_restore_aha(self): with self.setTstEnvars(SYN_RESTORE_HTTPS_URL=furl, SYN_CORTEX_AHA_PROVISION=purl): # Restore works - with self.getAsyncLoggerStream('synapse.lib.cell', - 'Restoring cortex from SYN_RESTORE_HTTPS_URL') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: argv = [bdr0, '--https', '0', '--telepath', 'tcp://127.0.0.1:0'] async with await s_cortex.Cortex.initFromArgv(argv) as bcree00: - self.true(await stream.wait(6)) + await stream.expect('Restoring cortex from SYN_RESTORE_HTTPS_URL', timeout=6) self.len(1, await bcree00.nodes('inet:asn=0')) self.len(1, await bcree00.nodes('[inet:asn=1234]')) @@ -2422,7 +2419,7 @@ async def test_backup_restore_aha(self): SYN_CORTEX_AHA_PROVISION=purl): argv = [bdr1, '--https', '0', '--telepath', 'tcp://127.0.0.1:0'] async with await s_cortex.Cortex.initFromArgv(argv) as bcree01: - self.true(await stream.wait(6)) + await stream.expect('Restoring cortex from SYN_RESTORE_HTTPS_URL', timeout=6) self.true(bcree00.isactive) self.false(bcree01.isactive) @@ -2497,11 +2494,10 @@ async def test_backup_restore_double_promote_aha(self): with self.setTstEnvars(SYN_RESTORE_HTTPS_URL=furl, SYN_CORTEX_AHA_PROVISION=purl): # Restore works - with self.getAsyncLoggerStream('synapse.lib.cell', - 'Restoring cortex from SYN_RESTORE_HTTPS_URL') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: argv = [bdr0, '--https', '0', '--telepath', 'tcp://127.0.0.1:0'] async with await s_cortex.Cortex.initFromArgv(argv) as bcree00: - self.true(await stream.wait(6)) + await stream.expect('Restoring cortex from SYN_RESTORE_HTTPS_URL', timeout=6) self.len(1, await bcree00.nodes('inet:asn=0')) self.len(1, await bcree00.nodes('[inet:asn=1234]')) @@ -2518,7 +2514,7 @@ async def test_backup_restore_double_promote_aha(self): SYN_CORTEX_AHA_PROVISION=purl): argv = [bdr1, '--https', '0', '--telepath', 'tcp://127.0.0.1:0'] async with await 
s_cortex.Cortex.initFromArgv(argv) as bcree01: - self.true(await stream.wait(6)) + await stream.expect('Restoring cortex from SYN_RESTORE_HTTPS_URL', timeout=6) self.true(bcree00.isactive) self.false(bcree01.isactive) @@ -2629,9 +2625,10 @@ async def wrapDelWriteHold(root, reason): # This tmp_reason assertion seems counter-intuitive at first; but it's really # asserting that the message which was incorrectly being logged is no longer logged. log_enable_writes = f'Free space on {core.dirn} above minimum threshold' - with self.getAsyncLoggerStream('synapse.lib.cell', log_enable_writes) as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: await core.nexsroot.addWriteHold(tmp_reason := 'something else') - self.false(await stream.wait(1)) + with self.raises(s_exc.SynErr): + await stream.expect(log_enable_writes, timeout=1) stream.seek(0) self.eq(stream.read(), '') @@ -2725,10 +2722,9 @@ def spaceexc(self): viewiden = view.get('iden') opts = {'view': viewiden} - with self.getAsyncLoggerStream('synapse.lib.lmdbslab', - 'Error during slab resize callback - foo') as stream: + with self.getLoggerStream('synapse.lib.lmdbslab') as stream: msgs = await core.stormlist('for $x in $lib.range(200) {[test:int=$x]}', opts=opts) - self.true(await stream.wait(timeout=30)) + await stream.expect('Error during slab resize callback - foo', timeout=30) async with self.getTestCore() as core: @@ -2762,7 +2758,7 @@ async def test_cell_onboot_optimize(self): lmdbfile = s_common.genpath(dirn, 'layers', layriden, 'layer_v2.lmdb', 'data.mdb') stat00 = os.stat(lmdbfile) - with self.getAsyncLoggerStream('synapse.lib.cell') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: conf = {'onboot:optimize': True} async with self.getTestCore(dirn=dirn, conf=conf) as core: @@ -2785,7 +2781,7 @@ def diffdev(path): return real with mock.patch('os.stat', diffdev): - with self.getAsyncLoggerStream('synapse.lib.cell') as stream: + with self.getLoggerStream('synapse.lib.cell') as 
stream: conf = {'onboot:optimize': True} async with self.getTestCore(dirn=dirn, conf=conf) as core: @@ -2800,7 +2796,7 @@ def diffdev(path): async with self.getTestCore(dirn=dirn) as core: await core.runBackup() - with self.getAsyncLoggerStream('synapse.lib.cell') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: conf = {'onboot:optimize': True} async with self.getTestCore(dirn=dirn, conf=conf) as core: @@ -3241,7 +3237,7 @@ async def test_cell_nexus_compat(self): async def test_cell_hive_migration(self): - with self.getAsyncLoggerStream('synapse.lib.cell') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: async with self.getRegrCore('hive-migration') as core: visi = await core.auth.getUserByName('visi') @@ -3309,7 +3305,7 @@ async def test_cell_check_sysctl(self): sysvals['vm.dirty_writeback_centisecs'] += 1 # Detect and report incorrect values - with self.getStructuredAsyncLoggerStream('synapse.lib.cell') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: with mock.patch.object(s_cell.Cell, 'SYSCTL_VALS', sysvals): async with self.getTestCore(conf={'health:sysctl:checks': True}): pass @@ -3322,7 +3318,7 @@ async def test_cell_check_sysctl(self): mesg += 'See https://synapse.docs.vertex.link/en/latest/synapse/devopsguide.html#performance-tuning ' mesg += 'for information about these sysctl parameters.' 
self.eq(msgs[0]['message'], mesg) - self.eq(msgs[0]['sysctls'], [ + self.eq(msgs[0]['params']['sysctls'], [ {'name': 'vm.dirty_expire_centisecs', 'expected': 21, 'actual': sysctls['vm.dirty_expire_centisecs']}, {'name': 'vm.dirty_writeback_centisecs', 'expected': 21, 'actual': sysctls['vm.dirty_writeback_centisecs']}, ]) @@ -3498,10 +3494,10 @@ def __init__(self, proxy=None): async def proxy(self, timeout=None): return self._proxy - with self.getAsyncLoggerStream('synapse.lib.cell', 'AHA client connection failed.') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: cell.ahaclient = MockAhaClient() self.none(await cell.getAhaProxy()) - self.true(await stream.wait(timeout=1)) + await stream.expect('AHA client connection failed.', timeout=1) class MockProxyHasNot: def _hasTeleFeat(self, name, vers): @@ -3553,11 +3549,11 @@ async def mock_executor(func, *args, **kwargs): return mock_proc with mock.patch('synapse.lib.cell.s_coro.executor', mock_executor): - with self.getAsyncLoggerStream('synapse.lib.cell', 'Error during backup streaming') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: with self.raises(Exception) as cm: async for _ in proxy.iterBackupArchive('bkup'): pass - self.true(await stream.wait(timeout=6)) + await stream.expect('Error during backup streaming', timeout=6) async def test_iter_new_backup_archive(self): @@ -3575,13 +3571,13 @@ async def mock_runBackup(*args, **kwargs): raise Exception('backup failed') with mock.patch.object(s_cell.Cell, 'runBackup', mock_runBackup): - with self.getAsyncLoggerStream('synapse.lib.cell', 'Removing') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: with self.raises(s_exc.SynErr) as cm: async for _ in proxy.iterNewBackupArchive('failedbackup', remove=True): pass self.isin('backup failed', str(cm.exception)) - self.true(await stream.wait(timeout=6)) + await stream.expect('Removing', timeout=6) path = os.path.join(backdirn, 'failedbackup') 
self.false(os.path.exists(path)) @@ -3688,3 +3684,48 @@ async def bg_coro(): items.append(item) self.len(1, items) self.eq('sync', items[0][1]) + + async def test_cell_logging(self): + + s_logging.setup() + + async with self.getTestAha() as aha: + + # test some of the gather API implementations... + purl00 = await aha.addAhaSvcProv('00.cell') + cell00 = await aha.enter_context(self.getTestCell(conf={'aha:provision': purl00})) + + with self.getLoggerStream('synapse.tests.test_lib_cell') as stream: + # confirm last-one-wins "service" key is always initialized + logger.warning('oh hai there!') + mesg = stream.jsonlines()[0] + self.eq(mesg['service'], '00.cell.synapse') + + async with cell00.getLocalProxy() as proxy: + + logs = await proxy.logs() + + self.isin('oh hai there!', [m['message'] for m in logs]) + self.isin('00.cell.synapse', [m.get('service') for m in logs]) + + event = asyncio.Event() + async def sendlogs(): + await event.wait() + for i in range(4): + extra = cell00.getLogExtra(indx=i) + logger.warning('stream of logs', extra=extra) + await asyncio.sleep(0.01) + + task = cell00.schedCoro(sendlogs()) + + logs = [] + async for log in proxy.watch(): + + event.set() + if log['message'] == 'stream of logs': + logs.append(log) + + if len(logs) == 4: + break + + await task diff --git a/synapse/tests/test_lib_config.py b/synapse/tests/test_lib_config.py index 9cefc79f992..8fe9278c166 100644 --- a/synapse/tests/test_lib_config.py +++ b/synapse/tests/test_lib_config.py @@ -45,14 +45,14 @@ async def test_config_basics(self): # We can make an argparser that has config options populated in it # We explicitly skip boolean options without a default value so we # done end up in a ambiguous toggle case down the road. - mesg = 'Boolean type is missing default information. 
' \ - 'Will not form argparse for [key:bool:nodefval]' pars = argparse.ArgumentParser('synapse.tests.test_lib_config.basics') pars.add_argument('--beep', type=str, help='beep option', default='beep.sys') - with self.getLoggerStream('synapse.lib.config', mesg) as stream: + with self.getLoggerStream('synapse.lib.config') as stream: for optname, optinfo in conf.getArgParseArgs(): pars.add_argument(optname, **optinfo) - self.true(stream.wait(3)) + await stream.expect('Boolean type is missing default information.', timeout=3) + await stream.expect('Will not form argparse for [key:bool:nodefval]', timeout=3) + hmsg = pars.format_help() # Undo pretty-printing @@ -239,10 +239,10 @@ async def test_config_basics(self): s_common.yamlsave({'key:integer': 5678, 'key:string': 'haha'}, dirn, '3.yaml') fp = s_common.genpath(dirn, '3.yaml') - with self.getAsyncLoggerStream('synapse.lib.config') as stream: + with self.getLoggerStream('synapse.lib.config') as stream: conf3.setConfFromFile(fp, force=True) - stream.seek(0) - buf = stream.read() + + buf = stream.getvalue() self.isin('Set configuration override for [key:integer]', buf) self.notin('Set configuration override for [key:string]', buf) self.eq(conf3.get('key:integer'), 5678) diff --git a/synapse/tests/test_lib_dyndeps.py b/synapse/tests/test_lib_dyndeps.py index 7e28db01ae9..47b7b7344f4 100644 --- a/synapse/tests/test_lib_dyndeps.py +++ b/synapse/tests/test_lib_dyndeps.py @@ -13,10 +13,10 @@ def woot(x, y=30): class DynDepsTest(s_t_utils.SynTest): - def test_dyndeps_dynmod(self): - with self.getLoggerStream('synapse.lib.dyndeps', 'Failed to import "- -"') as stream: + async def test_dyndeps_dynmod(self): + with self.getLoggerStream('synapse.lib.dyndeps') as stream: self.none(s_dyndeps.getDynMod('- -')) - self.true(stream.wait(1)) + await stream.expect('Failed to import \\"- -\\"', timeout=1) self.nn(s_dyndeps.getDynMod('sys')) def test_dyndeps_dynloc(self): diff --git a/synapse/tests/test_lib_httpapi.py 
b/synapse/tests/test_lib_httpapi.py index d2fc26c295a..41c24c40dd6 100644 --- a/synapse/tests/test_lib_httpapi.py +++ b/synapse/tests/test_lib_httpapi.py @@ -439,33 +439,33 @@ async def test_http_auth(self): async with self.getHttpSess() as sess: info = {'user': 'hehe', 'passwd': 'newp'} - with self.getAsyncLoggerStream('synapse.lib.httpapi', 'No such user.') as stream: + with self.getLoggerStream('synapse.lib.httpapi') as stream: async with sess.post(f'https://localhost:{port}/api/v1/login', json=info) as resp: self.eq(resp.status, http.HTTPStatus.NOT_FOUND) item = await resp.json() self.eq('AuthDeny', item.get('code')) - self.true(await stream.wait(timeout=6)) + await stream.expect('No such user.', timeout=6) async with self.getHttpSess() as sess: info = {'user': 'visi', 'passwd': 'secret'} await core.setUserLocked(visiiden, True) - with self.getAsyncLoggerStream('synapse.lib.httpapi', 'User is locked.') as stream: + with self.getLoggerStream('synapse.lib.httpapi') as stream: async with sess.post(f'https://localhost:{port}/api/v1/login', json=info) as resp: self.eq(resp.status, http.HTTPStatus.FORBIDDEN) item = await resp.json() self.eq('AuthDeny', item.get('code')) - self.true(await stream.wait(timeout=6)) + await stream.expect('User is locked.', timeout=6) await core.setUserLocked(visiiden, False) async with self.getHttpSess() as sess: info = {'user': 'visi', 'passwd': 'borked'} - with self.getAsyncLoggerStream('synapse.lib.httpapi', 'Incorrect password.') as stream: + with self.getLoggerStream('synapse.lib.httpapi') as stream: async with sess.post(f'https://localhost:{port}/api/v1/login', json=info) as resp: self.eq(resp.status, http.HTTPStatus.FORBIDDEN) item = await resp.json() self.eq('AuthDeny', item.get('code')) - self.true(await stream.wait(timeout=6)) + await stream.expect('Incorrect password.', timeout=6) async with self.getHttpSess() as sess: info = {'user': 'visi', 'passwd': 'secret'} @@ -518,28 +518,28 @@ async def test_http_auth(self): item = await 
resp.json() self.eq('ok', item.get('status')) - with self.getAsyncLoggerStream('synapse.lib.httpapi', 'No such user.') as stream: + with self.getLoggerStream('synapse.lib.httpapi') as stream: async with sess.get(f'https://localhost:{port}/api/v1/auth/users', auth=heheauth) as resp: self.eq(resp.status, http.HTTPStatus.UNAUTHORIZED) item = await resp.json() self.eq('NotAuthenticated', item.get('code')) - self.true(await stream.wait(timeout=12)) + await stream.expect('No such user.', timeout=12) await core.setUserLocked(visiiden, True) - with self.getAsyncLoggerStream('synapse.lib.httpapi', 'User is locked.') as stream: + with self.getLoggerStream('synapse.lib.httpapi') as stream: async with sess.get(f'https://localhost:{port}/api/v1/auth/users', auth=visiauth) as resp: self.eq(resp.status, http.HTTPStatus.UNAUTHORIZED) item = await resp.json() self.eq('NotAuthenticated', item.get('code')) - self.true(await stream.wait(timeout=12)) + await stream.expect('User is locked.', timeout=12) await core.setUserLocked(visiiden, False) - with self.getAsyncLoggerStream('synapse.lib.httpapi', 'Incorrect password.') as stream: + with self.getLoggerStream('synapse.lib.httpapi') as stream: async with sess.get(f'https://localhost:{port}/api/v1/auth/users', auth=newpauth) as resp: self.eq(resp.status, http.HTTPStatus.UNAUTHORIZED) item = await resp.json() self.eq('NotAuthenticated', item.get('code')) - self.true(await stream.wait(timeout=12)) + await stream.expect('Incorrect password.', timeout=12) headers = {'Authorization': 'yermom'} async with sess.get(f'https://localhost:{port}/api/v1/auth/users', headers=headers) as resp: @@ -1916,11 +1916,6 @@ async def test_http_sess_mirror(self): async def test_request_logging(self): - def get_mesg(stream: s_tests.AsyncStreamEvent) -> dict: - msgs = stream.jsonlines() - self.len(1, msgs) - return msgs[0] - async with self.getTestCore() as core: # structlog tests @@ -1936,7 +1931,7 @@ def get_mesg(stream: s_tests.AsyncStreamEvent) -> dict: # 
Basic-auth - with self.getStructuredAsyncLoggerStream(logname, 'api/v1/auth/adduser') as stream: + with self.getLoggerStream(logname) as stream: headers = { 'X-Forwarded-For': '1.2.3.4', @@ -1948,57 +1943,57 @@ def get_mesg(stream: s_tests.AsyncStreamEvent) -> dict: self.nn(item.get('result').get('iden')) visiiden = item['result']['iden'] self.eq(resp.status, http.HTTPStatus.OK) - self.true(await stream.wait(6)) - - mesg = get_mesg(stream) - self.eq(mesg.get('uri'), '/api/v1/auth/adduser') - self.eq(mesg.get('username'), 'root') - self.eq(mesg.get('user'), core.auth.rootuser.iden) - self.isin('headers', mesg) - self.eq(mesg['headers'].get('user-agent'), 'test_request_logging') - self.isin('remoteip', mesg) + await stream.expect('api/v1/auth/adduser', timeout=6) + + mesg = stream.jsonlines()[0] + self.eq(mesg['params'].get('uri'), '/api/v1/auth/adduser') + self.eq(mesg['params'].get('username'), 'root') + self.eq(mesg['params'].get('user'), core.auth.rootuser.iden) + self.isin('headers', mesg['params']) + self.eq(mesg['params']['headers'].get('user-agent'), 'test_request_logging') + self.isin('remoteip', mesg['params']) self.isin('(root)', mesg.get('message')) self.isin('200 POST /api/v1/auth/adduser', mesg.get('message')) self.notin('1.2.3.4', mesg.get('message')) # No auth provided - with self.getStructuredAsyncLoggerStream(logname, 'api/v1/active') as stream: + with self.getLoggerStream(logname) as stream: async with sess.get(f'https://root:root@localhost:{port}/api/v1/active', skip_auto_headers=['User-Agent']) as resp: self.eq(resp.status, http.HTTPStatus.OK) - self.true(await stream.wait(6)) - - mesg = get_mesg(stream) - self.eq(mesg.get('uri'), '/api/v1/active') - self.notin('headers', mesg) - self.notin('username', mesg) - self.notin('user', mesg) - self.isin('remoteip', mesg) + await stream.expect('api/v1/active', timeout=6) + + mesg = stream.jsonlines()[0] + self.eq(mesg['params'].get('uri'), '/api/v1/active') + self.notin('headers', mesg['params']) + 
self.notin('username', mesg['params']) + self.notin('user', mesg['params']) + self.isin('remoteip', mesg['params']) self.isin('200 GET /api/v1/active', mesg.get('message')) # Sessions populate the data too async with self.getHttpSess() as sess: # api/v1/login populates the data - with self.getStructuredAsyncLoggerStream(logname, 'api/v1/login') as stream: + with self.getLoggerStream(logname) as stream: async with sess.post(f'https://localhost:{port}/api/v1/login', json={'user': 'visi', 'passwd': 'secret'}) as resp: self.eq(resp.status, http.HTTPStatus.OK) - self.true(await stream.wait(6)) + await stream.expect('api/v1/login', timeout=6) - mesg = get_mesg(stream) - self.eq(mesg.get('uri'), '/api/v1/login') - self.eq(mesg.get('username'), 'visi') - self.eq(mesg.get('user'), visiiden) + mesg = stream.jsonlines()[0] + self.eq(mesg['params'].get('uri'), '/api/v1/login') + self.eq(mesg['params'].get('username'), 'visi') + self.eq(mesg['params'].get('user'), visiiden) # session cookie loging populates the data upon reuse - with self.getStructuredAsyncLoggerStream(logname, 'api/v1/auth/users') as stream: + with self.getLoggerStream(logname) as stream: async with sess.get(f'https://localhost:{port}/api/v1/auth/users') as resp: self.eq(resp.status, http.HTTPStatus.OK) - self.true(await stream.wait(6)) + await stream.expect('api/v1/auth/users', timeout=6) - mesg = get_mesg(stream) - self.eq(mesg.get('uri'), '/api/v1/auth/users') - self.eq(mesg.get('username'), 'visi') - self.eq(mesg.get('user'), visiiden) + mesg = stream.jsonlines()[0] + self.eq(mesg['params'].get('uri'), '/api/v1/auth/users') + self.eq(mesg['params'].get('username'), 'visi') + self.eq(mesg['params'].get('user'), visiiden) async with self.getTestCore(conf={'https:parse:proxy:remoteip': True}) as core: @@ -2015,38 +2010,38 @@ def get_mesg(stream: s_tests.AsyncStreamEvent) -> dict: # Basic-auth - with self.getStructuredAsyncLoggerStream(logname, 'api/v1/auth/adduser') as stream: + with 
self.getLoggerStream(logname) as stream: async with sess.post(f'https://root:root@localhost:{port}/api/v1/auth/adduser', json=info, headers={'X-Forwarded-For': '1.2.3.4'}) as resp: item = await resp.json() self.nn(item.get('result').get('iden')) self.eq(resp.status, http.HTTPStatus.OK) - self.true(await stream.wait(6)) + await stream.expect('api/v1/auth/adduser', timeout=6) - mesg = get_mesg(stream) - self.eq(mesg.get('uri'), '/api/v1/auth/adduser') - self.eq(mesg.get('username'), 'root') - self.eq(mesg.get('user'), core.auth.rootuser.iden) - self.eq(mesg.get('remoteip'), '1.2.3.4') + mesg = stream.jsonlines()[0] + self.eq(mesg['params'].get('uri'), '/api/v1/auth/adduser') + self.eq(mesg['params'].get('username'), 'root') + self.eq(mesg['params'].get('user'), core.auth.rootuser.iden) + self.eq(mesg['params'].get('remoteip'), '1.2.3.4') self.isin('(root)', mesg.get('message')) self.isin('200 POST /api/v1/auth/adduser', mesg.get('message')) info = {'name': 'charles', 'passwd': 'secret', 'admin': True} - with self.getStructuredAsyncLoggerStream(logname, 'api/v1/auth/adduser') as stream: + with self.getLoggerStream(logname) as stream: async with sess.post(f'https://root:root@localhost:{port}/api/v1/auth/adduser', json=info, headers={'X-Real-Ip': '8.8.8.8'}) as resp: item = await resp.json() self.nn(item.get('result').get('iden')) self.eq(resp.status, http.HTTPStatus.OK) - self.true(await stream.wait(6)) + await stream.expect('api/v1/auth/adduser', timeout=6) - mesg = get_mesg(stream) - self.eq(mesg.get('uri'), '/api/v1/auth/adduser') - self.eq(mesg.get('username'), 'root') - self.eq(mesg.get('user'), core.auth.rootuser.iden) - self.eq(mesg.get('remoteip'), '8.8.8.8') + mesg = stream.jsonlines()[0] + self.eq(mesg['params'].get('uri'), '/api/v1/auth/adduser') + self.eq(mesg['params'].get('username'), 'root') + self.eq(mesg['params'].get('user'), core.auth.rootuser.iden) + self.eq(mesg['params'].get('remoteip'), '8.8.8.8') self.isin('(root)', mesg.get('message')) 
self.isin('200 POST /api/v1/auth/adduser', mesg.get('message')) @@ -2200,10 +2195,9 @@ async def test_http_locked_admin(self): item = await resp.json() self.eq('ok', item.get('status')) - with self.getAsyncLoggerStream('synapse.lib.cell', - 'Invalidated HTTP session for locked user visi') as stream: + with self.getLoggerStream('synapse.lib.cell') as stream: await core.setUserLocked(visi.iden, True) - self.true(await stream.wait(timeout=2)) + await stream.expect('Invalidated HTTP session for locked user visi', timeout=2) resp = await sess.get(f'{root}/api/v1/auth/users') self.eq(resp.status, http.HTTPStatus.UNAUTHORIZED) diff --git a/synapse/tests/test_lib_link.py b/synapse/tests/test_lib_link.py index 8327545cd99..a48dbb5e336 100644 --- a/synapse/tests/test_lib_link.py +++ b/synapse/tests/test_lib_link.py @@ -121,9 +121,9 @@ async def onlink(link): self.eq(msg0, ('what', {'k': 1})) evt.set() await asyncio.sleep(0) - with self.getAsyncLoggerStream('synapse.lib.link', 'rx closed unexpectedly') as stream: + with self.getLoggerStream('synapse.lib.link') as stream: msg1 = await link.rx() - self.true(await stream.wait(6)) + await stream.expect('rx closed unexpectedly', timeout=6) self.none(msg1) async def test_link_file(self): diff --git a/synapse/tests/test_lib_lmdbslab.py b/synapse/tests/test_lib_lmdbslab.py index a9d35a2f6dc..0988a067015 100644 --- a/synapse/tests/test_lib_lmdbslab.py +++ b/synapse/tests/test_lib_lmdbslab.py @@ -351,19 +351,19 @@ async def test_lmdbslab_commit_warn(self): with self.getTestDir() as dirn, patch('synapse.lib.lmdbslab.Slab.WARN_COMMIT_TIME_MS', 1), \ patch('synapse.common.now', self.simplenow): path = os.path.join(dirn, 'test.lmdb') - with self.getStructuredAsyncLoggerStream('synapse.lib.lmdbslab', 'Commit with') as stream: + with self.getLoggerStream('synapse.lib.lmdbslab') as stream: async with await s_lmdbslab.Slab.anit(path, map_size=100000) as slab: foo = slab.initdb('foo', dupsort=True) byts = b'\x00' * 256 for i in range(10): 
slab.put(b'\xff\xff\xff\xff' + s_common.guid(i).encode('utf8'), byts, db=foo) - self.true(await stream.wait(timeout=1)) + await stream.expect('Commit with', timeout=1) msgs = stream.jsonlines() self.gt(len(msgs), 0) - self.nn(msgs[0].get('delta')) - self.nn(msgs[0].get('path')) - self.nn(msgs[0].get('xactopslen')) + self.nn(msgs[0]['params'].get('delta')) + self.nn(msgs[0]['params'].get('path')) + self.nn(msgs[0]['params'].get('xactopslen')) self.sorteq([ 'vm.swappiness', 'vm.dirty_expire_centisecs', @@ -372,7 +372,7 @@ async def test_lmdbslab_commit_warn(self): 'vm.dirty_background_bytes', 'vm.dirty_ratio', 'vm.dirty_bytes', - ], msgs[0].get('sysctls', {}).keys()) + ], msgs[0]['params'].get('sysctls', {}).keys()) async def test_lmdbslab_commit_over_max_xactops(self): diff --git a/synapse/tests/test_lib_logging.py b/synapse/tests/test_lib_logging.py new file mode 100644 index 00000000000..eb8d14fefa3 --- /dev/null +++ b/synapse/tests/test_lib_logging.py @@ -0,0 +1,79 @@ +import asyncio +import logging + +import synapse.exc as s_exc + +import synapse.lib.base as s_base +import synapse.lib.logging as s_logging + +import synapse.tests.utils as s_test + +logger = logging.getLogger(__name__) + +class LoggingTest(s_test.SynTest): + + async def test_lib_logging(self): + + s_logging.setup() + + self.eq(10, s_logging.normLogLevel(' 10 ')) + self.eq(10, s_logging.normLogLevel(10)) + self.eq(20, s_logging.normLogLevel(' inFo\n')) + + with self.raises(s_exc.BadArg): + s_logging.normLogLevel(100) + + with self.raises(s_exc.BadArg): + s_logging.normLogLevel('BEEP') + + with self.raises(s_exc.BadArg): + s_logging.normLogLevel('12') + + with self.raises(s_exc.BadArg): + s_logging.normLogLevel({'key': 'newp'}) + + s_logging.reset() + + self.none(s_logging.StreamHandler._pump_task) + self.none(s_logging.StreamHandler._pump_event) + self.len(0, s_logging.StreamHandler._logs_fifo) + self.len(0, s_logging.StreamHandler._text_todo) + self.len(0, s_logging.StreamHandler._logs_todo) + + 
with self.getLoggerStream('synapse.tests.test_lib_logging') as stream: + + try: + try: + raise s_exc.SynErr(mesg='omg') + except Exception as e1: + e1.add_note('inner note') + raise s_exc.NoSuchType(mesg='woot') from e1 + except Exception as e0: + e0.add_note('outer note') + logger.exception(e0) + + mesg = stream.jsonlines()[0] + self.eq(mesg['error']['code'], 'NoSuchType') + self.eq(mesg['error']['notes'], ('outer note',)) + self.eq(mesg['error']['cause']['code'], 'SynErr') + self.eq(mesg['error']['cause']['notes'], ('inner note',)) + + self.none(mesg['error'].get('context')) + + stream.clear() + + try: + try: + raise s_exc.SynErr(mesg='omg') + except Exception as e1: + e1.add_note('inner note') + raise s_exc.NoSuchType(mesg='woot') + except Exception as e0: + e0.add_note('outer note') + logger.exception(e0) + + mesg = stream.jsonlines()[0] + self.eq(mesg['error']['code'], 'NoSuchType') + self.eq(mesg['error']['notes'], ('outer note',)) + self.eq(mesg['error']['context']['code'], 'SynErr') + self.eq(mesg['error']['context']['notes'], ('inner note',)) diff --git a/synapse/tests/test_lib_modelrev.py b/synapse/tests/test_lib_modelrev.py index 6e818ea62ea..1564cab19f7 100644 --- a/synapse/tests/test_lib_modelrev.py +++ b/synapse/tests/test_lib_modelrev.py @@ -389,10 +389,9 @@ async def test_modelrev_0_2_19(self): self.len(1, await core.nodes('ou:campname="operation overlord" -> ou:campaign')) self.len(1, await core.nodes('risk:vuln:type:taxonomy="cyber.int_overflow" -> risk:vuln')) - with self.getAsyncLoggerStream('synapse.lib.modelrev', - 'error re-norming risk:vuln:type=foo.bar...newp') as stream: + with self.getLoggerStream('synapse.lib.modelrev') as stream: async with self.getRegrCore('model-0.2.19-bad-risk-types') as core: - self.true(await stream.wait(timeout=6)) + await stream.expect('error re-norming risk:vuln:type=foo.bar...newp', timeout=6) self.len(5, await core.nodes('risk:vuln')) self.len(4, await core.nodes('risk:vuln:type')) nodes = await 
core.nodes('yield $lib.lift.byNodeData(_migrated:risk:vuln:type)') diff --git a/synapse/tests/test_lib_module.py b/synapse/tests/test_lib_module.py index 0a21a363888..effdda02f94 100644 --- a/synapse/tests/test_lib_module.py +++ b/synapse/tests/test_lib_module.py @@ -71,15 +71,15 @@ async def test_basics(self): async def test_load_failures(self): async with self.getTestCore() as core: # type: s_cortex.Cortex with self.setTstEnvars(SYN_TEST_MOD_FAIL_PRE=1) as cm: - with self.getAsyncLoggerStream('synapse.cortex', 'preCoreModuleFail') as stream: + with self.getLoggerStream('synapse.cortex') as stream: self.none(await core.loadCoreModule(foo_ctor)) - self.true(await stream.wait(1)) + await stream.expect('preCoreModuleFail', timeout=1) self.none(core.getCoreMod(foo_ctor)) with self.setTstEnvars(SYN_TEST_MOD_FAIL_INIT=1) as cm: - with self.getAsyncLoggerStream('synapse.cortex', 'initCoreModuleFail') as stream: + with self.getLoggerStream('synapse.cortex') as stream: self.none(await core.loadCoreModule(foo_ctor)) - self.true(await stream.wait(1)) + await stream.expect('initCoreModuleFail', timeout=1) self.none(core.getCoreMod(foo_ctor)) with self.getTestDir(mirror='testcore') as dirn: @@ -89,13 +89,13 @@ async def test_load_failures(self): conf = s_common.yamlload(dirn, 'cell.yaml') with self.setTstEnvars(SYN_TEST_MOD_FAIL_PRE=1) as cm: - with self.getAsyncLoggerStream('synapse.cortex', 'preCoreModuleFail') as stream: + with self.getLoggerStream('synapse.cortex') as stream: async with await s_cortex.Cortex.anit(dirn) as core: - self.true(await stream.wait(1)) + await stream.expect('preCoreModuleFail', timeout=1) self.none(core.getCoreMod(foo_ctor)) with self.setTstEnvars(SYN_TEST_MOD_FAIL_INIT=1) as cm: - with self.getAsyncLoggerStream('synapse.cortex', 'initCoreModuleFail') as stream: + with self.getLoggerStream('synapse.cortex') as stream: async with await s_cortex.Cortex.anit(dirn) as core: - self.true(await stream.wait(1)) + await stream.expect('initCoreModuleFail', 
timeout=1) self.none(core.getCoreMod(foo_ctor)) diff --git a/synapse/tests/test_lib_multislabseqn.py b/synapse/tests/test_lib_multislabseqn.py index 6da98ec69b3..9032730908f 100644 --- a/synapse/tests/test_lib_multislabseqn.py +++ b/synapse/tests/test_lib_multislabseqn.py @@ -339,10 +339,10 @@ async def test_multislabseqn_discover(self): shutil.rmtree(slab0dirn) s_json.jssave('{}', slab0dirn) - with self.getAsyncLoggerStream('synapse.lib.multislabseqn', 'non-directory') as stream: + with self.getLoggerStream('synapse.lib.multislabseqn') as stream: async with await s_multislabseqn.MultiSlabSeqn.anit(baddirn) as msqn: await self.agenlen(15, msqn.iter(0)) - await stream.wait(timeout=1) + await stream.expect('non-directory', timeout=1) # Switcheroo @@ -367,10 +367,10 @@ async def test_multislabseqn_discover(self): slab10dirn = s_common.genpath(baddirn, f'seqn{"0" * 14}0a.lmdb') shutil.rmtree(slab10dirn) - with self.getAsyncLoggerStream('synapse.lib.multislabseqn', 'gap in indices') as stream: + with self.getLoggerStream('synapse.lib.multislabseqn') as stream: async with await s_multislabseqn.MultiSlabSeqn.anit(baddirn) as msqn: await self.agenlen(15, msqn.iter(0)) - await stream.wait(timeout=1) + await stream.expect('gap in indices', timeout=1) # Wipe a seqn clean baddirn = s_common.genpath(dirn, 'bad4') diff --git a/synapse/tests/test_lib_storm.py b/synapse/tests/test_lib_storm.py index e4f2b512870..144c5e8869d 100644 --- a/synapse/tests/test_lib_storm.py +++ b/synapse/tests/test_lib_storm.py @@ -1762,12 +1762,10 @@ async def test_storm_diff_merge(self): await visi.addRule((True, ('node', 'add')), gateiden=lowriden) - with self.getAsyncLoggerStream('synapse.lib.snap') as stream: + with self.getLoggerStream('synapse.lib.snap') as stream: await core.stormlist('ou:name | merge --apply', opts=altview) - stream.seek(0) - buf = stream.read() - self.notin("No form named None", buf) + self.notin("No form named None", stream.getvalue()) await core.nodes('[ ou:name=baz ]') 
await core.nodes('ou:name=baz [ +#new.tag .seen=now ]', opts=altview) @@ -1784,12 +1782,10 @@ async def test_storm_diff_merge(self): newn = await core.nodes('[ ou:name=readonly ]') self.ne(oldn[0].props['.created'], newn[0].props['.created']) - with self.getAsyncLoggerStream('synapse.lib.snap') as stream: + with self.getLoggerStream('synapse.lib.snap') as stream: await core.stormlist('ou:name | merge --apply', opts=altview) - stream.seek(0) - buf = stream.read() - self.notin("Property is read only: ou:name.created", buf) + self.notin("Property is read only: ou:name.created", stream.getvalue()) newn = await core.nodes('ou:name=readonly') self.eq(oldn[0].props['.created'], newn[0].props['.created']) @@ -1804,12 +1800,10 @@ async def test_storm_diff_merge(self): q = 'ou:name=readonly2 | movenodes --apply --srclayers $lib.view.get().layers.2.iden' await core.nodes(q, opts=altview2) - with self.getAsyncLoggerStream('synapse.lib.snap') as stream: + with self.getLoggerStream('synapse.lib.snap') as stream: await core.stormlist('ou:name | merge --apply', opts=altview2) - stream.seek(0) - buf = stream.read() - self.notin("Property is read only: ou:name.created", buf) + self.notin("Property is read only: ou:name.created", stream.getvalue()) newn = await core.nodes('ou:name=readonly2', opts=altview) self.eq(oldn[0].props['.created'], newn[0].props['.created']) @@ -2591,29 +2585,32 @@ async def test_storm_dmon_user_locked(self): name=hehedmon))''' ddef0 = await asvisi.callStorm(q) - with self.getAsyncLoggerStream('synapse.lib.storm', 'user is locked') as stream: + with self.getLoggerStream('synapse.lib.storm') as stream: await visi.setLocked(True) q = 'return($lib.dmon.bump($iden))' self.true(await core.callStorm(q, opts={'vars': {'iden': ddef0['iden']}})) - self.true(await stream.wait(2)) + await stream.expect('user is locked', timeout=2) async def test_storm_dmon_user_autobump(self): async with self.getTestCore() as core: visi = await core.auth.addUser('visi') await 
visi.addRule((True, ('dmon', 'add'))) async with core.getLocalProxy(user='visi') as asvisi: - with self.getAsyncLoggerStream('synapse.lib.storm', 'Dmon query exited') as stream: + with self.getLoggerStream('synapse.lib.storm') as stream: q = '''return($lib.dmon.add(${{ $lib.print(foobar) $lib.time.sleep(10) }}, name=hehedmon))''' await asvisi.callStorm(q) + await stream.expect('Dmon query exited', timeout=6) + + stream.clear() - with self.getAsyncLoggerStream('synapse.lib.storm', 'user is locked') as stream: await core.setUserLocked(visi.iden, True) - self.true(await stream.wait(2)) + await stream.expect('user is locked', timeout=2) + + stream.clear() - with self.getAsyncLoggerStream('synapse.lib.storm', 'Dmon query exited') as stream: await core.setUserLocked(visi.iden, False) - self.true(await stream.wait(2)) + await stream.expect('Dmon query exited', timeout=2) async def test_storm_dmon_caching(self): @@ -3058,19 +3055,19 @@ async def loadPkg(core, pkg): ]) mesg = 'testload init vers=4 output: (\'SynErr\'' - with self.getAsyncLoggerStream('synapse.cortex', mesg) as stream: + with self.getLoggerStream('synapse.cortex') as stream: await loadPkg(core, pkg) self.eq(3, await core.getStormPkgVar('testload', 'storage:version')) - await stream.wait(timeout=10) + await stream.expect(mesg, timeout=10) self.none(await core.getStormVar('init04')) self.none(await core.getStormVar('init06')) await core.setStormVar('dofail', False) - with self.getAsyncLoggerStream('synapse.cortex', 'testload finished onload') as stream: + with self.getLoggerStream('synapse.cortex') as stream: async with self.getTestCore(dirn=dirn) as core: - await stream.wait(timeout=10) + await stream.expect('testload finished onload', timeout=10) # prior versions dont re-run, but a failed one does @@ -3089,9 +3086,9 @@ async def loadPkg(core, pkg): 'query': '$lib.print("doing a print")', }) - with self.getAsyncLoggerStream('synapse.cortex', 'doing a print') as stream: + with 
self.getLoggerStream('synapse.cortex') as stream: await loadPkg(core, pkg) - await stream.wait(timeout=10) + await stream.expect('doing a print', timeout=10) self.eq(7, await core.getStormPkgVar('testload', 'storage:version')) pkg['version'] = '0.6.0' @@ -3101,9 +3098,9 @@ async def loadPkg(core, pkg): 'query': '$lib.warn("doing a warn")', }) - with self.getAsyncLoggerStream('synapse.cortex', 'doing a warn') as stream: + with self.getLoggerStream('synapse.cortex') as stream: await loadPkg(core, pkg) - await stream.wait(timeout=10) + await stream.expect('doing a warn', timeout=10) self.eq(8, await core.getStormPkgVar('testload', 'storage:version')) # init that advances the version @@ -3508,15 +3505,15 @@ async def test_storm_iden(self): nodes = await core.nodes(f'[test:str=hehe] | iden {iq}') self.len(3, nodes) - q = 'iden newp' - with self.getLoggerStream('synapse.lib.snap', 'Failed to decode iden') as stream: - self.len(0, await core.nodes(q)) - self.true(stream.wait(1)) + with self.getLoggerStream('synapse.lib.snap') as stream: + + self.len(0, await core.nodes('iden newp')) + await stream.expect('Failed to decode iden', timeout=1) + + stream.clear() - q = 'iden deadb33f' - with self.getLoggerStream('synapse.lib.snap', 'iden must be 32 bytes') as stream: - self.len(0, await core.nodes(q)) - self.true(stream.wait(1)) + self.len(0, await core.nodes('iden deadb33f')) + await stream.expect('iden must be 32 bytes', timeout=1) # Runtsafety test q = 'test:str=hehe | iden $node.iden()' diff --git a/synapse/tests/test_lib_stormlib_cortex.py b/synapse/tests/test_lib_stormlib_cortex.py index 6194038120e..ad6f2d6bf60 100644 --- a/synapse/tests/test_lib_stormlib_cortex.py +++ b/synapse/tests/test_lib_stormlib_cortex.py @@ -300,12 +300,12 @@ async def test_libcortex_httpapi_methods(self): # Storm query logging includes the httpapi iden in structlog data core.stormlog = True - with self.getStructuredAsyncLoggerStream('synapse.storm', 'Executing storm query') as stream: + with 
self.getLoggerStream('synapse.storm') as stream: resp = await sess.get(url) self.eq(resp.status, http.HTTPStatus.OK) - self.true(await stream.wait(timeout=12)) + await stream.expect('Executing storm query', timeout=12) msgs = stream.jsonlines() - self.eq(msgs[0].get('httpapi'), echoiden) + self.eq(msgs[0]['params'].get('httpapi'), echoiden) core.stormlog = False # Sad paths on the $request methods @@ -336,11 +336,12 @@ async def test_libcortex_httpapi_methods(self): return ( $api.iden )''' test04 = await core.callStorm(q) - emsg = f'Error executing custom HTTP API {test04}: BadArg Response.reply() has already been called.' - with self.getAsyncLoggerStream('synapse.lib.httpapi', emsg) as stream: + emsg = f'Error executing Extended HTTP API {test04}: BadArg Response.reply() has already been called.' + with self.getLoggerStream('synapse.lib.httpapi') as stream: resp = await sess.get(f'https://localhost:{hport}/api/ext/testpath04') self.eq(resp.status, http.HTTPStatus.OK) self.eq(await resp.json(), {'hehe': 'yes!'}) + await stream.expect(emsg, timeout=6) async def test_libcortex_httpapi_runas_owner(self): async with self.getTestCore() as core: @@ -1322,18 +1323,15 @@ async def test_libcortex_httpapi_fsm_sadpath(self): self.eq(data.get('code'), 'StormRuntimeError') self.eq(data.get('mesg'), f'Extended HTTP API {iden02} must set status code before sending body.') - with self.getAsyncLoggerStream('synapse.lib.httpapi', - f'Extended HTTP API {iden03} tried to set code after sending body.') as stream: - + with self.getLoggerStream('synapse.lib.httpapi') as stream: resp = await sess.get(f'https://localhost:{hport}/api/ext/bad03') - self.true(await stream.wait(timeout=6)) + await stream.expect(f'Extended HTTP API {iden03} tried to set code after sending body.', timeout=6) self.eq(resp.status, http.HTTPStatus.CREATED) self.eq(await resp.read(), b'text') - with self.getAsyncLoggerStream('synapse.lib.httpapi', - f'Extended HTTP API {iden04} tried to set headers after sending 
body.') as stream: + with self.getLoggerStream('synapse.lib.httpapi') as stream: resp = await sess.get(f'https://localhost:{hport}/api/ext/bad04') - self.true(await stream.wait(timeout=6)) + await stream.expect(f'Extended HTTP API {iden04} tried to set headers after sending body.', timeout=6) self.eq(resp.status, http.HTTPStatus.OK) self.eq(await resp.read(), b'text') @@ -1343,17 +1341,15 @@ async def test_libcortex_httpapi_fsm_sadpath(self): self.eq(data.get('code'), 'BadTypeValu') self.eq(data.get('mesg'), "invalid literal for int() with base 0: 'notAnInt'") - with self.getAsyncLoggerStream('synapse.lib.httpapi', - f'Error executing Extended HTTP API {iden06}: BadTypeValu') as stream: + with self.getLoggerStream('synapse.lib.httpapi') as stream: resp = await sess.get(f'https://localhost:{hport}/api/ext/bad06') - self.true(await stream.wait(timeout=6)) + await stream.expect(f'Error executing Extended HTTP API {iden06}: BadTypeValu', timeout=6) self.eq(resp.status, http.HTTPStatus.CREATED) self.eq(await resp.json(), {}) - with self.getAsyncLoggerStream('synapse.lib.httpapi', - f'Error executing Extended HTTP API {iden07}: StormRuntimeError') as stream: + with self.getLoggerStream('synapse.lib.httpapi') as stream: resp = await sess.get(f'https://localhost:{hport}/api/ext/bad07') - self.true(await stream.wait(timeout=6)) + await stream.expect(f'Error executing Extended HTTP API {iden07}: StormRuntimeError', timeout=6) self.eq(resp.status, http.HTTPStatus.INTERNAL_SERVER_ERROR) data = await resp.json() self.eq(data.get('code'), 'StormRuntimeError') diff --git a/synapse/tests/test_lib_stormlib_log.py b/synapse/tests/test_lib_stormlib_log.py index 61f4fffcee2..532beac0075 100644 --- a/synapse/tests/test_lib_stormlib_log.py +++ b/synapse/tests/test_lib_stormlib_log.py @@ -11,50 +11,30 @@ async def test_stormlib_log(self): async with self.getTestCore() as core: # Raw message - with self.getAsyncLoggerStream(logname, 'debug message') as stream: + with 
self.getLoggerStream(logname, 'debug message') as stream: + await core.callStorm('$lib.log.debug("debug message")') - self.true(await stream.wait(6)) - with self.getAsyncLoggerStream(logname, 'info message') as stream: + await stream.expect('debug message', timeout=6) + await core.callStorm('$lib.log.info("info message")') - self.true(await stream.wait(6)) - with self.getAsyncLoggerStream(logname, 'warn message') as stream: + await stream.expect('info message', timeout=6) + await core.callStorm('$lib.log.warning("warn message")') - self.true(await stream.wait(6)) - with self.getAsyncLoggerStream(logname, 'error message') as stream: + await stream.expect('warn message', timeout=6) + await core.callStorm('$lib.log.error("error message")') - self.true(await stream.wait(6)) + await stream.expect('error message', timeout=6) - # Extra without structlog handler in place has no change in results - with self.getAsyncLoggerStream(logname, 'debug message') as stream: await core.callStorm('$lib.log.debug("debug message", extra=({"key": "valu"}))') - self.true(await stream.wait(6)) - stream.seek(0) - self.eq(stream.read(), 'debug message\n') - - # Extra can be empty too - with self.getAsyncLoggerStream(logname, 'debug message') as stream: - await core.callStorm('$lib.log.debug("debug message", extra=({}))') - self.true(await stream.wait(6)) - stream.seek(0) - self.eq(stream.read(), 'debug message\n') + await stream.expect('debug message', timeout=6) + await stream.expect('"key":"valu"', timeout=6) # Extra must be a dict after toprim is called on him. 
with self.raises(s_exc.BadArg): await core.callStorm('$lib.log.debug("debug message", extra=(foo, bar, baz))') - # structlog test - with self.getStructuredAsyncLoggerStream(logname, '"key":"valu"') as stream: - await core.callStorm('$lib.log.debug("struct1 message")') - await core.callStorm('$lib.log.debug("struct2 message", extra=({"key": "valu"}))') - self.true(await stream.wait(6)) - msgs = stream.jsonlines() - self.len(2, msgs) - mesg = msgs[0] - self.eq(mesg.get('logger').get('name'), 'synapse.storm.log') - self.eq(mesg.get('message'), 'struct1 message') - self.none(mesg.get('key')) + mesg = stream.jsonlines()[-1] - mesg = msgs[1] self.eq(mesg.get('logger').get('name'), 'synapse.storm.log') - self.eq(mesg.get('message'), 'struct2 message') - self.eq(mesg.get('key'), 'valu') + self.eq(mesg.get('message'), 'debug message') + self.eq(mesg['params'].get('key'), 'valu') diff --git a/synapse/tests/test_lib_stormlib_model.py b/synapse/tests/test_lib_stormlib_model.py index ed266b6e8a2..e322937e788 100644 --- a/synapse/tests/test_lib_stormlib_model.py +++ b/synapse/tests/test_lib_stormlib_model.py @@ -230,13 +230,12 @@ async def test_stormlib_model_depr(self): with self.raises(s_exc.IsDeprLocked): await core.nodes('[ou:hasalias=(*, hehe)]') - with self.getAsyncLoggerStream('synapse.lib.snap', - 'Prop ou:org:sic is locked due to deprecation') as stream: + with self.getLoggerStream('synapse.lib.snap') as stream: data = ( (('ou:org', ('t0',)), {'props': {'sic': '5678'}}), ) await core.addFeedData('syn.nodes', data) - self.true(await stream.wait(1)) + await stream.expect('Prop ou:org:sic is locked due to deprecation', timeout=1) nodes = await core.nodes('ou:org=(t0,)') self.none(nodes[0].get('sic')) diff --git a/synapse/tests/test_lib_stormlib_pkg.py b/synapse/tests/test_lib_stormlib_pkg.py index 1844f74ac29..627b8116ecd 100644 --- a/synapse/tests/test_lib_stormlib_pkg.py +++ b/synapse/tests/test_lib_stormlib_pkg.py @@ -104,9 +104,9 @@ async def 
test_stormlib_pkg_basic(self): } } - with self.getAsyncLoggerStream('synapse.cortex', 'bazfaz requirement') as stream: + with self.getLoggerStream('synapse.cortex') as stream: await core.addStormPkg(pkgdef) - self.true(await stream.wait(timeout=1)) + await stream.expect('bazfaz requirement', timeout=1) pkgdef = { 'name': 'bazfaz', @@ -118,9 +118,9 @@ async def test_stormlib_pkg_basic(self): } } - with self.getAsyncLoggerStream('synapse.cortex', 'bazfaz optional requirement') as stream: + with self.getLoggerStream('synapse.cortex') as stream: await core.addStormPkg(pkgdef) - self.true(await stream.wait(timeout=1)) + await stream.expect('bazfaz optional requirement', timeout=1) deps = await core.callStorm('return($lib.pkg.deps($pkgdef))', opts={'vars': {'pkgdef': pkgdef}}) self.eq({ @@ -240,15 +240,14 @@ async def get(self, name): # because the pkg hasn't changed so no loading occurs waiter = core.waiter(1, 'core:pkg:onload:complete') - with self.getAsyncLoggerStream('synapse.cortex') as stream: + with self.getLoggerStream('synapse.cortex') as stream: msgs = await core.stormlist(f'pkg.load --ssl-noverify https://127.0.0.1:{port}/api/v1/pkgtest/yep') self.stormIsInPrint('testload @0.3.0', msgs) msgs = await core.stormlist(f'pkg.load --ssl-noverify --raw https://127.0.0.1:{port}/api/v1/pkgtestraw/yep') self.stormIsInPrint('testload @0.3.0', msgs) - stream.seek(0) - buf = stream.read() + buf = stream.getvalue() self.isin("testload onload output: teststring", buf) self.isin("testload onload output: testwarn", buf) self.isin("No var with name: newp", buf) diff --git a/synapse/tests/test_lib_stormlib_storm.py b/synapse/tests/test_lib_stormlib_storm.py index 2052923b3f9..e24696b9a46 100644 --- a/synapse/tests/test_lib_stormlib_storm.py +++ b/synapse/tests/test_lib_stormlib_storm.py @@ -40,29 +40,14 @@ async def test_lib_stormlib_storm_eval(self): await core.callStorm('$lib.storm.eval( "{$lib.auth.users.add(readonly)}" )', opts={'readonly': True}) with 
self.getLoggerStream('synapse.storm') as stream: - q = '''{ - $lib.log.info(hehe) - [test:str=omg] - $lib.log.info($node) - fini { return(wow) } - } - ''' - core.stormlog = True - opts = {'vars': {'q': q}} + opts = {'vars': {'q': 'woot'}} ret = await core.callStorm('return( $lib.storm.eval($q) )', opts=opts) - self.eq(ret, 'wow') - self.len(1, await core.nodes('test:str=omg')) + self.eq(ret, 'woot') # Check that we saw the logs - stream.seek(0) - data = stream.read() - - mesg = 'Executing storm query {return( $lib.storm.eval($q) )} as [root]' - self.isin(mesg, data) - - mesg = f'Executing storm query via $lib.storm.eval() {{{q}}} as [root]' - self.isin(mesg, data) + await stream.expect('Executing storm query via $lib.storm.eval() {woot} as [root]') + await stream.expect('Executing storm query {return( $lib.storm.eval($q) )} as [root]') async def test_lib_stormlib_storm(self): @@ -147,7 +132,7 @@ async def test_lib_stormlib_storm(self): async def test_lib_stormlib_storm_tasks(self): - with self.getStructuredAsyncLoggerStream('synapse') as stream: + with self.getLoggerStream('synapse') as stream: async with self.getTestCore() as core: @@ -202,6 +187,6 @@ async def doit(): msgs = stream.jsonlines() self.gt(len(msgs), 0) - msgs = [(k.get('message'), k.get('text')) for k in msgs] + msgs = [(k.get('message'), k['params'].get('text')) for k in msgs] self.isin(('Storm runtime cancelled.', '$lib.time.sleep(120)'), msgs) self.isin(('Storm runtime cancelled.', q), msgs) diff --git a/synapse/tests/test_lib_stormtypes.py b/synapse/tests/test_lib_stormtypes.py index af57db570a7..4f76bc9ec53 100644 --- a/synapse/tests/test_lib_stormtypes.py +++ b/synapse/tests/test_lib_stormtypes.py @@ -5848,13 +5848,13 @@ async def getCronJob(text): q = '{$lib.queue.get(foo).put(m3) $s=`m3 {$auto.type} {$auto.iden}` $lib.log.info($s, ({"iden": $auto.iden})) }' text = f'cron.add --minute 17 {q}' async with getCronJob(text) as guid: - with self.getStructuredAsyncLoggerStream('synapse.storm.log', 
'm3 cron') as stream: + with self.getLoggerStream('synapse.storm.log') as stream: unixtime += 7 * MINSECS self.eq('m3', await getNextFoo()) - self.true(await stream.wait(6)) + await stream.expect('m3 cron', timeout=6) mesg = stream.jsonlines()[0] self.eq(mesg['message'], f'm3 cron {guid}') - self.eq(mesg['iden'], guid) + self.eq(mesg['params']['iden'], guid) ################## diff --git a/synapse/tests/test_lib_trigger.py b/synapse/tests/test_lib_trigger.py index d2b562c43ea..007b83e86ac 100644 --- a/synapse/tests/test_lib_trigger.py +++ b/synapse/tests/test_lib_trigger.py @@ -54,17 +54,17 @@ async def test_trigger_async_base(self): $lib.log.info($s) [ test:guid="*" +#nodeadd]''' tdef = {'cond': 'node:add', 'form': 'test:str', 'storm': q} await core.view.addTrigger(tdef) - with self.getAsyncLoggerStream('synapse.storm.log', 'f=') as stream: + with self.getLoggerStream('synapse.storm.log') as stream: await core.nodes('[ test:str=foo ]') - self.true(await stream.wait(12)) - self.eq(stream.getvalue().strip(), 'f=test:str v=foo u=root') + await stream.expect('f=test:str v=foo u=root', timeout=12) + self.len(1, await core.nodes('test:guid#nodeadd')) unfo = await core.addUser('someuser') await core.setUserAdmin(unfo.get('iden'), True) - with self.getAsyncLoggerStream('synapse.storm.log', 'f=') as stream: + with self.getLoggerStream('synapse.storm.log') as stream: await core.nodes('[ test:str=bar ]', opts={'user': unfo.get('iden')}) - self.true(await stream.wait(12)) - self.eq(stream.getvalue().strip(), 'f=test:str v=bar u=someuser') + await stream.expect('f=test:str v=bar u=someuser', timeout=12) + self.len(2, await core.nodes('test:guid#nodeadd')) async with self.getTestCore(dirn=dirn) as core: @@ -164,10 +164,9 @@ async def test_trigger_basics(self): $lib.log.info($s) [ test:guid="*" +#nodeadd]''' tdef = {'cond': 'node:add', 'form': 'test:str', 'storm': q} await view.addTrigger(tdef) - with self.getAsyncLoggerStream('synapse.storm.log', 'f=') as stream: + with 
self.getLoggerStream('synapse.storm.log') as stream: await core.nodes('[ test:str=foo ]') - self.true(await stream.wait(6)) - self.eq(stream.getvalue().strip(), 'f=test:str v=foo u=root') + await stream.expect('f=test:str v=foo u=root', timeout=6) self.len(1, await core.nodes('test:guid#nodeadd')) # node:del case @@ -188,10 +187,10 @@ async def test_trigger_basics(self): 'tag': 'a.*.c'} await view.addTrigger(tdef) await core.nodes('[ test:str=foo +#a.b ]') - with self.getAsyncLoggerStream('synapse.storm.log', 'a.b.c') as stream: + with self.getLoggerStream('synapse.storm.log') as stream: await core.nodes('[ test:str=foo +#a.b.c ]') - self.true(await stream.wait(6)) - self.true(stream.getvalue().strip().startswith('a.b.c\n')) + await stream.expect('a.b.c', timeout=6) + await core.nodes('[ test:str=foo +#a.b.ccc ]') self.len(1, await core.nodes('#count')) self.len(1, await core.nodes('test:str=a.b.c')) @@ -232,11 +231,10 @@ async def test_trigger_basics(self): 'prop': 'test:type10:intprop'} await view.addTrigger(tdef) await core.nodes('[ test:type10=1 ]') - with self.getAsyncLoggerStream('synapse.storm.log', 'pf=') as stream: + with self.getLoggerStream('synapse.storm.log') as stream: await core.nodes('[ test:type10=1 :intprop=25 ]') - self.true(await stream.wait(6)) - buf = stream.getvalue().strip() - self.eq(buf, 'pf=test:type10:intprop pn=intprop') + await stream.expect('pf=', timeout=6) + self.isin('pf=test:type10:intprop pn=intprop', stream.getvalue()) self.len(1, await core.nodes('test:guid#propset')) # Test re-setting doesn't fire @@ -268,13 +266,12 @@ async def test_trigger_basics(self): q = '+test:str~=log $s=`test {$auto.type} {$auto.iden}` $lib.log.info($s, ({"iden": $auto.iden}))' tdef = {'cond': 'node:add', 'form': 'test:str', 'storm': q} await view.addTrigger(tdef) - with self.getStructuredAsyncLoggerStream('synapse.storm.log', 'test trigger') as stream: + with self.getLoggerStream('synapse.storm.log') as stream: await core.nodes('[ test:str=logit ]') - 
self.true(await stream.wait(6)) + await stream.expect(f'test trigger {tdef.get("iden")}', timeout=6) + msgs = stream.jsonlines() - mesg = [m for m in msgs if m.get('iden') == tdef.get('iden')][0] - self.eq(mesg['message'], f'test trigger {tdef.get("iden")}') - self.eq(mesg['iden'], tdef.get('iden')) + self.true(len([m for m in msgs if m['params'].get('iden') == tdef.get('iden')][0])) # Attempting to add trigger with existing iden raises with self.raises(s_exc.DupIden): @@ -964,9 +961,9 @@ async def test_trigger_feed_data(self): tdef = {'cond': 'node:add', 'form': 'test:int', 'storm': q} await core1.view.addTrigger(tdef) - with self.getAsyncLoggerStream('synapse.storm.log', 'f=') as stream: + with self.getLoggerStream('synapse.storm.log') as stream: await core1.addFeedData('syn.nodes', podes) - self.true(await stream.wait(6)) + await stream.expect('f=', timeout=6) valu = stream.getvalue().strip() self.isin('f=test:int v=1 u=root', valu) self.isin('f=test:int v=2 u=root', valu) diff --git a/synapse/tests/test_lib_view.py b/synapse/tests/test_lib_view.py index 3290b18c562..21858221d90 100644 --- a/synapse/tests/test_lib_view.py +++ b/synapse/tests/test_lib_view.py @@ -649,11 +649,9 @@ async def test_lib_view_savenodeedits_telepath(self): await prox.saveNodeEdits(edits, {}) self.eq(cm.exception.get('mesg'), "Meta argument requires user key to be a guid, got user=''") - with self.getAsyncLoggerStream('synapse.storm.log', 'u=') as stream: + with self.getLoggerStream('synapse.storm.log') as stream: await prox.saveNodeEdits(edits, {'time': s_common.now(), 'user': guid}) - self.true(await stream.wait(6)) - valu = stream.getvalue().strip() - self.isin(f'u={guid}', valu) + await stream.expect(f'u={guid}', timeout=6) self.len(1, await core.nodes('test:guid#foo', opts={'view': view})) self.len(1, await core.nodes('test:str=foo', opts={'view': view})) diff --git a/synapse/tests/test_model_dns.py b/synapse/tests/test_model_dns.py index d9bf555a548..7f2f1b406b4 100644 --- 
a/synapse/tests/test_model_dns.py +++ b/synapse/tests/test_model_dns.py @@ -176,16 +176,14 @@ async def test_model_dns_request(self): q = '[inet:dns:query=(tcp://1.2.3.4, "foo*.haha.com", 1)]' self.len(1, await core.nodes(q)) q = 'inet:dns:query=(tcp://1.2.3.4, "", 1) :name -> inet:fqdn' - with self.getLoggerStream('synapse.lib.ast', - 'Cannot generate fqdn index bytes for a empty string') as stream: + with self.getLoggerStream('synapse.lib.ast') as stream: self.len(0, await core.nodes(q)) - self.true(stream.wait(1)) + await stream.expect('Cannot generate fqdn index bytes for a empty string', timeout=1) q = 'inet:dns:query=(tcp://1.2.3.4, "foo*.haha.com", 1) :name -> inet:fqdn' - with self.getLoggerStream('synapse.lib.ast', - 'Wild card may only appear at the beginning') as stream: + with self.getLoggerStream('synapse.lib.ast') as stream: self.len(0, await core.nodes(q)) - self.true(stream.wait(1)) + await stream.expect('Wild card may only appear at the beginning', timeout=1) async def test_forms_dns_simple(self): diff --git a/synapse/tests/test_telepath.py b/synapse/tests/test_telepath.py index 5fcf9d64ba7..ce04cf18405 100644 --- a/synapse/tests/test_telepath.py +++ b/synapse/tests/test_telepath.py @@ -942,14 +942,13 @@ async def dostuff(self, x): async with await s_telepath.open(urls) as targ: - with self.getAsyncLoggerStream('synapse.telepath', 'Connect call failed') as stream: + with self.getLoggerStream('synapse.telepath') as stream: await targ.waitready() # Verify the password doesn't leak into the log - self.true(await stream.wait(2)) - stream.seek(0) - mesgs = stream.read() + await stream.expect('Connect call failed', timeout=2) + mesgs = stream.getvalue() self.notin('password', mesgs) prox00 = await targ.proxy(timeout=12) @@ -968,18 +967,17 @@ async def onlink(proxy, urlinfo): _url = s_telepath.zipurl(urlinfo) logger.info(f'Connected to url={_url}') - with self.getAsyncLoggerStream('synapse.tests.test_telepath', - f'Connected to 
url=tcp://127.0.0.1:{addr1[1]}/foo') as stream: + with self.getLoggerStream('synapse.tests.test_telepath') as stream: async with await s_telepath.open(url1, onlink=onlink) as targ: - self.true(await stream.wait(timeout=12)) + await stream.expect(f'Connected to url=tcp://127.0.0.1:{addr1[1]}/foo', timeout=12) # Coverage async def badonlink(proxy, urlinfo): raise ValueError('oopsie') - with self.getAsyncLoggerStream('synapse.telepath', 'onlink: ') as stream: + with self.getLoggerStream('synapse.telepath') as stream: async with await s_telepath.open(url1, onlink=badonlink) as targ: - self.true(await stream.wait(timeout=12)) + await stream.expect('onlink: ', timeout=12) await dmon0.fini() await dmon1.fini() @@ -1088,8 +1086,7 @@ async def test_link_fini_breaking_tasks(self): async with await s_telepath.openurl(url) as proxy: - with self.getAsyncLoggerStream('synapse.daemon', - 'task sleepg') as stream: + with self.getLoggerStream('synapse.daemon') as stream: # Fire up an async generator which will yield a message then # wait for a while so that our break will tear it down @@ -1100,7 +1097,7 @@ async def test_link_fini_breaking_tasks(self): # Ensure that the sleepg function got canceled. self.true(await asyncio.wait_for(foo.sleepg_evt.wait(), timeout=6)) # Ensure we logged the cancellation. 
- self.true(await stream.wait(6)) + await stream.expect('task sleepg', timeout=6) async def test_link_fini_breaking_tasks2(self): ''' @@ -1409,7 +1406,7 @@ async def test_telepath_exception_logging(self): async with await s_telepath.openurl(f'tcp://127.0.0.1:{port}', user='root', passwd='secret') as prox: - with self.getAsyncLoggerStream('synapse.daemon', 'error during task: callStorm') as stream: + with self.getLoggerStream('synapse.daemon') as stream: task = asyncio.create_task(prox.callStorm('$lib.time.sleep(60)')) await asyncio.sleep(2) @@ -1420,15 +1417,16 @@ async def test_telepath_exception_logging(self): except asyncio.CancelledError: pass - self.false(await stream.wait(timeout=0.5)) + with self.raises(s_exc.SynErr): + await stream.expect('error during task: callStorm', timeout=0.5) - with self.getAsyncLoggerStream('synapse.daemon', 'error during task: callStorm') as stream: + with self.getLoggerStream('synapse.daemon') as stream: try: await prox.callStorm('[inet:newp=invalid]') except Exception: pass - self.true(await stream.wait(timeout=6)) + await stream.expect('error during task: callStorm', timeout=6) async def test_telepath_hostname_synerr(self): self.thisHostMustNot(platform='darwin') diff --git a/synapse/tests/test_tools_cellauth.py b/synapse/tests/test_tools_cellauth.py index 6723382fa56..8ba60be7271 100644 --- a/synapse/tests/test_tools_cellauth.py +++ b/synapse/tests/test_tools_cellauth.py @@ -18,7 +18,7 @@ async def test_cellauth_bad(self): outp = self.getTestOutp() self.eq(await s_cellauth.main(argv, outp), 1) outp.expect('the following arguments are required:') - outp.expect('WARNING: "synapse.tools.cellauth" is deprecated in 2.164.0 and will be removed in 3.0.0') + outp.expect('WARNING: synapse.tools.cellauth is deprecated in 2.164.0 and will be removed in 3.0.0') outp.clear() argv = [coreurl, 'modify', '--adduser', 'foo', '--object', 'foo:bar'] diff --git a/synapse/tests/test_tools_hiveload.py b/synapse/tests/test_tools_hiveload.py index 
b35f0219861..a052b115cc7 100644 --- a/synapse/tests/test_tools_hiveload.py +++ b/synapse/tests/test_tools_hiveload.py @@ -48,7 +48,7 @@ def _getOldSynVers(self): with mock.patch('synapse.telepath.Proxy._getSynVers', _getOldSynVers): outp = self.getTestOutp() retn = await s_hiveload.main(argv, outp=outp) - outp.expect('WARNING: "synapse.tools.hive.load" is deprecated in 2.167.0 and will be removed in 3.0.0') + outp.expect('WARNING: synapse.tools.hive.load is deprecated in 2.167.0 and will be removed in 3.0.0') outp.expect('Hive version 0.0.0 is outside of the hive.load supported range') self.eq(1, retn) diff --git a/synapse/tests/test_tools_hivesave.py b/synapse/tests/test_tools_hivesave.py index 1b2f5711ca0..edf7b9eeeee 100644 --- a/synapse/tests/test_tools_hivesave.py +++ b/synapse/tests/test_tools_hivesave.py @@ -28,7 +28,7 @@ def _getOldSynVers(self): with mock.patch('synapse.telepath.Proxy._getSynVers', _getOldSynVers): outp = self.getTestOutp() retn = await s_hivesave.main(argv, outp=outp) - outp.expect('WARNING: "synapse.tools.hive.save" is deprecated in 2.167.0 and will be removed in 3.0.0') + outp.expect('WARNING: synapse.tools.hive.save is deprecated in 2.167.0 and will be removed in 3.0.0') outp.expect('Hive version 0.0.0 is outside of the hive.save supported range') self.eq(1, retn) diff --git a/synapse/tests/test_tools_service_demote.py b/synapse/tests/test_tools_service_demote.py index ca0f015e006..7907ce566ae 100644 --- a/synapse/tests/test_tools_service_demote.py +++ b/synapse/tests/test_tools_service_demote.py @@ -52,13 +52,13 @@ async def test_tool_demote_base(self): with self.getLoggerStream('synapse') as stream: argv = ['--url', cell01.getLocalUrl(), '--timeout', '12'] self.eq(1, await s_tools_demote.main(argv, outp=outp)) - stream.expect('...error retrieving nexus index for') + await stream.expect('...error retrieving nexus index for', timeout=1) with mock.patch.object(cell00, 'promote', boom): with self.getLoggerStream('synapse') as stream: 
argv = ['--url', cell01.getLocalUrl(), '--timeout', '12'] self.eq(1, await s_tools_demote.main(argv, outp=outp)) - stream.expect('...error promoting') + await stream.expect('...error promoting', timeout=1) with mock.patch.object(cell01, '_getDemotePeers', boom): with self.getLoggerStream('synapse') as stream: @@ -66,7 +66,7 @@ async def test_tool_demote_base(self): outp.clear() self.eq(1, await s_tools_demote.main(argv, outp=outp)) outp.expect('Error while demoting service') - stream.expect('error during task: demote') + await stream.expect('error during task: demote', timeout=1) self.false(cell00.isactive) self.true(cell01.isactive) @@ -85,7 +85,7 @@ async def test_tool_demote_base(self): with self.getLoggerStream('synapse') as stream: argv = ['--url', cell00.getLocalUrl(), '--timeout', '12'] self.eq(1, await s_tools_demote.main(argv, outp=outp)) - stream.expect('...no suitable services discovered.') + await stream.expect('...no suitable services discovered.', timeout=1) async def test_tool_demote_no_features(self): @@ -107,8 +107,8 @@ async def test_tool_demote_no_features(self): outp = self.getTestOutp() argv = ['--url', cell00.getLocalUrl()] - with self.getAsyncLoggerStream('synapse.daemon') as stream: + with self.getLoggerStream('synapse.daemon') as stream: self.eq(1, await s_tools_demote.main(argv, outp=outp)) - stream.expect('AHA server does not support feature: getAhaSvcsByIden >= 1') + await stream.expect('AHA server does not support feature: getAhaSvcsByIden >= 1', timeout=1) + outp.expect('Error while demoting service') - outp.expect('AHA server does not support feature: getAhaSvcsByIden') diff --git a/synapse/tests/test_tools_service_shutdown.py b/synapse/tests/test_tools_service_shutdown.py index 2f0f25297e2..f4c8ebdd724 100644 --- a/synapse/tests/test_tools_service_shutdown.py +++ b/synapse/tests/test_tools_service_shutdown.py @@ -90,6 +90,6 @@ async def test_tool_shutdown_no_features(self): # confirm that graceful shutdown with peers also demotes... 
outp = self.getTestOutp() argv = ['--url', cell00.getLocalUrl(), '--timeout', '12'] - with self.getAsyncLoggerStream('synapse.daemon') as stream: + with self.getLoggerStream('synapse.daemon') as stream: self.eq(1, await s_t_shutdown.main(argv, outp=outp)) - stream.expect('AHA server does not support feature: getAhaSvcsByIden >= 1') + await stream.expect('AHA server does not support feature: getAhaSvcsByIden >= 1') diff --git a/synapse/tests/test_utils.py b/synapse/tests/test_utils.py index b5894ea90f4..4fe46eb2556 100644 --- a/synapse/tests/test_utils.py +++ b/synapse/tests/test_utils.py @@ -104,35 +104,14 @@ def div0(): async def test_syntest_logstream(self): with self.getLoggerStream('synapse.tests.test_utils') as stream: logger.error('ruh roh i am a error message') + await stream.expect('ruh roh i am a error message', timeout=1) - stream.expect('error message') with self.raises(s_exc.SynErr): - stream.expect('does not exist') + await stream.expect('does not exist', timeout=0.01) - self.eq(str(stream), 'ruh roh i am a error message\n') - self.true(repr(stream).endswith('valu: ruh roh i am a error message>')) - self.true(repr(stream).startswith('')) - self.true(repr(stream).startswith(' typing.List[dict]: - '''Get the messages as jsonlines. May throw Json errors if the captured stream is not jsonlines.''' - return jsonlines(self.getvalue()) + if escape: + text = regex.escape(text) - def expect(self, substr: str): - ''' - Check if a string is present in the messages captured by StreamEvent. + regx = regex.compile(text) - Args: - substr (str): String to check for the existence of. 
- ''' - valu = self.getvalue() - if valu.find(substr) == -1: - mesg = '%s.expect(%s) not in %s' % (self.__class__.__name__, substr, valu) - raise s_exc.SynErr(mesg=mesg) + while True: - def noexpect(self, substr: str): - valu = self.getvalue() - if valu.find(substr) != -1: - mesg = '%s.noexpect(%s) in %s' % (self.__class__.__name__, substr, valu) - raise s_exc.SynErr(mesg=mesg) + offs = 0 + tally = 0 -class StreamEvent(_StreamIOMixin, threading.Event): - ''' - A combination of a io.StringIO object and a threading.Event object. - ''' - def __init__(self, *args, **kwargs): - _StreamIOMixin.__init__(self, *args, **kwargs) - threading.Event.__init__(self) - self.mesg = '' - - def __repr__(self): - cls = self.__class__ - status = 'set' if self._flag else 'unset' - if valu := str(self): - valu = s_common.trimText(valu).strip() - status = f'{status}, valu: {valu}' - return f"<{cls.__module__}.{cls.__qualname__} at {id(self):#x}: {status}>" - -class AsyncStreamEvent(_StreamIOMixin, asyncio.Event): - ''' - A combination of a io.StringIO object and an asyncio.Event object. 
- ''' - def __init__(self, *args, **kwargs): - _StreamIOMixin.__init__(self, *args, **kwargs) - asyncio.Event.__init__(self) - self.mesg = '' - - def __repr__(self): - cls = self.__class__ - status = 'set' if self._value else 'unset' - if self._waiters: - status = f'{status}, waiters:{len(self._waiters)}' - if valu := str(self): - valu = s_common.trimText(valu).strip() - status = f'{status}, valu: {valu}' - return f"<{cls.__module__}.{cls.__qualname__} at {id(self):#x}: {status}>" - - async def wait(self, timeout=None): - if timeout is None: - return await asyncio.Event.wait(self) - return await s_coro.event_wait(self, timeout=timeout) + for line in self._lines[offs:]: + offs += 1 + + if regx.search(line) is not None: + tally += 1 + + if tally >= count: + return True + + await self._event.wait() + self._event.clear() + + def jsonlines(self): + return jsonlines(self.getvalue()) class HttpReflector(s_httpapi.Handler): '''Test handler which reflects get/post data back to the caller''' @@ -1073,6 +1043,10 @@ def __init__(self, *args, **kwargs): if inspect.iscoroutinefunction(attr) and s.startswith('test_') and inspect.ismethod(attr): setattr(self, s, s_glob.synchelp(attr)) + def tearDown(self): + s_logging.reset() + return super().tearDown() + def checkNode(self, node, expected): ex_ndef, ex_props = expected self.eq(node.ndef, ex_ndef) @@ -1321,12 +1295,12 @@ def withCliPromptMock(self): @contextlib.asynccontextmanager async def withSetLoggingMock(self): ''' - Context manager to mock calls to the setlogging function to avoid unittests calling logging.basicconfig. + Context manager to mock calls to the logging setup function to avoid unittests calling logging.basicconfig. Returns: mock.MagicMock: Yields a mock.MagicMock object. 
''' - with mock.patch('synapse.common.setlogging', + with mock.patch('synapse.lib.logging.setup', PickleableMagicMock(return_value=dict())) as patch: # type: mock.MagicMock yield patch @@ -1748,162 +1722,29 @@ def getTestFilePath(self, *names): return os.path.join(path, 'files', *names) @contextlib.contextmanager - def getLoggerStream(self, logname, mesg=''): - ''' - Get a logger and attach a io.StringIO object to the logger to capture log messages. - - Args: - logname (str): Name of the logger to get. - mesg (str): A string which, if provided, sets the StreamEvent event if a message - containing the string is written to the log. - - Examples: - Do an action and get the stream of log messages to check against:: - - with self.getLoggerStream('synapse.foo.bar') as stream: - # Do something that triggers a log message - doSomething() - - stream.seek(0) - mesgs = stream.read() - # Do something with messages - - Do an action and wait for a specific log message to be written:: - - with self.getLoggerStream('synapse.foo.bar', 'big badda boom happened') as stream: - # Do something that triggers a log message - doSomething() - stream.wait(timeout=10) # Wait for the mesg to be written to the stream - - stream.seek(0) - mesgs = stream.read() - # Do something with messages - - You can also reset the message and wait for another message to occur:: - - with self.getLoggerStream('synapse.foo.bar', 'big badda boom happened') as stream: - # Do something that triggers a log message - doSomething() - stream.wait(timeout=10) - stream.setMesg('yo dawg') # This will now wait for the 'yo dawg' string to be written. - stream.wait(timeout=10) - - stream.seek(0) - mesgs = stream.read() - # Do something with messages - - Notes: - This **only** captures logs for the current process. 
- - Yields: - StreamEvent: A StreamEvent object - ''' - stream = StreamEvent() - stream.setMesg(mesg) - handler = logging.StreamHandler(stream) - slogger = logging.getLogger(logname) - slogger.addHandler(handler) - level = slogger.level - slogger.setLevel('DEBUG') - try: - yield stream - except Exception: # pragma: no cover - raise - finally: - slogger.removeHandler(handler) - slogger.setLevel(level) - - @contextlib.contextmanager - def getAsyncLoggerStream(self, logname, mesg='') -> contextlib.AbstractContextManager[StreamEvent, None, None]: - ''' - Async version of getLoggerStream. - - Args: - logname (str): Name of the logger to get. - mesg (str): A string which, if provided, sets the StreamEvent event if a message containing the string is written to the log. - - Notes: - The event object mixed in for the AsyncStreamEvent is a asyncio.Event object. - This requires the user to await the Event specific calls as neccesary. - - Examples: - Do an action and wait for a specific log message to be written:: - - with self.getAsyncLoggerStream('synapse.foo.bar', - 'big badda boom happened') as stream: - # Do something that triggers a log message - await doSomething() - # Wait for the mesg to be written to the stream - await stream.wait(timeout=10) + def getLoggerStream(self, name, struct=True): - stream.seek(0) - mesgs = stream.read() - # Do something with messages - - Returns: - AsyncStreamEvent: An AsyncStreamEvent object. 
- ''' - stream = AsyncStreamEvent() - stream.setMesg(mesg) + stream = LoggerStream() + logger = logging.getLogger(name) handler = logging.StreamHandler(stream) - slogger = logging.getLogger(logname) - slogger.addHandler(handler) - level = slogger.level - slogger.setLevel('DEBUG') - try: - yield stream - except Exception: # pragma: no cover - raise - finally: - slogger.removeHandler(handler) - slogger.setLevel(level) - - @contextlib.contextmanager - def getStructuredAsyncLoggerStream(self, logname, mesg='') -> contextlib.AbstractContextManager[AsyncStreamEvent, None, None]: - ''' - Async version of getLoggerStream which uses structured logging. - - Args: - logname (str): Name of the logger to get. - mesg (str): A string which, if provided, sets the StreamEvent event if a message containing the string is written to the log. - - Notes: - The event object mixed in for the AsyncStreamEvent is a asyncio.Event object. - This requires the user to await the Event specific calls as needed. - The messages written to the stream will be JSON lines. - Examples: - Do an action and wait for a specific log message to be written:: + oldlevel = logger.level - with self.getStructuredAsyncLoggerStream('synapse.foo.bar', - '"some JSON string"') as stream: - # Do something that triggers a log message - await doSomething() - # Wait for the mesg to be written to the stream - await stream.wait(timeout=10) + fmtclass = s_logging.JsonFormatter + if not struct: + fmtclass = s_logging.TextFormatter - msgs = stream.jsonlines() - # Do something with messages + handler.setFormatter(fmtclass()) + logger.setLevel(logging.DEBUG) - Returns: - AsyncStreamEvent: An AsyncStreamEvent object. 
- ''' - stream = AsyncStreamEvent() - stream.setMesg(mesg) - handler = logging.StreamHandler(stream) - slogger = logging.getLogger(logname) - formatter = s_structlog.JsonFormatter() - handler.setFormatter(formatter) - slogger.addHandler(handler) - level = slogger.level - slogger.setLevel('DEBUG') + logger.addHandler(handler) try: yield stream - except Exception: # pragma: no cover + except Exception: # pragma: no cover raise finally: - slogger.removeHandler(handler) - slogger.setLevel(level) + logger.setLevel(oldlevel) + logger.removeHandler(handler) @contextlib.asynccontextmanager async def getHttpSess(self, auth=None, port=None): diff --git a/synapse/tools/autodoc.py b/synapse/tools/autodoc.py index b8374a513b9..339447dd1bc 100644 --- a/synapse/tools/autodoc.py +++ b/synapse/tools/autodoc.py @@ -8,5 +8,4 @@ curv='v2.225.0') if __name__ == '__main__': # pragma: no cover - s_common.setlogging(logger, 'DEBUG') s_cmd.exitmain(main) diff --git a/synapse/tools/backup.py b/synapse/tools/backup.py index 256e289edc7..3f1b36432c8 100644 --- a/synapse/tools/backup.py +++ b/synapse/tools/backup.py @@ -8,5 +8,4 @@ curv='v2.225.0') if __name__ == '__main__': # pragma: no cover - s_common.setlogging(logger, defval='DEBUG') s_cmd.exitmain(main) diff --git a/synapse/tools/cellauth.py b/synapse/tools/cellauth.py index 5df13268d4a..d719a5a48c4 100644 --- a/synapse/tools/cellauth.py +++ b/synapse/tools/cellauth.py @@ -334,5 +334,4 @@ def makeargparser(): return pars if __name__ == '__main__': # pragma: no cover - s_common.setlogging(logger, 'DEBUG') s_cmd.exitmain(main) diff --git a/synapse/tools/cmdr.py b/synapse/tools/cmdr.py index 5de0861313e..b8aeaf6ac43 100644 --- a/synapse/tools/cmdr.py +++ b/synapse/tools/cmdr.py @@ -45,12 +45,10 @@ async def main(argv): # pragma: no cover print('usage: python -m synapse.tools.cmdr []') return 1 - s_common.setlogging(logger, 'WARNING') - async with s_telepath.withTeleEnv(): await _main(argv) return 0 if __name__ == '__main__': # pragma: no 
cover warnings.filterwarnings("default", category=PendingDeprecationWarning) - s_cmd.exitmain(main) + s_cmd.exitmain(main, logconf={'level': 'WARNING'}) diff --git a/synapse/tools/cortex/feed.py b/synapse/tools/cortex/feed.py index 6451d790086..a391db90c7b 100644 --- a/synapse/tools/cortex/feed.py +++ b/synapse/tools/cortex/feed.py @@ -147,5 +147,4 @@ def getArgParser(outp): return pars if __name__ == '__main__': # pragma: no cover - s_common.setlogging(logger, 'DEBUG') s_cmd.exitmain(main) diff --git a/synapse/tools/feed.py b/synapse/tools/feed.py index 3ebf81d3272..0d7cbc323aa 100644 --- a/synapse/tools/feed.py +++ b/synapse/tools/feed.py @@ -8,5 +8,4 @@ curv='v2.225.0') if __name__ == '__main__': # pragma: no cover - s_common.setlogging(logger, 'DEBUG') s_cmd.exitmain(main) diff --git a/synapse/tools/pkgs/gendocs.py b/synapse/tools/pkgs/gendocs.py index 56509628ef5..0ef6ff811f2 100644 --- a/synapse/tools/pkgs/gendocs.py +++ b/synapse/tools/pkgs/gendocs.py @@ -10,6 +10,5 @@ curv='v2.225.0') if __name__ == '__main__': # pragma: no cover - s_common.setlogging(logger, 'DEBUG') logging.getLogger('vcr').setLevel(logging.WARNING) s_cmd.exitmain(main) diff --git a/synapse/tools/rstorm.py b/synapse/tools/rstorm.py index 2da4e01a0cb..023761b7665 100644 --- a/synapse/tools/rstorm.py +++ b/synapse/tools/rstorm.py @@ -8,5 +8,4 @@ curv='v2.225.0') if __name__ == '__main__': # pragma: no cover - s_common.setlogging(logger) s_cmd.exitmain(main) diff --git a/synapse/tools/service/backup.py b/synapse/tools/service/backup.py index a91e3d51327..ec11a237f8c 100644 --- a/synapse/tools/service/backup.py +++ b/synapse/tools/service/backup.py @@ -177,5 +177,4 @@ def parse_args(argv): return args if __name__ == '__main__': # pragma: no cover - s_common.setlogging(logger, defval='DEBUG') s_cmd.exitmain(main) diff --git a/synapse/tools/storm/pkg/doc.py b/synapse/tools/storm/pkg/doc.py index 55306877b37..52a7c2b37fa 100644 --- a/synapse/tools/storm/pkg/doc.py +++ 
b/synapse/tools/storm/pkg/doc.py @@ -171,6 +171,5 @@ async def main(argv, outp=s_output.stdout): return 0 if __name__ == '__main__': # pragma: no cover - s_common.setlogging(logger, 'DEBUG') logging.getLogger('vcr').setLevel(logging.WARNING) s_cmd.exitmain(main) diff --git a/synapse/tools/utils/autodoc.py b/synapse/tools/utils/autodoc.py index a82051a1852..0b8b3fe6114 100644 --- a/synapse/tools/utils/autodoc.py +++ b/synapse/tools/utils/autodoc.py @@ -1021,5 +1021,4 @@ def getArgParser(outp): return pars if __name__ == '__main__': # pragma: no cover - s_common.setlogging(logger, 'DEBUG') s_cmd.exitmain(main) diff --git a/synapse/tools/utils/rstorm.py b/synapse/tools/utils/rstorm.py index f246963d6aa..256095a893b 100644 --- a/synapse/tools/utils/rstorm.py +++ b/synapse/tools/utils/rstorm.py @@ -31,5 +31,4 @@ async def main(argv, outp=s_output.stdout): outp.printf(line, addnl=False) if __name__ == '__main__': # pragma: no cover - s_common.setlogging(logger) s_cmd.exitmain(main) diff --git a/synapse/utils/getrefs.py b/synapse/utils/getrefs.py index 178782cec51..5ac00bcb34b 100644 --- a/synapse/utils/getrefs.py +++ b/synapse/utils/getrefs.py @@ -92,6 +92,5 @@ def parse_args(argv): return args if __name__ == '__main__': # pragma: no cover - s_common.setlogging(logger, defval='DEBUG') argv = parse_args(sys.argv[1:]) sys.exit(main(argv))