@@ -279,7 +279,7 @@ export class RepoIndexManager {
279279 private async runJob ( job : Job < JobPayload > , signal : AbortSignal ) {
280280 const id = job . data . jobId ;
281281 const logger = createJobLogger ( id ) ;
282- logger . info ( `Running ${ job . data . type } job ${ id } for repo ${ job . data . repoName } (id: ${ job . data . repoId } )` ) ;
282+ logger . debug ( `Running ${ job . data . type } job ${ id } for repo ${ job . data . repoName } (id: ${ job . data . repoId } )` ) ;
283283
284284 const currentStatus = await this . db . repoIndexingJob . findUniqueOrThrow ( {
285285 where : {
@@ -383,7 +383,7 @@ export class RepoIndexManager {
383383 signal,
384384 } ) ;
385385
386- logger . info ( `Fetching ${ repo . name } (id: ${ repo . id } )...` ) ;
386+ logger . debug ( `Fetching ${ repo . name } (id: ${ repo . id } )...` ) ;
387387 const { durationMs } = await measure ( ( ) => fetchRepository ( {
388388 cloneUrl : cloneUrlMaybeWithToken ,
389389 authHeader,
@@ -395,10 +395,9 @@ export class RepoIndexManager {
395395 } ) ) ;
396396 const fetchDuration_s = durationMs / 1000 ;
397397
398- process . stdout . write ( '\n' ) ;
399- logger . info ( `Fetched ${ repo . name } (id: ${ repo . id } ) in ${ fetchDuration_s } s` ) ;
398+ logger . debug ( `Fetched ${ repo . name } (id: ${ repo . id } ) in ${ fetchDuration_s } s` ) ;
400399 } else if ( ! isReadOnly ) {
401- logger . info ( `Cloning ${ repo . name } (id: ${ repo . id } )...` ) ;
400+ logger . debug ( `Cloning ${ repo . name } (id: ${ repo . id } )...` ) ;
402401
403402 const { durationMs } = await measure ( ( ) => cloneRepository ( {
404403 cloneUrl : cloneUrlMaybeWithToken ,
@@ -411,8 +410,7 @@ export class RepoIndexManager {
411410 } ) ) ;
412411 const cloneDuration_s = durationMs / 1000 ;
413412
414- process . stdout . write ( '\n' ) ;
415- logger . info ( `Cloned ${ repo . name } (id: ${ repo . id } ) in ${ cloneDuration_s } s` ) ;
413+ logger . debug ( `Cloned ${ repo . name } (id: ${ repo . id } ) in ${ cloneDuration_s } s` ) ;
416414 }
417415
418416 // Regardless of clone or fetch, always upsert the git config for the repo.
@@ -478,11 +476,11 @@ export class RepoIndexManager {
478476 revisions = revisions . slice ( 0 , 64 ) ;
479477 }
480478
481- logger . info ( `Indexing ${ repo . name } (id: ${ repo . id } )...` ) ;
479+ logger . debug ( `Indexing ${ repo . name } (id: ${ repo . id } )...` ) ;
482480 try {
483481 const { durationMs } = await measure ( ( ) => indexGitRepository ( repo , this . settings , revisions , signal ) ) ;
484482 const indexDuration_s = durationMs / 1000 ;
485- logger . info ( `Indexed ${ repo . name } (id: ${ repo . id } ) in ${ indexDuration_s } s` ) ;
483+ logger . debug ( `Indexed ${ repo . name } (id: ${ repo . id } ) in ${ indexDuration_s } s` ) ;
486484 } catch ( error ) {
487485 // Clean up any temporary shard files left behind by the failed indexing operation.
488486 // Zoekt creates .tmp files during indexing which can accumulate if indexing fails repeatedly.
@@ -497,15 +495,15 @@ export class RepoIndexManager {
497495 private async cleanupRepository ( repo : Repo , logger : Logger ) {
498496 const { path : repoPath , isReadOnly } = getRepoPath ( repo ) ;
499497 if ( existsSync ( repoPath ) && ! isReadOnly ) {
500- logger . info ( `Deleting repo directory ${ repoPath } ` ) ;
498+ logger . debug ( `Deleting repo directory ${ repoPath } ` ) ;
501499 await rm ( repoPath , { recursive : true , force : true } ) ;
502500 }
503501
504502 const shardPrefix = getShardPrefix ( repo . orgId , repo . id ) ;
505503 const files = ( await readdir ( INDEX_CACHE_DIR ) ) . filter ( file => file . startsWith ( shardPrefix ) ) ;
506504 for ( const file of files ) {
507505 const filePath = `${ INDEX_CACHE_DIR } /${ file } ` ;
508- logger . info ( `Deleting shard file ${ filePath } ` ) ;
506+ logger . debug ( `Deleting shard file ${ filePath } ` ) ;
509507 await rm ( filePath , { force : true } ) ;
510508 }
511509 }
@@ -564,14 +562,14 @@ export class RepoIndexManager {
564562 }
565563 } ) ;
566564
567- logger . info ( `Completed index job ${ job . data . jobId } for repo ${ repo . name } (id: ${ repo . id } )` ) ;
565+ logger . debug ( `Completed index job ${ job . data . jobId } for repo ${ repo . name } (id: ${ repo . id } )` ) ;
568566 }
569567 else if ( jobData . type === RepoIndexingJobType . CLEANUP ) {
570568 const repo = await this . db . repo . delete ( {
571569 where : { id : jobData . repoId } ,
572570 } ) ;
573571
574- logger . info ( `Completed cleanup job ${ job . data . jobId } for repo ${ repo . name } (id: ${ repo . id } )` ) ;
572+ logger . debug ( `Completed cleanup job ${ job . data . jobId } for repo ${ repo . name } (id: ${ repo . id } )` ) ;
575573 }
576574
577575 // Track metrics for successful job
@@ -604,7 +602,7 @@ export class RepoIndexManager {
604602 // or if it is being retried.
605603 const jobState = await job . getState ( ) ;
606604 if ( jobState !== 'failed' ) {
607- jobLogger . warn ( `Job ${ job . id } for repo ${ job . data . repoName } (id: ${ job . data . repoId } ) failed. Retrying...` ) ;
605+ jobLogger . warn ( `Job ${ job . id } for repo ${ job . data . repoName } (id: ${ job . data . repoId } ) failed. Retrying... Reason: ${ error . message } ` ) ;
608606 return ;
609607 }
610608
@@ -626,7 +624,7 @@ export class RepoIndexManager {
626624 this . promClient . activeRepoIndexJobs . dec ( { repo : job . data . repoName , type : jobTypeLabel } ) ;
627625 this . promClient . repoIndexJobFailTotal . inc ( { repo : job . data . repoName , type : jobTypeLabel } ) ;
628626
629- jobLogger . error ( `Failed job ${ job . data . jobId } for repo ${ repo . name } (id: ${ repo . id } ).` ) ;
627+ jobLogger . error ( `Failed job ${ job . data . jobId } for repo ${ repo . name } (id: ${ repo . id } ). Reason: ${ error . message } ` ) ;
630628
631629 captureEvent ( 'backend_repo_index_job_failed' , {
632630 repoId : job . data . repoId ,
@@ -664,7 +662,7 @@ export class RepoIndexManager {
664662 const existingIds = new Set ( existingRepos . map ( r => r . id ) ) ;
665663 for ( const [ repoId , repoPath ] of repoIdToPath ) {
666664 if ( ! existingIds . has ( repoId ) ) {
667- logger . info ( `Removing orphaned repo directory with no DB record: ${ repoPath } ` ) ;
665+ logger . debug ( `Removing orphaned repo directory with no DB record: ${ repoPath } ` ) ;
668666 await rm ( repoPath , { recursive : true , force : true } ) ;
669667 }
670668 }
@@ -695,7 +693,7 @@ export class RepoIndexManager {
695693 if ( ! existingIds . has ( repoId ) ) {
696694 for ( const entry of shards ) {
697695 const shardPath = `${ INDEX_CACHE_DIR } /${ entry } ` ;
698- logger . info ( `Removing orphaned index shard with no DB record: ${ shardPath } ` ) ;
696+ logger . debug ( `Removing orphaned index shard with no DB record: ${ shardPath } ` ) ;
699697 await rm ( shardPath , { force : true } ) ;
700698 }
701699 }