Commit 8db47b5

fix(archives): improve error handling in tar extraction to prevent unhandled errors
- Use process.nextTick instead of setImmediate for stream destruction
- Add destroyScheduled flag to prevent multiple destroy calls
- Attach error handler to extractStream before pipeline starts
- Ensures errors are properly caught by pipeline for rejection handling
1 parent 6d01689 commit 8db47b5
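
The guard pattern this commit applies, reduced to a single check, looks roughly like the sketch below. This is a minimal illustration, not the code in src/archives.ts: the extractWithGuards name, its signature, the single file-size check, and the default-style tar-fs import are assumptions; the tar-fs map hook, the destroyScheduled flag, the process.nextTick destroy, and the pre-attached error handler mirror the diff that follows.

import { createReadStream } from 'node:fs'
import { pipeline } from 'node:stream/promises'
import tarFs from 'tar-fs' // assumes esModuleInterop-style default import

// Hypothetical helper illustrating the commit's error-handling pattern.
async function extractWithGuards(archivePath: string, outputDir: string, maxFileSize: number) {
  let destroyScheduled = false

  const extractStream = tarFs.extract(outputDir, {
    map: (header: { name: string; size?: number; type?: string }) => {
      // A destroy is already queued; just pass the remaining headers through.
      if (destroyScheduled) return header

      if (header.size && header.size > maxFileSize) {
        destroyScheduled = true
        // Defer the destroy to the next tick so it runs outside the map callback;
        // the 'error' listener below keeps the event from going unhandled.
        process.nextTick(() => {
          extractStream.destroy(
            new Error(`File size exceeds limit: ${header.name} (${header.size} bytes)`),
          )
        })
      }
      return header
    },
  })

  // Attach the handler before the pipeline starts so no 'error' event is unobserved.
  extractStream.on('error', () => {
    // Error will be caught by pipeline
  })

  // pipeline() rejects with the destroy error, so callers see a normal rejection.
  await pipeline(createReadStream(archivePath), extractStream)
}

The destroyScheduled flag keeps later headers from queuing duplicate destroy calls, and scheduling with process.nextTick rather than setImmediate tears the stream down before further I/O callbacks run, so the error surfaces as the pipeline rejection described in the commit message.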

File tree: 1 file changed

src/archives.ts: 92 additions & 68 deletions
@@ -133,57 +133,69 @@ export async function extractTar(
 
   let totalExtractedSize = 0
 
+  let destroyScheduled = false
+
   const extractStream = tarFs.extract(normalizedOutputDir, {
     map: (header: { name: string; size?: number; type?: string }) => {
-      try {
-        // Check for symlinks
-        if (header.type === 'symlink' || header.type === 'link') {
-          const error = new Error(
-            `Symlink detected in archive: ${header.name}. Symlinks are not supported for security reasons.`,
+      // Skip if destroy already scheduled
+      if (destroyScheduled) {
+        return header
+      }
+
+      // Check for symlinks
+      if (header.type === 'symlink' || header.type === 'link') {
+        destroyScheduled = true
+        process.nextTick(() => {
+          extractStream.destroy(
+            new Error(
+              `Symlink detected in archive: ${header.name}. Symlinks are not supported for security reasons.`,
+            ),
           )
-          // Destroy stream on next tick to avoid unhandled error warnings
-          setImmediate(() => extractStream.destroy(error))
-          return header
-        }
+        })
+        return header
+      }
 
-        // Check individual file size
-        if (header.size && header.size > maxFileSize) {
-          const error = new Error(
-            `File size exceeds limit: ${header.name} (${header.size} bytes > ${maxFileSize} bytes)`,
+      // Check individual file size
+      if (header.size && header.size > maxFileSize) {
+        destroyScheduled = true
+        process.nextTick(() => {
+          extractStream.destroy(
+            new Error(
+              `File size exceeds limit: ${header.name} (${header.size} bytes > ${maxFileSize} bytes)`,
+            ),
           )
-          setImmediate(() => extractStream.destroy(error))
-          return header
-        }
+        })
+        return header
+      }
 
-        // Check total extracted size
-        if (header.size) {
-          totalExtractedSize += header.size
-          if (totalExtractedSize > maxTotalSize) {
-            const error = new Error(
-              `Total extracted size exceeds limit: ${totalExtractedSize} bytes > ${maxTotalSize} bytes`,
+      // Check total extracted size
+      if (header.size) {
+        totalExtractedSize += header.size
+        if (totalExtractedSize > maxTotalSize) {
+          destroyScheduled = true
+          process.nextTick(() => {
+            extractStream.destroy(
+              new Error(
+                `Total extracted size exceeds limit: ${totalExtractedSize} bytes > ${maxTotalSize} bytes`,
+              ),
             )
-            setImmediate(() => extractStream.destroy(error))
-            return header
-          }
+          })
+          return header
         }
-
-        return header
-      } catch (error) {
-        // Catch any unexpected errors and destroy stream
-        setImmediate(() => extractStream.destroy(error as Error))
-        return header
       }
+
+      return header
     },
     strip,
   })
 
-  const readStream = createReadStream(archivePath)
-
-  // Handle stream errors to prevent unhandled rejections
+  // Attach error handler before starting pipeline to catch errors
   extractStream.on('error', () => {
     // Error will be caught by pipeline
   })
 
+  const readStream = createReadStream(archivePath)
+
   try {
     await pipeline(readStream, extractStream)
   } catch (error) {
@@ -217,57 +229,69 @@ export async function extractTarGz(
 
   let totalExtractedSize = 0
 
+  let destroyScheduled = false
+
   const extractStream = tarFs.extract(normalizedOutputDir, {
     map: (header: { name: string; size?: number; type?: string }) => {
-      try {
-        // Check for symlinks
-        if (header.type === 'symlink' || header.type === 'link') {
-          const error = new Error(
-            `Symlink detected in archive: ${header.name}. Symlinks are not supported for security reasons.`,
+      // Skip if destroy already scheduled
+      if (destroyScheduled) {
+        return header
+      }
+
+      // Check for symlinks
+      if (header.type === 'symlink' || header.type === 'link') {
+        destroyScheduled = true
+        process.nextTick(() => {
+          extractStream.destroy(
+            new Error(
+              `Symlink detected in archive: ${header.name}. Symlinks are not supported for security reasons.`,
+            ),
          )
-          // Destroy stream on next tick to avoid unhandled error warnings
-          setImmediate(() => extractStream.destroy(error))
-          return header
-        }
+        })
+        return header
+      }
 
-        // Check individual file size
-        if (header.size && header.size > maxFileSize) {
-          const error = new Error(
-            `File size exceeds limit: ${header.name} (${header.size} bytes > ${maxFileSize} bytes)`,
+      // Check individual file size
+      if (header.size && header.size > maxFileSize) {
+        destroyScheduled = true
+        process.nextTick(() => {
+          extractStream.destroy(
+            new Error(
+              `File size exceeds limit: ${header.name} (${header.size} bytes > ${maxFileSize} bytes)`,
+            ),
           )
-          setImmediate(() => extractStream.destroy(error))
-          return header
-        }
+        })
+        return header
+      }
 
-        // Check total extracted size
-        if (header.size) {
-          totalExtractedSize += header.size
-          if (totalExtractedSize > maxTotalSize) {
-            const error = new Error(
-              `Total extracted size exceeds limit: ${totalExtractedSize} bytes > ${maxTotalSize} bytes`,
+      // Check total extracted size
+      if (header.size) {
+        totalExtractedSize += header.size
+        if (totalExtractedSize > maxTotalSize) {
+          destroyScheduled = true
+          process.nextTick(() => {
+            extractStream.destroy(
+              new Error(
+                `Total extracted size exceeds limit: ${totalExtractedSize} bytes > ${maxTotalSize} bytes`,
+              ),
            )
-            setImmediate(() => extractStream.destroy(error))
-            return header
-          }
+          })
+          return header
        }
-
-        return header
-      } catch (error) {
-        // Catch any unexpected errors and destroy stream
-        setImmediate(() => extractStream.destroy(error as Error))
-        return header
      }
+
+      return header
    },
    strip,
  })
 
-  const readStream = createReadStream(archivePath)
-
-  // Handle stream errors to prevent unhandled rejections
+  // Attach error handler before starting pipeline to catch errors
  extractStream.on('error', () => {
    // Error will be caught by pipeline
  })
 
+  const readStream = createReadStream(archivePath)
+
  try {
    await pipeline(readStream, createGunzip(), extractStream)
  } catch (error) {
