diff --git a/## GitHub Copilot Chat.md b/## GitHub Copilot Chat.md
new file mode 100644
index 00000000..abc1cd42
--- /dev/null
+++ b/## GitHub Copilot Chat.md
@@ -0,0 +1,36 @@
+## GitHub Copilot Chat
+
+- Extension Version: 0.23.2 (prod)
+- VS Code: vscode/1.96.4
+- OS: Linux
+
+## Network
+
+User Settings:
+```json
+  "github.copilot.advanced.debug.useElectronFetcher": true,
+  "github.copilot.advanced.debug.useNodeFetcher": false,
+  "github.copilot.advanced.debug.useNodeFetchFetcher": true
+```
+
+Connecting to https://api.github.com:
+- DNS ipv4 Lookup: timed out after 10 seconds
+- DNS ipv6 Lookup: Error (8 ms): getaddrinfo ENOTFOUND api.github.com
+- Proxy URL: None (2 ms)
+- Electron fetch (configured): timed out after 10 seconds
+- Node.js https: timed out after 10 seconds
+- Node.js fetch: timed out after 10 seconds
+- Helix fetch: timed out after 10 seconds
+
+Connecting to https://api.individual.githubcopilot.com/_ping:
+- DNS ipv4 Lookup: timed out after 10 seconds
+- DNS ipv6 Lookup: timed out after 10 seconds
+- Proxy URL: None (16 ms)
+- Electron fetch (configured): timed out after 10 seconds
+- Node.js https: timed out after 10 seconds
+- Node.js fetch: timed out after 10 seconds
+- Helix fetch: timed out after 10 seconds
+
+## Documentation
+
+In corporate networks: [Troubleshooting firewall settings for GitHub Copilot](https://docs.github.com/en/copilot/troubleshooting-github-copilot/troubleshooting-firewall-settings-for-github-copilot).
\ No newline at end of file
diff --git a/.autorestic.yml b/.autorestic.yml
new file mode 100644
index 00000000..3e662abd
--- /dev/null
+++ b/.autorestic.yml
@@ -0,0 +1,10 @@
+
+version: 2
+locations:
+  home:
+    from: /home/user
+    to: local_backup
+backends:
+  local_backup:
+    type: local
+    path: /home/user/autorestic_backup
diff --git a/.babelrc.json b/.babelrc.json
new file mode 100644
index 00000000..92e4ea82
--- /dev/null
+++ b/.babelrc.json
@@ -0,0 +1,5 @@
+{
+  "extends": "../../.babelrc",
+  "presets": [...],
+  "plugins": [...]
+}
\ No newline at end of file
diff --git a/.browserslistrc b/.browserslistrc
new file mode 100644
index 00000000..bd9fb772
--- /dev/null
+++ b/.browserslistrc
@@ -0,0 +1,2 @@
+> 0.25%
+not dead
\ No newline at end of file
diff --git a/.codesandbox/backend/.env b/.codesandbox/backend/.env
new file mode 100644
index 00000000..f2c8abdd
--- /dev/null
+++ b/.codesandbox/backend/.env
@@ -0,0 +1,10 @@
+API_KEY="my secret api key from .env"
+DATABASE_SE="super secret"
+
+MONGO_URL=mongodb+srv://:@cluster0.a9ylqls.mongodb.net/?retryWrites=true&w=majority&appName=Cluster0
+
+# Valid secret
+# a-string-secret-at-least-256-bits-long
+
+FACEBOOK_CLIENT_ID=__INSERT_CLIENT_ID_HERE__
+FACEBOOK_CLIENT_SECRET=__INSERT_CLIENT_SECRET_HERE__
\ No newline at end of file
diff --git a/.codesandbox/backend/README.md/README.md b/.codesandbox/backend/README.md/README.md
new file mode 100644
index 00000000..752293c8
--- /dev/null
+++ b/.codesandbox/backend/README.md/README.md
@@ -0,0 +1,84 @@
+# Express API Starter Project
+
+This project includes the packages and Babel setup for an Express server, and is meant to make it a little simpler to get up and running.
+
+## Getting started
+
+Install dependencies with `npm install`, then start the server by running `npm run dev`.
+
+## View it live
+
+Every project should be deployed somewhere. Be sure to include the link to the deployed project so that the viewer can click around and see what it's all about.
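+
+Once the server is running locally, a quick smoke test can confirm it responds. This is a hypothetical snippet, not part of the starter: it assumes Node 18+ (for the global `fetch`) and the default port 8080 used in `server.js`; swap in the deployed URL once you have one.
+
+```js
+// smoke-test.mjs: hypothetical check against the local dev server
+const res = await fetch('http://localhost:8080/') // default port from server.js
+console.log(res.status, await res.text())         // expect 200 and the greeting from GET /
+```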
+
+## MongoDB Stress Test
+
+### MPK version
+
+Running:
+
+    > cd MongoDB-Stress-Test
+    > python write_pbs.py --nclients 192 --host 'myhost.domain.com' --port 27018
+    $> qsub run.pbs
+
+### Dang version
+
+Files:
+- w.py: Main program
+- w_run.pbs: PBS script
+
+Example of running w.py:
+
+    # Pick a time 100 seconds in the future
+    future=`python -c "import time; print(int(time.time()) + 100)"`
+    # Run 1 client at that time
+    ./w.py --host=128.55.57.13 --ndocs=100 --when=$future
+
+Util files:
+- sharded-mongo: run a simple localhost setup with 2 shards
+
+## Password based authentication
+
+Client-server program that lets a client register itself with the server. The server keeps a table with a user-to-hash(password) entry for each user.
+
+The program registers a user with a username and password and stores the password hash in a dictionary. When a user tries to log in, the password is validated against that stored hash.
+
+Python, socket programming
+
+IMPLEMENTATION (python3)
+- SERVER: socket with multithreading
+- CLIENT: socket
+- HASH: SHA-256 (using Python's hashlib)
+
+INPUT (client side)
+1. Username
+2. Password
+
+OUTPUT
+1. Registration Successful: new user
+2. Connection Successful: password correct
+3. Login Failed: password incorrect
+
+Hash table: contains the user and password hash entries; it is implemented with Python's dictionary data structure.
+
+## React + Vite
+
+This template provides a minimal setup to get React working in Vite with HMR and some ESLint rules.
+
+Currently, two official plugins are available:
+
+- @vitejs/plugin-react uses Babel for Fast Refresh
+- @vitejs/plugin-react-swc uses SWC for Fast Refresh
+
+### Expanding the ESLint configuration
+
+If you are developing a production application, we recommend using TypeScript with type-aware lint rules enabled. Check out the TS template for information on how to integrate TypeScript and typescript-eslint in your project.
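+
+As a rough illustration only (not part of this starter): with typescript-eslint's flat-config helper, a type-aware setup could look something like the sketch below. The `projectService` option and `import.meta.dirname` are assumptions that require a recent typescript-eslint (v8) and Node 20.11+.
+
+```js
+// eslint.config.js: minimal sketch, assuming the typescript-eslint flat-config helper
+import tseslint from 'typescript-eslint'
+
+export default tseslint.config(
+  // swap the basic recommended rules for the type-aware variants
+  ...tseslint.configs.recommendedTypeChecked,
+  {
+    languageOptions: {
+      parserOptions: {
+        projectService: true,                 // let the parser discover tsconfig files
+        tsconfigRootDir: import.meta.dirname,
+      },
+    },
+  },
+)
+```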
+
+Install dependencies with `npm install`, then start the server by running `npm run dev`.
\ No newline at end of file
diff --git a/.codesandbox/backend/package.json b/.codesandbox/backend/package.json
new file mode 100644
index 00000000..40ed5bad
--- /dev/null
+++ b/.codesandbox/backend/package.json
@@ -0,0 +1,31 @@
+
+{
+  "name": "express-api-starter",
+  "version": "1.0.0",
+  "description": "Starter project to get up and running with Express quickly",
+  "scripts": {
+    "start": "babel-node server.js",
+    "dev": "nodemon server.js --exec babel-node",
+    "test": "jest"
+  },
+  "author": "Special Garden Group",
+  "license": "ISC OR GPL-3.0",
+  "dependencies": {
+    "express": "^5.1.0",
+    "@fontsource/roboto": "^5.2.6",
+    "@mui/icons-material": "^7.2.0",
+    "@mui/material": "^7.2.0",
+    "@mui/styled-engine-sc": "^7.2.0",
+    "react": "^19.1.0",
+    "react-dom": "^19.1.0",
+    "styled-components": "^6.1.19",
+    "@babel/core": "^7.17.9",
+    "@babel/node": "^7.16.8",
+    "@babel/preset-env": "^7.16.11",
+    "nodemon": "^3.0.1",
+    "cors": "^2.8.5",
+    "dotenv": "^16.5.0",
+    "mongodb": "^6.16.0",
+    "mongoose": "^8.15.1",
+    "body-parser": "^1.20.2",
+    "bcrypt-nodejs": "^0.0.3"
+  }
+}
diff --git a/.codesandbox/backend/server.js b/.codesandbox/backend/server.js
new file mode 100644
index 00000000..29921db9
--- /dev/null
+++ b/.codesandbox/backend/server.js
@@ -0,0 +1,148 @@
+import express from 'express'
+import bodyParser from 'body-parser'
+import cors from 'cors'
+import crypto from 'crypto'
+import mongoose from 'mongoose'
+import bcrypt from 'bcrypt-nodejs'
+import dotenv from 'dotenv'
+
+dotenv.config()
+
+const mongoUrl = process.env.MONGO_URL || "mongodb://localhost/auth"
+mongoose.connect(mongoUrl, { useNewUrlParser: true, useUnifiedTopology: true })
+mongoose.Promise = Promise
+
+const { Schema, model } = mongoose
+const userSchema = new Schema({
+  name: {
+    type: String,
+    unique: true
+  },
+  password: {
+    type: String,
+    required: true
+  },
+  accessToken: {
+    type: String,
+    default: () => crypto.randomBytes(128).toString('hex')
+  }
+})
+const User = model("User", userSchema)
+
+// Example
+// POST Request:
+//   const request = { name: "Bob", password: "foobar" };
+// DB Entry (only the one-way bcrypt hash is stored):
+//   const dbEntry = { name: "Bob", password: "5abbc32983def" }
+//   bcrypt.compareSync(request.password, dbEntry.password);
+// One-way encryption:
+//   const user = new User({ name: "Bob", password: bcrypt.hashSync("foobar") });
+//   user.save();
+
+// Defines the port the app will run on. Defaults to 8080, but can be overridden when starting the server. For example:
+//
+//   PORT=9000 npm start
+const port = process.env.PORT || 8080
+const app = express()
+
+// Middleware: looks the user up by the access token sent in the Authorization header
+const authenticateUser = async (req, res, next) => {
+  const user = await User.findOne({ accessToken: req.header('Authorization') });
+  if (user) {
+    req.user = user;
+    next();
+  } else {
+    res.status(401).json({ loggedOut: true });
+  }
+}
+
+// Add middlewares to enable cors and json body parsing
+app.use(cors())
+app.use(express.json());
+app.use(bodyParser.json())
+
+// Start defining your routes here
+app.get('/', (req, res) => {
+  res.send('Hello Member')
+})
+
+app.post('/tweets', authenticateUser);
+app.post('/tweets', async (req, res) => {
+  // This will only happen if the next() function is called from the middleware!
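+  //
+  // For example, a client would first POST /sessions to obtain an accessToken and
+  // then call this endpoint with that token in the Authorization header.
+  // Hypothetical client-side sketch (not part of this server); the `message`
+  // field is only an illustration:
+  //
+  //   fetch('http://localhost:8080/tweets', {
+  //     method: 'POST',
+  //     headers: { Authorization: accessToken, 'Content-Type': 'application/json' },
+  //     body: JSON.stringify({ message: 'hello' })
+  //   })
+  //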
+  // now we can access the req.user object from the middleware
+  res.json({ user: req.user })
+})
+
+// Start defining your routes here
+app.get('/', (req, res) => {
+  res.send('Hello world')
+})
+
+// Log in: look the user up by name and compare the bcrypt hashes
+app.post('/sessions', async (req, res) => {
+  const user = await User.findOne({ name: req.body.name });
+  if (user && bcrypt.compareSync(req.body.password, user.password)) {
+    // Success
+    res.json({ userId: user._id, accessToken: user.accessToken });
+  } else {
+    // Failure
+    // a. User does not exist
+    // b. Encrypted password does not match
+    res.json({ notFound: true });
+  }
+});
+
+// Register a new user; only the bcrypt hash of the password is stored
+app.post("/users", (req, res) => {
+  try {
+    const { name, email, password } = req.body
+    const salt = bcrypt.genSaltSync()
+    const user = new User({ name, email, password: bcrypt.hashSync(password, salt) })
+    user.save()
+    res.status(201).json({
+      success: true,
+      message: "User created",
+      id: user._id,
+      accessToken: user.accessToken,
+    })
+  } catch (error) {
+    res.status(400).json({
+      success: false,
+      message: "Could not create user",
+      errors: error
+    })
+  }
+})
+
+// Protected route: authenticateUser only calls next() when the access token is valid
+app.get("/secrets", authenticateUser)
+app.get("/secrets", (req, res) => {
+  res.json({ secret: "This is secret" })
+})
+
+// Alternative version of the same route
+app.get('/secrets', (req, res) => {
+  res.json({ secret: 'This is a super secret message' })
+});
+
+// Alternative login route that looks the user up by email instead of name
+app.post('/sessions', async (req, res) => {
+  const user = await User.findOne({ email: req.body.email });
+  if (user && bcrypt.compareSync(req.body.password, user.password)) {
+    res.json({ userId: user._id, accessToken: user.accessToken });
+  } else {
+    res.json({ notFound: true });
+  }
+});
+
+// Add middlewares to enable cors and json body parsing
+app.use(cors())
+app.use(bodyParser.json())
+
+// Start defining your routes here
+app.get('/', (req, res) => {
+  // fetch('...', { headers: { Authorization: 'my secret api key' } })
+  res.send(process.env.API_KEY)
+})
+
+// Start the server
+app.listen(port, () => {
+  console.log(`Server running on http://localhost:${port}`)
+  console.log(bcrypt.hashSync("foobar"));
+})
\ No newline at end of file
diff --git a/.codesandbox/backend/tasks.json b/.codesandbox/backend/tasks.json
new file mode 100644
index 00000000..9a67839b
--- /dev/null
+++ b/.codesandbox/backend/tasks.json
@@ -0,0 +1,23 @@
+{
+  // These tasks will run in order when initializing your CodeSandbox project.
+  "setupTasks": [
+    {
+      "command": "pnpm install",
+      "name": "Installing Dependencies"
+    }
+  ],
+
+  // These tasks can be run from CodeSandbox. Running one will open a log in the app.
+ "tasks": { + "start": { + "name": "start", + "command": "pnpm start", + "runAtStart": false + }, + "dev": { + "name": "dev", + "command": "pnpm dev", + "runAtStart": true + } + } +} diff --git a/.codesandbox/node/assert.js b/.codesandbox/node/assert.js new file mode 100644 index 00000000..2ddc44d9 --- /dev/null +++ b/.codesandbox/node/assert.js @@ -0,0 +1,824 @@ +'use strict'; + +const { + ArrayPrototypeIndexOf, + ArrayPrototypeJoin, + ArrayPrototypePush, + ArrayPrototypeSlice, + Error, + NumberIsNaN, + ObjectAssign, + ObjectIs, + ObjectKeys, + ObjectPrototypeIsPrototypeOf, + ReflectApply, + RegExpPrototypeExec, + String, + StringPrototypeIndexOf, + StringPrototypeSlice, + StringPrototypeSplit, +} = primordials; + +const { + codes: { + ERR_AMBIGUOUS_ARGUMENT, + ERR_INVALID_ARG_TYPE, + ERR_INVALID_ARG_VALUE, + ERR_INVALID_RETURN_VALUE, + ERR_MISSING_ARGS, + }, +} = require('internal/errors'); +const AssertionError = require('internal/assert/assertion_error'); +const { inspect } = require('internal/util/inspect'); +const { + isPromise, + isRegExp, +} = require('internal/util/types'); +const { isError, deprecate } = require('internal/util'); +const { innerOk } = require('internal/assert/utils'); + +const CallTracker = require('internal/assert/calltracker'); +const { + validateFunction, +} = require('internal/validators'); + +let isDeepEqual; +let isDeepStrictEqual; +let isPartialStrictEqual; + +function lazyLoadComparison() { + const comparison = require('internal/util/comparisons'); + isDeepEqual = comparison.isDeepEqual; + isDeepStrictEqual = comparison.isDeepStrictEqual; + isPartialStrictEqual = comparison.isPartialStrictEqual; +} + +let warned = false; + +// The assert module provides functions that throw +// AssertionError's when particular conditions are not met. The +// assert module must conform to the following interface. + +const assert = module.exports = ok; + +const NO_EXCEPTION_SENTINEL = {}; + +// All of the following functions must throw an AssertionError +// when a corresponding condition is not met, with a message that +// may be undefined if not provided. All assertion methods provide +// both the actual and expected values to the assertion error for +// display purposes. + +function innerFail(obj) { + if (obj.message instanceof Error) throw obj.message; + + throw new AssertionError(obj); +} + +/** + * @param {any} actual + * @param {any} expected + * @param {string | Error} [message] + * @param {string} [operator] + * @param {Function} [stackStartFn] + */ +function fail(actual, expected, message, operator, stackStartFn) { + const argsLen = arguments.length; + + let internalMessage = false; + if (actual == null && argsLen <= 1) { + internalMessage = true; + message = 'Failed'; + } else if (argsLen === 1) { + message = actual; + actual = undefined; + } else { + if (warned === false) { + warned = true; + process.emitWarning( + 'assert.fail() with more than one argument is deprecated. ' + + 'Please use assert.strictEqual() instead or only pass a message.', + 'DeprecationWarning', + 'DEP0094', + ); + } + if (argsLen === 2) + operator = '!='; + } + + if (message instanceof Error) throw message; + + const errArgs = { + actual, + expected, + operator: operator === undefined ? 'fail' : operator, + stackStartFn: stackStartFn || fail, + message, + }; + const err = new AssertionError(errArgs); + if (internalMessage) { + err.generatedMessage = true; + } + throw err; +} + +assert.fail = fail; + +// The AssertionError is defined in internal/error. 
+assert.AssertionError = AssertionError; + +/** + * Pure assertion tests whether a value is truthy, as determined + * by !!value. + * @param {...any} args + * @returns {void} + */ +function ok(...args) { + innerOk(ok, args.length, ...args); +} +assert.ok = ok; + +/** + * The equality assertion tests shallow, coercive equality with ==. + * @param {any} actual + * @param {any} expected + * @param {string | Error} [message] + * @returns {void} + */ +/* eslint-disable no-restricted-properties */ +assert.equal = function equal(actual, expected, message) { + if (arguments.length < 2) { + throw new ERR_MISSING_ARGS('actual', 'expected'); + } + // eslint-disable-next-line eqeqeq + if (actual != expected && (!NumberIsNaN(actual) || !NumberIsNaN(expected))) { + innerFail({ + actual, + expected, + message, + operator: '==', + stackStartFn: equal, + }); + } +}; + +/** + * The non-equality assertion tests for whether two objects are not + * equal with !=. + * @param {any} actual + * @param {any} expected + * @param {string | Error} [message] + * @returns {void} + */ +assert.notEqual = function notEqual(actual, expected, message) { + if (arguments.length < 2) { + throw new ERR_MISSING_ARGS('actual', 'expected'); + } + // eslint-disable-next-line eqeqeq + if (actual == expected || (NumberIsNaN(actual) && NumberIsNaN(expected))) { + innerFail({ + actual, + expected, + message, + operator: '!=', + stackStartFn: notEqual, + }); + } +}; + +/** + * The deep equivalence assertion tests a deep equality relation. + * @param {any} actual + * @param {any} expected + * @param {string | Error} [message] + * @returns {void} + */ +assert.deepEqual = function deepEqual(actual, expected, message) { + if (arguments.length < 2) { + throw new ERR_MISSING_ARGS('actual', 'expected'); + } + if (isDeepEqual === undefined) lazyLoadComparison(); + if (!isDeepEqual(actual, expected)) { + innerFail({ + actual, + expected, + message, + operator: 'deepEqual', + stackStartFn: deepEqual, + }); + } +}; + +/** + * The deep non-equivalence assertion tests for any deep inequality. + * @param {any} actual + * @param {any} expected + * @param {string | Error} [message] + * @returns {void} + */ +assert.notDeepEqual = function notDeepEqual(actual, expected, message) { + if (arguments.length < 2) { + throw new ERR_MISSING_ARGS('actual', 'expected'); + } + if (isDeepEqual === undefined) lazyLoadComparison(); + if (isDeepEqual(actual, expected)) { + innerFail({ + actual, + expected, + message, + operator: 'notDeepEqual', + stackStartFn: notDeepEqual, + }); + } +}; +/* eslint-enable */ + +/** + * The deep strict equivalence assertion tests a deep strict equality + * relation. + * @param {any} actual + * @param {any} expected + * @param {string | Error} [message] + * @returns {void} + */ +assert.deepStrictEqual = function deepStrictEqual(actual, expected, message) { + if (arguments.length < 2) { + throw new ERR_MISSING_ARGS('actual', 'expected'); + } + if (isDeepEqual === undefined) lazyLoadComparison(); + if (!isDeepStrictEqual(actual, expected)) { + innerFail({ + actual, + expected, + message, + operator: 'deepStrictEqual', + stackStartFn: deepStrictEqual, + }); + } +}; + +/** + * The deep strict non-equivalence assertion tests for any deep strict + * inequality. 
+ * @param {any} actual + * @param {any} expected + * @param {string | Error} [message] + * @returns {void} + */ +assert.notDeepStrictEqual = notDeepStrictEqual; +function notDeepStrictEqual(actual, expected, message) { + if (arguments.length < 2) { + throw new ERR_MISSING_ARGS('actual', 'expected'); + } + if (isDeepEqual === undefined) lazyLoadComparison(); + if (isDeepStrictEqual(actual, expected)) { + innerFail({ + actual, + expected, + message, + operator: 'notDeepStrictEqual', + stackStartFn: notDeepStrictEqual, + }); + } +} + +/** + * The strict equivalence assertion tests a strict equality relation. + * @param {any} actual + * @param {any} expected + * @param {string | Error} [message] + * @returns {void} + */ +assert.strictEqual = function strictEqual(actual, expected, message) { + if (arguments.length < 2) { + throw new ERR_MISSING_ARGS('actual', 'expected'); + } + if (!ObjectIs(actual, expected)) { + innerFail({ + actual, + expected, + message, + operator: 'strictEqual', + stackStartFn: strictEqual, + }); + } +}; + +/** + * The strict non-equivalence assertion tests for any strict inequality. + * @param {any} actual + * @param {any} expected + * @param {string | Error} [message] + * @returns {void} + */ +assert.notStrictEqual = function notStrictEqual(actual, expected, message) { + if (arguments.length < 2) { + throw new ERR_MISSING_ARGS('actual', 'expected'); + } + if (ObjectIs(actual, expected)) { + innerFail({ + actual, + expected, + message, + operator: 'notStrictEqual', + stackStartFn: notStrictEqual, + }); + } +}; + +/** + * The strict equivalence assertion test between two objects + * @param {any} actual + * @param {any} expected + * @param {string | Error} [message] + * @returns {void} + */ +assert.partialDeepStrictEqual = function partialDeepStrictEqual( + actual, + expected, + message, +) { + if (arguments.length < 2) { + throw new ERR_MISSING_ARGS('actual', 'expected'); + } + if (isDeepEqual === undefined) lazyLoadComparison(); + if (!isPartialStrictEqual(actual, expected)) { + innerFail({ + actual, + expected, + message, + operator: 'partialDeepStrictEqual', + stackStartFn: partialDeepStrictEqual, + }); + } +}; + +class Comparison { + constructor(obj, keys, actual) { + for (const key of keys) { + if (key in obj) { + if (actual !== undefined && + typeof actual[key] === 'string' && + isRegExp(obj[key]) && + RegExpPrototypeExec(obj[key], actual[key]) !== null) { + this[key] = actual[key]; + } else { + this[key] = obj[key]; + } + } + } + } +} + +function compareExceptionKey(actual, expected, key, message, keys, fn) { + if (!(key in actual) || !isDeepStrictEqual(actual[key], expected[key])) { + if (!message) { + // Create placeholder objects to create a nice output. + const a = new Comparison(actual, keys); + const b = new Comparison(expected, keys, actual); + + const err = new AssertionError({ + actual: a, + expected: b, + operator: 'deepStrictEqual', + stackStartFn: fn, + }); + err.actual = actual; + err.expected = expected; + err.operator = fn.name; + throw err; + } + innerFail({ + actual, + expected, + message, + operator: fn.name, + stackStartFn: fn, + }); + } +} + +function expectedException(actual, expected, message, fn) { + let generatedMessage = false; + let throwError = false; + + if (typeof expected !== 'function') { + // Handle regular expressions. 
+ if (isRegExp(expected)) { + const str = String(actual); + if (RegExpPrototypeExec(expected, str) !== null) + return; + + if (!message) { + generatedMessage = true; + message = 'The input did not match the regular expression ' + + `${inspect(expected)}. Input:\n\n${inspect(str)}\n`; + } + throwError = true; + // Handle primitives properly. + } else if (typeof actual !== 'object' || actual === null) { + const err = new AssertionError({ + actual, + expected, + message, + operator: 'deepStrictEqual', + stackStartFn: fn, + }); + err.operator = fn.name; + throw err; + } else { + // Handle validation objects. + const keys = ObjectKeys(expected); + // Special handle errors to make sure the name and the message are + // compared as well. + if (expected instanceof Error) { + ArrayPrototypePush(keys, 'name', 'message'); + } else if (keys.length === 0) { + throw new ERR_INVALID_ARG_VALUE('error', + expected, 'may not be an empty object'); + } + if (isDeepEqual === undefined) lazyLoadComparison(); + for (const key of keys) { + if (typeof actual[key] === 'string' && + isRegExp(expected[key]) && + RegExpPrototypeExec(expected[key], actual[key]) !== null) { + continue; + } + compareExceptionKey(actual, expected, key, message, keys, fn); + } + return; + } + // Guard instanceof against arrow functions as they don't have a prototype. + // Check for matching Error classes. + } else if (expected.prototype !== undefined && actual instanceof expected) { + return; + } else if (ObjectPrototypeIsPrototypeOf(Error, expected)) { + if (!message) { + generatedMessage = true; + message = 'The error is expected to be an instance of ' + + `"${expected.name}". Received `; + if (isError(actual)) { + const name = (actual.constructor?.name) || + actual.name; + if (expected.name === name) { + message += 'an error with identical name but a different prototype.'; + } else { + message += `"${name}"`; + } + if (actual.message) { + message += `\n\nError message:\n\n${actual.message}`; + } + } else { + message += `"${inspect(actual, { depth: -1 })}"`; + } + } + throwError = true; + } else { + // Check validation functions return value. + const res = ReflectApply(expected, {}, [actual]); + if (res !== true) { + if (!message) { + generatedMessage = true; + const name = expected.name ? `"${expected.name}" ` : ''; + message = `The ${name}validation function is expected to return` + + ` "true". Received ${inspect(res)}`; + + if (isError(actual)) { + message += `\n\nCaught error:\n\n${actual}`; + } + } + throwError = true; + } + } + + if (throwError) { + const err = new AssertionError({ + actual, + expected, + message, + operator: fn.name, + stackStartFn: fn, + }); + err.generatedMessage = generatedMessage; + throw err; + } +} + +function getActual(fn) { + validateFunction(fn, 'fn'); + try { + fn(); + } catch (e) { + return e; + } + return NO_EXCEPTION_SENTINEL; +} + +function checkIsPromise(obj) { + // Accept native ES6 promises and promises that are implemented in a similar + // way. Do not accept thenables that use a function as `obj` and that have no + // `catch` handler. + return isPromise(obj) || + (obj !== null && typeof obj === 'object' && + typeof obj.then === 'function' && + typeof obj.catch === 'function'); +} + +async function waitForActual(promiseFn) { + let resultPromise; + if (typeof promiseFn === 'function') { + // Return a rejected promise if `promiseFn` throws synchronously. + resultPromise = promiseFn(); + // Fail in case no promise is returned. 
+ if (!checkIsPromise(resultPromise)) { + throw new ERR_INVALID_RETURN_VALUE('instance of Promise', + 'promiseFn', resultPromise); + } + } else if (checkIsPromise(promiseFn)) { + resultPromise = promiseFn; + } else { + throw new ERR_INVALID_ARG_TYPE( + 'promiseFn', ['Function', 'Promise'], promiseFn); + } + + try { + await resultPromise; + } catch (e) { + return e; + } + return NO_EXCEPTION_SENTINEL; +} + +function expectsError(stackStartFn, actual, error, message) { + if (typeof error === 'string') { + if (arguments.length === 4) { + throw new ERR_INVALID_ARG_TYPE('error', + ['Object', 'Error', 'Function', 'RegExp'], + error); + } + if (typeof actual === 'object' && actual !== null) { + if (actual.message === error) { + throw new ERR_AMBIGUOUS_ARGUMENT( + 'error/message', + `The error message "${actual.message}" is identical to the message.`, + ); + } + } else if (actual === error) { + throw new ERR_AMBIGUOUS_ARGUMENT( + 'error/message', + `The error "${actual}" is identical to the message.`, + ); + } + message = error; + error = undefined; + } else if (error != null && + typeof error !== 'object' && + typeof error !== 'function') { + throw new ERR_INVALID_ARG_TYPE('error', + ['Object', 'Error', 'Function', 'RegExp'], + error); + } + + if (actual === NO_EXCEPTION_SENTINEL) { + let details = ''; + if (error?.name) { + details += ` (${error.name})`; + } + details += message ? `: ${message}` : '.'; + const fnType = stackStartFn === assert.rejects ? 'rejection' : 'exception'; + innerFail({ + actual: undefined, + expected: error, + operator: stackStartFn.name, + message: `Missing expected ${fnType}${details}`, + stackStartFn, + }); + } + + if (!error) + return; + + expectedException(actual, error, message, stackStartFn); +} + +function hasMatchingError(actual, expected) { + if (typeof expected !== 'function') { + if (isRegExp(expected)) { + const str = String(actual); + return RegExpPrototypeExec(expected, str) !== null; + } + throw new ERR_INVALID_ARG_TYPE( + 'expected', ['Function', 'RegExp'], expected, + ); + } + // Guard instanceof against arrow functions as they don't have a prototype. + if (expected.prototype !== undefined && actual instanceof expected) { + return true; + } + if (ObjectPrototypeIsPrototypeOf(Error, expected)) { + return false; + } + return ReflectApply(expected, {}, [actual]) === true; +} + +function expectsNoError(stackStartFn, actual, error, message) { + if (actual === NO_EXCEPTION_SENTINEL) + return; + + if (typeof error === 'string') { + message = error; + error = undefined; + } + + if (!error || hasMatchingError(actual, error)) { + const details = message ? `: ${message}` : '.'; + const fnType = stackStartFn === assert.doesNotReject ? + 'rejection' : 'exception'; + innerFail({ + actual, + expected: error, + operator: stackStartFn.name, + message: `Got unwanted ${fnType}${details}\n` + + `Actual message: "${actual?.message}"`, + stackStartFn, + }); + } + throw actual; +} + +/** + * Expects the function `promiseFn` to throw an error. + * @param {() => any} promiseFn + * @param {...any} [args] + * @returns {void} + */ +assert.throws = function throws(promiseFn, ...args) { + expectsError(throws, getActual(promiseFn), ...args); +}; + +/** + * Expects `promiseFn` function or its value to reject. 
+ * @param {() => Promise} promiseFn + * @param {...any} [args] + * @returns {Promise} + */ +assert.rejects = async function rejects(promiseFn, ...args) { + expectsError(rejects, await waitForActual(promiseFn), ...args); +}; + +/** + * Asserts that the function `fn` does not throw an error. + * @param {() => any} fn + * @param {...any} [args] + * @returns {void} + */ +assert.doesNotThrow = function doesNotThrow(fn, ...args) { + expectsNoError(doesNotThrow, getActual(fn), ...args); +}; + +/** + * Expects `fn` or its value to not reject. + * @param {() => Promise} fn + * @param {...any} [args] + * @returns {Promise} + */ +assert.doesNotReject = async function doesNotReject(fn, ...args) { + expectsNoError(doesNotReject, await waitForActual(fn), ...args); +}; + +/** + * Throws `AssertionError` if the value is not `null` or `undefined`. + * @param {any} err + * @returns {void} + */ +assert.ifError = function ifError(err) { + if (err !== null && err !== undefined) { + let message = 'ifError got unwanted exception: '; + if (typeof err === 'object' && typeof err.message === 'string') { + if (err.message.length === 0 && err.constructor) { + message += err.constructor.name; + } else { + message += err.message; + } + } else { + message += inspect(err); + } + + const newErr = new AssertionError({ + actual: err, + expected: null, + operator: 'ifError', + message, + stackStartFn: ifError, + }); + + // Make sure we actually have a stack trace! + const origStack = err.stack; + + if (typeof origStack === 'string') { + // This will remove any duplicated frames from the error frames taken + // from within `ifError` and add the original error frames to the newly + // created ones. + const origStackStart = StringPrototypeIndexOf(origStack, '\n at'); + if (origStackStart !== -1) { + const originalFrames = StringPrototypeSplit( + StringPrototypeSlice(origStack, origStackStart + 1), + '\n', + ); + // Filter all frames existing in err.stack. + let newFrames = StringPrototypeSplit(newErr.stack, '\n'); + for (const errFrame of originalFrames) { + // Find the first occurrence of the frame. + const pos = ArrayPrototypeIndexOf(newFrames, errFrame); + if (pos !== -1) { + // Only keep new frames. + newFrames = ArrayPrototypeSlice(newFrames, 0, pos); + break; + } + } + const stackStart = ArrayPrototypeJoin(newFrames, '\n'); + const stackEnd = ArrayPrototypeJoin(originalFrames, '\n'); + newErr.stack = `${stackStart}\n${stackEnd}`; + } + } + + throw newErr; + } +}; + +function internalMatch(string, regexp, message, fn) { + if (!isRegExp(regexp)) { + throw new ERR_INVALID_ARG_TYPE( + 'regexp', 'RegExp', regexp, + ); + } + const match = fn === assert.match; + if (typeof string !== 'string' || + RegExpPrototypeExec(regexp, string) !== null !== match) { + if (message instanceof Error) { + throw message; + } + + const generatedMessage = !message; + + // 'The input was expected to not match the regular expression ' + + message ||= (typeof string !== 'string' ? + 'The "string" argument must be of type string. Received type ' + + `${typeof string} (${inspect(string)})` : + (match ? + 'The input did not match the regular expression ' : + 'The input was expected to not match the regular expression ') + + `${inspect(regexp)}. Input:\n\n${inspect(string)}\n`); + const err = new AssertionError({ + actual: string, + expected: regexp, + message, + operator: fn.name, + stackStartFn: fn, + }); + err.generatedMessage = generatedMessage; + throw err; + } +} + +/** + * Expects the `string` input to match the regular expression. 
+ * @param {string} string + * @param {RegExp} regexp + * @param {string | Error} [message] + * @returns {void} + */ +assert.match = function match(string, regexp, message) { + internalMatch(string, regexp, message, match); +}; + +/** + * Expects the `string` input not to match the regular expression. + * @param {string} string + * @param {RegExp} regexp + * @param {string | Error} [message] + * @returns {void} + */ +assert.doesNotMatch = function doesNotMatch(string, regexp, message) { + internalMatch(string, regexp, message, doesNotMatch); +}; + +assert.CallTracker = deprecate(CallTracker, 'assert.CallTracker is deprecated.', 'DEP0173'); + +/** + * Expose a strict only variant of assert. + * @param {...any} args + * @returns {void} + */ +function strict(...args) { + innerOk(strict, args.length, ...args); +} + +assert.strict = ObjectAssign(strict, assert, { + equal: assert.strictEqual, + deepEqual: assert.deepStrictEqual, + notEqual: assert.notStrictEqual, + notDeepEqual: assert.notDeepStrictEqual, +}); + +assert.strict.strict = assert.strict; \ No newline at end of file diff --git a/.codesandbox/node/async_hooks.js b/.codesandbox/node/async_hooks.js new file mode 100644 index 00000000..8c57bc67 --- /dev/null +++ b/.codesandbox/node/async_hooks.js @@ -0,0 +1,296 @@ +'use strict'; + +const { + ArrayPrototypeIncludes, + ArrayPrototypeIndexOf, + ArrayPrototypePush, + ArrayPrototypeSplice, + ArrayPrototypeUnshift, + FunctionPrototypeBind, + NumberIsSafeInteger, + ObjectDefineProperties, + ObjectFreeze, + ReflectApply, + Symbol, +} = primordials; + +const { + ERR_ASYNC_CALLBACK, + ERR_ASYNC_TYPE, + ERR_INVALID_ASYNC_ID, +} = require('internal/errors').codes; +const { + deprecate, + kEmptyObject, +} = require('internal/util'); +const { + validateFunction, + validateString, +} = require('internal/validators'); +const internal_async_hooks = require('internal/async_hooks'); + +const AsyncContextFrame = require('internal/async_context_frame'); + +// Get functions +// For userland AsyncResources, make sure to emit a destroy event when the +// resource gets gced. 
+const { registerDestroyHook, kNoPromiseHook } = internal_async_hooks; +const { + asyncWrap, + executionAsyncId, + triggerAsyncId, + // Private API + hasAsyncIdStack, + getHookArrays, + enableHooks, + disableHooks, + updatePromiseHookMode, + executionAsyncResource, + // Internal Embedder API + newAsyncId, + getDefaultTriggerAsyncId, + emitInit, + emitBefore, + emitAfter, + emitDestroy, + enabledHooksExist, + initHooksExist, + destroyHooksExist, +} = internal_async_hooks; + +// Get symbols +const { + async_id_symbol, trigger_async_id_symbol, + init_symbol, before_symbol, after_symbol, destroy_symbol, + promise_resolve_symbol, +} = internal_async_hooks.symbols; + +// Get constants +const { + kInit, kBefore, kAfter, kDestroy, kTotals, kPromiseResolve, +} = internal_async_hooks.constants; + +// Listener API // + +class AsyncHook { + constructor({ init, before, after, destroy, promiseResolve }) { + if (init !== undefined && typeof init !== 'function') + throw new ERR_ASYNC_CALLBACK('hook.init'); + if (before !== undefined && typeof before !== 'function') + throw new ERR_ASYNC_CALLBACK('hook.before'); + if (after !== undefined && typeof after !== 'function') + throw new ERR_ASYNC_CALLBACK('hook.after'); + if (destroy !== undefined && typeof destroy !== 'function') + throw new ERR_ASYNC_CALLBACK('hook.destroy'); + if (promiseResolve !== undefined && typeof promiseResolve !== 'function') + throw new ERR_ASYNC_CALLBACK('hook.promiseResolve'); + + this[init_symbol] = init; + this[before_symbol] = before; + this[after_symbol] = after; + this[destroy_symbol] = destroy; + this[promise_resolve_symbol] = promiseResolve; + this[kNoPromiseHook] = false; + } + + enable() { + // The set of callbacks for a hook should be the same regardless of whether + // enable()/disable() are run during their execution. The following + // references are reassigned to the tmp arrays if a hook is currently being + // processed. + const { 0: hooks_array, 1: hook_fields } = getHookArrays(); + + // Each hook is only allowed to be added once. + if (ArrayPrototypeIncludes(hooks_array, this)) + return this; + + const prev_kTotals = hook_fields[kTotals]; + + // createHook() has already enforced that the callbacks are all functions, + // so here simply increment the count of whether each callbacks exists or + // not. 
+ hook_fields[kTotals] = hook_fields[kInit] += +!!this[init_symbol]; + hook_fields[kTotals] += hook_fields[kBefore] += +!!this[before_symbol]; + hook_fields[kTotals] += hook_fields[kAfter] += +!!this[after_symbol]; + hook_fields[kTotals] += hook_fields[kDestroy] += +!!this[destroy_symbol]; + hook_fields[kTotals] += + hook_fields[kPromiseResolve] += +!!this[promise_resolve_symbol]; + ArrayPrototypePush(hooks_array, this); + + if (prev_kTotals === 0 && hook_fields[kTotals] > 0) { + enableHooks(); + } + + if (!this[kNoPromiseHook]) { + updatePromiseHookMode(); + } + + return this; + } + + disable() { + const { 0: hooks_array, 1: hook_fields } = getHookArrays(); + + const index = ArrayPrototypeIndexOf(hooks_array, this); + if (index === -1) + return this; + + const prev_kTotals = hook_fields[kTotals]; + + hook_fields[kTotals] = hook_fields[kInit] -= +!!this[init_symbol]; + hook_fields[kTotals] += hook_fields[kBefore] -= +!!this[before_symbol]; + hook_fields[kTotals] += hook_fields[kAfter] -= +!!this[after_symbol]; + hook_fields[kTotals] += hook_fields[kDestroy] -= +!!this[destroy_symbol]; + hook_fields[kTotals] += + hook_fields[kPromiseResolve] -= +!!this[promise_resolve_symbol]; + ArrayPrototypeSplice(hooks_array, index, 1); + + if (prev_kTotals > 0 && hook_fields[kTotals] === 0) { + disableHooks(); + } + + return this; + } +} + + +function createHook(fns) { + return new AsyncHook(fns); +} + + +// Embedder API // + +const destroyedSymbol = Symbol('destroyed'); +const contextFrameSymbol = Symbol('context_frame'); + +class AsyncResource { + constructor(type, opts = kEmptyObject) { + validateString(type, 'type'); + + let triggerAsyncId = opts; + let requireManualDestroy = false; + if (typeof opts !== 'number') { + triggerAsyncId = opts.triggerAsyncId === undefined ? + getDefaultTriggerAsyncId() : opts.triggerAsyncId; + requireManualDestroy = !!opts.requireManualDestroy; + } + + // Unlike emitInitScript, AsyncResource doesn't supports null as the + // triggerAsyncId. 
+ if (!NumberIsSafeInteger(triggerAsyncId) || triggerAsyncId < -1) { + throw new ERR_INVALID_ASYNC_ID('triggerAsyncId', triggerAsyncId); + } + + this[contextFrameSymbol] = AsyncContextFrame.current(); + + const asyncId = newAsyncId(); + this[async_id_symbol] = asyncId; + this[trigger_async_id_symbol] = triggerAsyncId; + + if (initHooksExist()) { + if (enabledHooksExist() && type.length === 0) { + throw new ERR_ASYNC_TYPE(type); + } + + emitInit(asyncId, type, triggerAsyncId, this); + } + + if (!requireManualDestroy && destroyHooksExist()) { + // This prop name (destroyed) has to be synchronized with C++ + const destroyed = { destroyed: false }; + this[destroyedSymbol] = destroyed; + registerDestroyHook(this, asyncId, destroyed); + } + } + + runInAsyncScope(fn, thisArg, ...args) { + const asyncId = this[async_id_symbol]; + emitBefore(asyncId, this[trigger_async_id_symbol], this); + + const contextFrame = this[contextFrameSymbol]; + const prior = AsyncContextFrame.exchange(contextFrame); + try { + return ReflectApply(fn, thisArg, args); + } finally { + AsyncContextFrame.set(prior); + if (hasAsyncIdStack()) + emitAfter(asyncId); + } + } + + emitDestroy() { + if (this[destroyedSymbol] !== undefined) { + this[destroyedSymbol].destroyed = true; + } + emitDestroy(this[async_id_symbol]); + return this; + } + + asyncId() { + return this[async_id_symbol]; + } + + triggerAsyncId() { + return this[trigger_async_id_symbol]; + } + + bind(fn, thisArg) { + validateFunction(fn, 'fn'); + let bound; + if (thisArg === undefined) { + const resource = this; + bound = function(...args) { + ArrayPrototypeUnshift(args, fn, this); + return ReflectApply(resource.runInAsyncScope, resource, args); + }; + } else { + bound = FunctionPrototypeBind(this.runInAsyncScope, this, fn, thisArg); + } + let self = this; + ObjectDefineProperties(bound, { + 'length': { + __proto__: null, + configurable: true, + enumerable: false, + value: fn.length, + writable: false, + }, + 'asyncResource': { + __proto__: null, + configurable: true, + enumerable: true, + get: deprecate(function() { + return self; + }, 'The asyncResource property on bound functions is deprecated', 'DEP0172'), + set: deprecate(function(val) { + self = val; + }, 'The asyncResource property on bound functions is deprecated', 'DEP0172'), + }, + }); + return bound; + } + + static bind(fn, type, thisArg) { + type ||= fn.name; + return (new AsyncResource(type || 'bound-anonymous-fn')).bind(fn, thisArg); + } +} + +// Placing all exports down here because the exported classes won't export +// otherwise. +module.exports = { + // Public API + get AsyncLocalStorage() { + return AsyncContextFrame.enabled ? 
+ require('internal/async_local_storage/async_context_frame') : + require('internal/async_local_storage/async_hooks'); + }, + createHook, + executionAsyncId, + triggerAsyncId, + executionAsyncResource, + asyncWrapProviders: ObjectFreeze({ __proto__: null, ...asyncWrap.Providers }), + // Embedder API + AsyncResource, +}; \ No newline at end of file diff --git a/.codesandbox/node/buffer.js b/.codesandbox/node/buffer.js new file mode 100644 index 00000000..06bfebc7 --- /dev/null +++ b/.codesandbox/node/buffer.js @@ -0,0 +1,1365 @@ +'use strict'; + +const { + Array, + ArrayBufferIsView, + ArrayIsArray, + ArrayPrototypeForEach, + MathFloor, + MathMin, + MathTrunc, + NumberIsInteger, + NumberIsNaN, + NumberMAX_SAFE_INTEGER, + NumberMIN_SAFE_INTEGER, + ObjectDefineProperties, + ObjectDefineProperty, + ObjectPrototypeHasOwnProperty, + ObjectSetPrototypeOf, + RegExpPrototypeSymbolReplace, + StringPrototypeCharCodeAt, + StringPrototypeSlice, + StringPrototypeToLowerCase, + StringPrototypeTrim, + SymbolSpecies, + SymbolToPrimitive, + TypedArrayPrototypeFill, + TypedArrayPrototypeGetBuffer, + TypedArrayPrototypeGetByteLength, + TypedArrayPrototypeGetByteOffset, + TypedArrayPrototypeGetLength, + TypedArrayPrototypeSet, + TypedArrayPrototypeSlice, + Uint8Array, + Uint8ArrayPrototype, +} = primordials; + +const { + byteLengthUtf8, + compare: _compare, + compareOffset, + copy: _copy, + fill: bindingFill, + isAscii: bindingIsAscii, + isUtf8: bindingIsUtf8, + indexOfBuffer, + indexOfNumber, + indexOfString, + swap16: _swap16, + swap32: _swap32, + swap64: _swap64, + kMaxLength, + kStringMaxLength, + atob: _atob, + btoa: _btoa, +} = internalBinding('buffer'); +const { + constants: { + ALL_PROPERTIES, + ONLY_ENUMERABLE, + }, + getOwnNonIndexProperties, + isInsideNodeModules, +} = internalBinding('util'); +const { + customInspectSymbol, + lazyDOMException, + normalizeEncoding, + kIsEncodingSymbol, + defineLazyProperties, + encodingsMap, + deprecate, +} = require('internal/util'); +const { + isAnyArrayBuffer, + isArrayBufferView, + isUint8Array, + isTypedArray, +} = require('internal/util/types'); +const { + inspect: utilInspect, +} = require('internal/util/inspect'); + +const { + codes: { + ERR_BUFFER_OUT_OF_BOUNDS, + ERR_INVALID_ARG_TYPE, + ERR_INVALID_ARG_VALUE, + ERR_INVALID_BUFFER_SIZE, + ERR_MISSING_ARGS, + ERR_OUT_OF_RANGE, + ERR_UNKNOWN_ENCODING, + }, + genericNodeError, +} = require('internal/errors'); +const { + validateArray, + validateBuffer, + validateInteger, + validateNumber, + validateString, +} = require('internal/validators'); +// Provide validateInteger() but with kMaxLength as the default maximum value. 
+const validateOffset = (value, name, min = 0, max = kMaxLength) => + validateInteger(value, name, min, max); + +const { + FastBuffer, + markAsUntransferable, + addBufferPrototypeMethods, + createUnsafeBuffer, +} = require('internal/buffer'); + +FastBuffer.prototype.constructor = Buffer; +Buffer.prototype = FastBuffer.prototype; +addBufferPrototypeMethods(Buffer.prototype); + +const constants = ObjectDefineProperties({}, { + MAX_LENGTH: { + __proto__: null, + value: kMaxLength, + writable: false, + enumerable: true, + }, + MAX_STRING_LENGTH: { + __proto__: null, + value: kStringMaxLength, + writable: false, + enumerable: true, + }, +}); + +Buffer.poolSize = 8 * 1024; +let poolSize, poolOffset, allocPool, allocBuffer; + +function createPool() { + poolSize = Buffer.poolSize; + allocBuffer = createUnsafeBuffer(poolSize); + allocPool = allocBuffer.buffer; + markAsUntransferable(allocPool); + poolOffset = 0; +} +createPool(); + +function alignPool() { + // Ensure aligned slices + if (poolOffset & 0x7) { + poolOffset |= 0x7; + poolOffset++; + } +} + +let bufferWarningAlreadyEmitted = false; +let nodeModulesCheckCounter = 0; +const bufferWarning = 'Buffer() is deprecated due to security and usability ' + + 'issues. Please use the Buffer.alloc(), ' + + 'Buffer.allocUnsafe(), or Buffer.from() methods instead.'; + +function showFlaggedDeprecation() { + if (bufferWarningAlreadyEmitted || + ++nodeModulesCheckCounter > 10000 || + (!require('internal/options').getOptionValue('--pending-deprecation') && + isInsideNodeModules(100, true))) { + // We don't emit a warning, because we either: + // - Already did so, or + // - Already checked too many times whether a call is coming + // from node_modules and want to stop slowing down things, or + // - We aren't running with `--pending-deprecation` enabled, + // and the code is inside `node_modules`. + // - We found node_modules in up to the topmost 100 frames, or + // there are more than 100 frames and we don't want to search anymore. + return; + } + + process.emitWarning(bufferWarning, 'DeprecationWarning', 'DEP0005'); + bufferWarningAlreadyEmitted = true; +} + +function toInteger(n, defaultVal) { + n = +n; + if (!NumberIsNaN(n) && + n >= NumberMIN_SAFE_INTEGER && + n <= NumberMAX_SAFE_INTEGER) { + return ((n % 1) === 0 ? n : MathFloor(n)); + } + return defaultVal; +} + +function copyImpl(source, target, targetStart, sourceStart, sourceEnd) { + if (!ArrayBufferIsView(source)) + throw new ERR_INVALID_ARG_TYPE('source', ['Buffer', 'Uint8Array'], source); + if (!ArrayBufferIsView(target)) + throw new ERR_INVALID_ARG_TYPE('target', ['Buffer', 'Uint8Array'], target); + + if (targetStart === undefined) { + targetStart = 0; + } else { + targetStart = NumberIsInteger(targetStart) ? targetStart : toInteger(targetStart, 0); + if (targetStart < 0) + throw new ERR_OUT_OF_RANGE('targetStart', '>= 0', targetStart); + } + + if (sourceStart === undefined) { + sourceStart = 0; + } else { + sourceStart = NumberIsInteger(sourceStart) ? sourceStart : toInteger(sourceStart, 0); + if (sourceStart < 0 || sourceStart > source.byteLength) + throw new ERR_OUT_OF_RANGE('sourceStart', `>= 0 && <= ${source.byteLength}`, sourceStart); + } + + if (sourceEnd === undefined) { + sourceEnd = source.byteLength; + } else { + sourceEnd = NumberIsInteger(sourceEnd) ? 
sourceEnd : toInteger(sourceEnd, 0); + if (sourceEnd < 0) + throw new ERR_OUT_OF_RANGE('sourceEnd', '>= 0', sourceEnd); + } + + if (targetStart >= target.byteLength || sourceStart >= sourceEnd) + return 0; + + return _copyActual(source, target, targetStart, sourceStart, sourceEnd); +} + +function _copyActual(source, target, targetStart, sourceStart, sourceEnd) { + if (sourceEnd - sourceStart > target.byteLength - targetStart) + sourceEnd = sourceStart + target.byteLength - targetStart; + + let nb = sourceEnd - sourceStart; + const sourceLen = source.byteLength - sourceStart; + if (nb > sourceLen) + nb = sourceLen; + + if (nb <= 0) + return 0; + + _copy(source, target, targetStart, sourceStart, nb); + + return nb; +} + +/** + * The Buffer() constructor is deprecated in documentation and should not be + * used moving forward. Rather, developers should use one of the three new + * factory APIs: Buffer.from(), Buffer.allocUnsafe() or Buffer.alloc() based on + * their specific needs. There is no runtime deprecation because of the extent + * to which the Buffer constructor is used in the ecosystem currently -- a + * runtime deprecation would introduce too much breakage at this time. It's not + * likely that the Buffer constructors would ever actually be removed. + * Deprecation Code: DEP0005 + * @returns {Buffer} + */ +function Buffer(arg, encodingOrOffset, length) { + showFlaggedDeprecation(); + // Common case. + if (typeof arg === 'number') { + if (typeof encodingOrOffset === 'string') { + throw new ERR_INVALID_ARG_TYPE('string', 'string', arg); + } + return Buffer.alloc(arg); + } + return Buffer.from(arg, encodingOrOffset, length); +} + +ObjectDefineProperty(Buffer, SymbolSpecies, { + __proto__: null, + enumerable: false, + configurable: true, + get() { return FastBuffer; }, +}); + +/** + * Functionally equivalent to Buffer(arg, encoding) but throws a TypeError + * if value is a number. + * Buffer.from(str[, encoding]) + * Buffer.from(array) + * Buffer.from(buffer) + * Buffer.from(arrayBuffer[, byteOffset[, length]]) + * @param {any} value + * @param {BufferEncoding|number} encodingOrOffset + * @param {number} [length] + * @returns {Buffer} + */ +Buffer.from = function from(value, encodingOrOffset, length) { + if (typeof value === 'string') + return fromString(value, encodingOrOffset); + + if (typeof value === 'object' && value !== null) { + if (isAnyArrayBuffer(value)) + return fromArrayBuffer(value, encodingOrOffset, length); + + const valueOf = value.valueOf && value.valueOf(); + if (valueOf != null && + valueOf !== value && + (typeof valueOf === 'string' || typeof valueOf === 'object')) { + return from(valueOf, encodingOrOffset, length); + } + + const b = fromObject(value); + if (b) + return b; + + if (typeof value[SymbolToPrimitive] === 'function') { + const primitive = value[SymbolToPrimitive]('string'); + if (typeof primitive === 'string') { + return fromString(primitive, encodingOrOffset); + } + } + } + + throw new ERR_INVALID_ARG_TYPE( + 'first argument', + ['string', 'Buffer', 'ArrayBuffer', 'Array', 'Array-like Object'], + value, + ); +}; + +/** + * Creates the Buffer as a copy of the underlying ArrayBuffer of the view + * rather than the contents of the view. 
+ * @param {TypedArray} view + * @param {number} [offset] + * @param {number} [length] + * @returns {Buffer} + */ +Buffer.copyBytesFrom = function copyBytesFrom(view, offset, length) { + if (!isTypedArray(view)) { + throw new ERR_INVALID_ARG_TYPE('view', [ 'TypedArray' ], view); + } + + const viewLength = TypedArrayPrototypeGetLength(view); + if (viewLength === 0) { + return Buffer.alloc(0); + } + + if (offset !== undefined || length !== undefined) { + if (offset !== undefined) { + validateInteger(offset, 'offset', 0); + if (offset >= viewLength) return Buffer.alloc(0); + } else { + offset = 0; + } + let end; + if (length !== undefined) { + validateInteger(length, 'length', 0); + end = offset + length; + } else { + end = viewLength; + } + + view = TypedArrayPrototypeSlice(view, offset, end); + } + + return fromArrayLike(new Uint8Array( + TypedArrayPrototypeGetBuffer(view), + TypedArrayPrototypeGetByteOffset(view), + TypedArrayPrototypeGetByteLength(view))); +}; + +// Identical to the built-in %TypedArray%.of(), but avoids using the deprecated +// Buffer() constructor. Must use arrow function syntax to avoid automatically +// adding a `prototype` property and making the function a constructor. +// +// Refs: https://tc39.github.io/ecma262/#sec-%typedarray%.of +// Refs: https://esdiscuss.org/topic/isconstructor#content-11 +const of = (...items) => { + const newObj = createUnsafeBuffer(items.length); + for (let k = 0; k < items.length; k++) + newObj[k] = items[k]; + return newObj; +}; +Buffer.of = of; + +ObjectSetPrototypeOf(Buffer, Uint8Array); + +/** + * Creates a new filled Buffer instance. + * alloc(size[, fill[, encoding]]) + * @returns {FastBuffer} + */ +Buffer.alloc = function alloc(size, fill, encoding) { + validateNumber(size, 'size', 0, kMaxLength); + if (fill !== undefined && fill !== 0 && size > 0) { + const buf = createUnsafeBuffer(size); + return _fill(buf, fill, 0, buf.length, encoding); + } + return new FastBuffer(size); +}; + +/** + * Equivalent to Buffer(num), by default creates a non-zero-filled Buffer + * instance. If `--zero-fill-buffers` is set, will zero-fill the buffer. + * @returns {FastBuffer} + */ +Buffer.allocUnsafe = function allocUnsafe(size) { + validateNumber(size, 'size', 0, kMaxLength); + return allocate(size); +}; + +/** + * Equivalent to SlowBuffer(num), by default creates a non-zero-filled + * Buffer instance that is not allocated off the pre-initialized pool. + * If `--zero-fill-buffers` is set, will zero-fill the buffer. + * @param {number} size + * @returns {FastBuffer|undefined} + */ +Buffer.allocUnsafeSlow = function allocUnsafeSlow(size) { + validateNumber(size, 'size', 0, kMaxLength); + return createUnsafeBuffer(size); +}; + +// If --zero-fill-buffers command line argument is set, a zero-filled +// buffer is returned. 
+function SlowBuffer(size) { + validateNumber(size, 'size', 0, kMaxLength); + return createUnsafeBuffer(size); +} + +ObjectSetPrototypeOf(SlowBuffer.prototype, Uint8ArrayPrototype); +ObjectSetPrototypeOf(SlowBuffer, Uint8Array); + +function allocate(size) { + if (size <= 0) { + return new FastBuffer(); + } + if (size < (Buffer.poolSize >>> 1)) { + if (size > (poolSize - poolOffset)) + createPool(); + const b = new FastBuffer(allocPool, poolOffset, size); + poolOffset += size; + alignPool(); + return b; + } + return createUnsafeBuffer(size); +} + +function fromStringFast(string, ops) { + const maxLength = Buffer.poolSize >>> 1; + + let length = string.length; // Min length + + if (length >= maxLength) + return createFromString(string, ops); + + length *= 4; // Max length (4 bytes per character) + + if (length >= maxLength) + length = ops.byteLength(string); // Actual length + + if (length >= maxLength) + return createFromString(string, ops, length); + + if (length > (poolSize - poolOffset)) + createPool(); + + const actual = ops.write(allocBuffer, string, poolOffset, length); + const b = new FastBuffer(allocPool, poolOffset, actual); + + poolOffset += actual; + alignPool(); + return b; +} + +function createFromString(string, ops, length = ops.byteLength(string)) { + const buf = Buffer.allocUnsafeSlow(length); + const actual = ops.write(buf, string, 0, length); + return actual < length ? new FastBuffer(buf.buffer, 0, actual) : buf; +} + +function fromString(string, encoding) { + let ops; + if (!encoding || encoding === 'utf8' || typeof encoding !== 'string') { + ops = encodingOps.utf8; + } else { + ops = getEncodingOps(encoding); + if (ops === undefined) + throw new ERR_UNKNOWN_ENCODING(encoding); + } + + return string.length === 0 ? new FastBuffer() : fromStringFast(string, ops); +} + +function fromArrayBuffer(obj, byteOffset, length) { + // Convert byteOffset to integer + if (byteOffset === undefined) { + byteOffset = 0; + } else { + byteOffset = +byteOffset; + if (NumberIsNaN(byteOffset)) + byteOffset = 0; + } + + const maxLength = obj.byteLength - byteOffset; + + if (maxLength < 0) + throw new ERR_BUFFER_OUT_OF_BOUNDS('offset'); + + if (length !== undefined) { + // Convert length to non-negative integer. 
+ length = +length; + if (length > 0) { + if (length > maxLength) + throw new ERR_BUFFER_OUT_OF_BOUNDS('length'); + } else { + length = 0; + } + } + + return new FastBuffer(obj, byteOffset, length); +} + +function fromArrayLike(obj) { + if (obj.length <= 0) + return new FastBuffer(); + if (obj.length < (Buffer.poolSize >>> 1)) { + if (obj.length > (poolSize - poolOffset)) + createPool(); + const b = new FastBuffer(allocPool, poolOffset, obj.length); + TypedArrayPrototypeSet(b, obj, 0); + poolOffset += obj.length; + alignPool(); + return b; + } + return new FastBuffer(obj); +} + +function fromObject(obj) { + if (obj.length !== undefined || isAnyArrayBuffer(obj.buffer)) { + if (typeof obj.length !== 'number') { + return new FastBuffer(); + } + return fromArrayLike(obj); + } + + if (obj.type === 'Buffer' && ArrayIsArray(obj.data)) { + return fromArrayLike(obj.data); + } +} + +// Static methods + +Buffer.isBuffer = function isBuffer(b) { + return b instanceof Buffer; +}; + +Buffer.compare = function compare(buf1, buf2) { + if (!isUint8Array(buf1)) { + throw new ERR_INVALID_ARG_TYPE('buf1', ['Buffer', 'Uint8Array'], buf1); + } + + if (!isUint8Array(buf2)) { + throw new ERR_INVALID_ARG_TYPE('buf2', ['Buffer', 'Uint8Array'], buf2); + } + + if (buf1 === buf2) { + return 0; + } + + return _compare(buf1, buf2); +}; + +Buffer.isEncoding = function isEncoding(encoding) { + return typeof encoding === 'string' && encoding.length !== 0 && + normalizeEncoding(encoding) !== undefined; +}; +Buffer[kIsEncodingSymbol] = Buffer.isEncoding; + +Buffer.concat = function concat(list, length) { + validateArray(list, 'list'); + + if (list.length === 0) + return new FastBuffer(); + + if (length === undefined) { + length = 0; + for (let i = 0; i < list.length; i++) { + if (list[i].length) { + length += list[i].length; + } + } + } else { + validateOffset(length, 'length'); + } + + const buffer = Buffer.allocUnsafe(length); + let pos = 0; + for (let i = 0; i < list.length; i++) { + const buf = list[i]; + if (!isUint8Array(buf)) { + // TODO(BridgeAR): This should not be of type ERR_INVALID_ARG_TYPE. + // Instead, find the proper error code for this. + throw new ERR_INVALID_ARG_TYPE( + `list[${i}]`, ['Buffer', 'Uint8Array'], list[i]); + } + pos += _copyActual(buf, buffer, pos, 0, buf.length); + } + + // Note: `length` is always equal to `buffer.length` at this point + if (pos < length) { + // Zero-fill the remaining bytes if the specified `length` was more than + // the actual total length, i.e. if we have some remaining allocated bytes + // there were not initialized. 
+ TypedArrayPrototypeFill(buffer, 0, pos, length); + } + + return buffer; +}; + +function base64ByteLength(str, bytes) { + // Handle padding + if (StringPrototypeCharCodeAt(str, bytes - 1) === 0x3D) + bytes--; + if (bytes > 1 && StringPrototypeCharCodeAt(str, bytes - 1) === 0x3D) + bytes--; + + // Base64 ratio: 3/4 + return (bytes * 3) >>> 2; +} + +const encodingOps = { + utf8: { + encoding: 'utf8', + encodingVal: encodingsMap.utf8, + byteLength: byteLengthUtf8, + write: (buf, string, offset, len) => buf.utf8Write(string, offset, len), + slice: (buf, start, end) => buf.utf8Slice(start, end), + indexOf: (buf, val, byteOffset, dir) => + indexOfString(buf, val, byteOffset, encodingsMap.utf8, dir), + }, + ucs2: { + encoding: 'ucs2', + encodingVal: encodingsMap.utf16le, + byteLength: (string) => string.length * 2, + write: (buf, string, offset, len) => buf.ucs2Write(string, offset, len), + slice: (buf, start, end) => buf.ucs2Slice(start, end), + indexOf: (buf, val, byteOffset, dir) => + indexOfString(buf, val, byteOffset, encodingsMap.utf16le, dir), + }, + utf16le: { + encoding: 'utf16le', + encodingVal: encodingsMap.utf16le, + byteLength: (string) => string.length * 2, + write: (buf, string, offset, len) => buf.ucs2Write(string, offset, len), + slice: (buf, start, end) => buf.ucs2Slice(start, end), + indexOf: (buf, val, byteOffset, dir) => + indexOfString(buf, val, byteOffset, encodingsMap.utf16le, dir), + }, + latin1: { + encoding: 'latin1', + encodingVal: encodingsMap.latin1, + byteLength: (string) => string.length, + write: (buf, string, offset, len) => buf.latin1Write(string, offset, len), + slice: (buf, start, end) => buf.latin1Slice(start, end), + indexOf: (buf, val, byteOffset, dir) => + indexOfString(buf, val, byteOffset, encodingsMap.latin1, dir), + }, + ascii: { + encoding: 'ascii', + encodingVal: encodingsMap.ascii, + byteLength: (string) => string.length, + write: (buf, string, offset, len) => buf.asciiWrite(string, offset, len), + slice: (buf, start, end) => buf.asciiSlice(start, end), + indexOf: (buf, val, byteOffset, dir) => + indexOfBuffer(buf, + fromStringFast(val, encodingOps.ascii), + byteOffset, + encodingsMap.ascii, + dir), + }, + base64: { + encoding: 'base64', + encodingVal: encodingsMap.base64, + byteLength: (string) => base64ByteLength(string, string.length), + write: (buf, string, offset, len) => buf.base64Write(string, offset, len), + slice: (buf, start, end) => buf.base64Slice(start, end), + indexOf: (buf, val, byteOffset, dir) => + indexOfBuffer(buf, + fromStringFast(val, encodingOps.base64), + byteOffset, + encodingsMap.base64, + dir), + }, + base64url: { + encoding: 'base64url', + encodingVal: encodingsMap.base64url, + byteLength: (string) => base64ByteLength(string, string.length), + write: (buf, string, offset, len) => + buf.base64urlWrite(string, offset, len), + slice: (buf, start, end) => buf.base64urlSlice(start, end), + indexOf: (buf, val, byteOffset, dir) => + indexOfBuffer(buf, + fromStringFast(val, encodingOps.base64url), + byteOffset, + encodingsMap.base64url, + dir), + }, + hex: { + encoding: 'hex', + encodingVal: encodingsMap.hex, + byteLength: (string) => string.length >>> 1, + write: (buf, string, offset, len) => buf.hexWrite(string, offset, len), + slice: (buf, start, end) => buf.hexSlice(start, end), + indexOf: (buf, val, byteOffset, dir) => + indexOfBuffer(buf, + fromStringFast(val, encodingOps.hex), + byteOffset, + encodingsMap.hex, + dir), + }, +}; +function getEncodingOps(encoding) { + encoding += ''; + switch (encoding.length) { + case 4: + 
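+      // Added note (not in the original source): each case first tests the
+      // already-lowercased spelling so the common path avoids an extra
+      // StringPrototypeToLowerCase() call, then lowercases once and retries.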
if (encoding === 'utf8') return encodingOps.utf8; + if (encoding === 'ucs2') return encodingOps.ucs2; + encoding = StringPrototypeToLowerCase(encoding); + if (encoding === 'utf8') return encodingOps.utf8; + if (encoding === 'ucs2') return encodingOps.ucs2; + break; + case 5: + if (encoding === 'utf-8') return encodingOps.utf8; + if (encoding === 'ascii') return encodingOps.ascii; + if (encoding === 'ucs-2') return encodingOps.ucs2; + encoding = StringPrototypeToLowerCase(encoding); + if (encoding === 'utf-8') return encodingOps.utf8; + if (encoding === 'ascii') return encodingOps.ascii; + if (encoding === 'ucs-2') return encodingOps.ucs2; + break; + case 7: + if (encoding === 'utf16le' || + StringPrototypeToLowerCase(encoding) === 'utf16le') + return encodingOps.utf16le; + break; + case 8: + if (encoding === 'utf-16le' || + StringPrototypeToLowerCase(encoding) === 'utf-16le') + return encodingOps.utf16le; + break; + case 6: + if (encoding === 'latin1' || encoding === 'binary') + return encodingOps.latin1; + if (encoding === 'base64') return encodingOps.base64; + encoding = StringPrototypeToLowerCase(encoding); + if (encoding === 'latin1' || encoding === 'binary') + return encodingOps.latin1; + if (encoding === 'base64') return encodingOps.base64; + break; + case 3: + if (encoding === 'hex' || StringPrototypeToLowerCase(encoding) === 'hex') + return encodingOps.hex; + break; + case 9: + if (encoding === 'base64url' || + StringPrototypeToLowerCase(encoding) === 'base64url') + return encodingOps.base64url; + break; + } +} + +function byteLength(string, encoding) { + if (typeof string !== 'string') { + if (isArrayBufferView(string) || isAnyArrayBuffer(string)) { + return string.byteLength; + } + + throw new ERR_INVALID_ARG_TYPE( + 'string', ['string', 'Buffer', 'ArrayBuffer'], string, + ); + } + + const len = string.length; + if (len === 0) + return 0; + + if (!encoding || encoding === 'utf8') { + return byteLengthUtf8(string); + } + + if (encoding === 'ascii') { + return len; + } + + const ops = getEncodingOps(encoding); + if (ops === undefined) { + // TODO (ronag): Makes more sense to throw here. + // throw new ERR_UNKNOWN_ENCODING(encoding); + return byteLengthUtf8(string); + } + + return ops.byteLength(string); +} + +Buffer.byteLength = byteLength; + +// For backwards compatibility. +ObjectDefineProperty(Buffer.prototype, 'parent', { + __proto__: null, + enumerable: true, + get() { + if (!(this instanceof Buffer)) + return undefined; + return this.buffer; + }, +}); +ObjectDefineProperty(Buffer.prototype, 'offset', { + __proto__: null, + enumerable: true, + get() { + if (!(this instanceof Buffer)) + return undefined; + return this.byteOffset; + }, +}); + +Buffer.prototype.copy = + function copy(target, targetStart, sourceStart, sourceEnd) { + return copyImpl(this, target, targetStart, sourceStart, sourceEnd); + }; + +// No need to verify that "buf.length <= MAX_UINT32" since it's a read-only +// property of a typed array. +// This behaves neither like String nor Uint8Array in that we set start/end +// to their upper/lower bounds if the value passed is out of range. 
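+// Illustrative example (added, not part of the original source):
+//   Buffer.from('hello').toString('utf8', -3, 99)  // => 'hello'
+// because a negative `start` clamps to 0 and an `end` past the buffer
+// clamps to buffer.length, matching the comment above.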
+Buffer.prototype.toString = function toString(encoding, start, end) { + if (arguments.length === 0) { + return this.utf8Slice(0, this.length); + } + + const len = this.length; + + if (start <= 0) + start = 0; + else if (start >= len) + return ''; + else + start = MathTrunc(start) || 0; + + if (end === undefined || end > len) + end = len; + else + end = MathTrunc(end) || 0; + + if (end <= start) + return ''; + + if (encoding === undefined) + return this.utf8Slice(start, end); + + const ops = getEncodingOps(encoding); + if (ops === undefined) + throw new ERR_UNKNOWN_ENCODING(encoding); + + return ops.slice(this, start, end); +}; + +Buffer.prototype.equals = function equals(otherBuffer) { + if (!isUint8Array(otherBuffer)) { + throw new ERR_INVALID_ARG_TYPE( + 'otherBuffer', ['Buffer', 'Uint8Array'], otherBuffer); + } + + if (this === otherBuffer) + return true; + const len = TypedArrayPrototypeGetByteLength(this); + if (len !== TypedArrayPrototypeGetByteLength(otherBuffer)) + return false; + + return len === 0 || _compare(this, otherBuffer) === 0; +}; + +let INSPECT_MAX_BYTES = 50; +// Override how buffers are presented by util.inspect(). +Buffer.prototype[customInspectSymbol] = function inspect(recurseTimes, ctx) { + const max = INSPECT_MAX_BYTES; + const actualMax = MathMin(max, this.length); + const remaining = this.length - max; + let str = StringPrototypeTrim(RegExpPrototypeSymbolReplace( + /(.{2})/g, this.hexSlice(0, actualMax), '$1 ')); + if (remaining > 0) + str += ` ... ${remaining} more byte${remaining > 1 ? 's' : ''}`; + // Inspect special properties as well, if possible. + if (ctx) { + let extras = false; + const filter = ctx.showHidden ? ALL_PROPERTIES : ONLY_ENUMERABLE; + const obj = { __proto__: null }; + ArrayPrototypeForEach(getOwnNonIndexProperties(this, filter), + (key) => { + extras = true; + obj[key] = this[key]; + }); + if (extras) { + if (this.length !== 0) + str += ', '; + // '[Object: null prototype] {'.length === 26 + // This is guarded with a test. + str += StringPrototypeSlice(utilInspect(obj, { + ...ctx, + breakLength: Infinity, + compact: true, + }), 27, -2); + } + } + let constructorName = 'Buffer'; + try { + const { constructor } = this; + if (typeof constructor === 'function' && ObjectPrototypeHasOwnProperty(constructor, 'name')) { + constructorName = constructor.name; + } + } catch { /* Ignore error and use default name */ } + return `<${constructorName} ${str}>`; +}; +Buffer.prototype.inspect = Buffer.prototype[customInspectSymbol]; + +Buffer.prototype.compare = function compare(target, + targetStart, + targetEnd, + sourceStart, + sourceEnd) { + if (!isUint8Array(target)) { + throw new ERR_INVALID_ARG_TYPE('target', ['Buffer', 'Uint8Array'], target); + } + if (arguments.length === 1) + return _compare(this, target); + + if (targetStart === undefined) + targetStart = 0; + else + validateOffset(targetStart, 'targetStart'); + + if (targetEnd === undefined) + targetEnd = target.length; + else + validateOffset(targetEnd, 'targetEnd', 0, target.length); + + if (sourceStart === undefined) + sourceStart = 0; + else + validateOffset(sourceStart, 'sourceStart'); + + if (sourceEnd === undefined) + sourceEnd = this.length; + else + validateOffset(sourceEnd, 'sourceEnd', 0, this.length); + + if (sourceStart >= sourceEnd) + return (targetStart >= targetEnd ? 
0 : -1); + if (targetStart >= targetEnd) + return 1; + + return compareOffset(this, target, targetStart, sourceStart, targetEnd, + sourceEnd); +}; + +// Finds either the first index of `val` in `buffer` at offset >= `byteOffset`, +// OR the last index of `val` in `buffer` at offset <= `byteOffset`. +// +// Arguments: +// - buffer - a Buffer to search +// - val - a string, Buffer, or number +// - byteOffset - an index into `buffer`; will be clamped to an int32 +// - encoding - an optional encoding, relevant if val is a string +// - dir - true for indexOf, false for lastIndexOf +function bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) { + validateBuffer(buffer); + + if (typeof byteOffset === 'string') { + encoding = byteOffset; + byteOffset = undefined; + } else if (byteOffset > 0x7fffffff) { + byteOffset = 0x7fffffff; + } else if (byteOffset < -0x80000000) { + byteOffset = -0x80000000; + } + // Coerce to Number. Values like null and [] become 0. + byteOffset = +byteOffset; + // If the offset is undefined, "foo", {}, coerces to NaN, search whole buffer. + if (NumberIsNaN(byteOffset)) { + byteOffset = dir ? 0 : (buffer.length || buffer.byteLength); + } + dir = !!dir; // Cast to bool. + + if (typeof val === 'number') + return indexOfNumber(buffer, val >>> 0, byteOffset, dir); + + let ops; + if (encoding === undefined) + ops = encodingOps.utf8; + else + ops = getEncodingOps(encoding); + + if (typeof val === 'string') { + if (ops === undefined) + throw new ERR_UNKNOWN_ENCODING(encoding); + return ops.indexOf(buffer, val, byteOffset, dir); + } + + if (isUint8Array(val)) { + const encodingVal = + (ops === undefined ? encodingsMap.utf8 : ops.encodingVal); + return indexOfBuffer(buffer, val, byteOffset, encodingVal, dir); + } + + throw new ERR_INVALID_ARG_TYPE( + 'value', ['number', 'string', 'Buffer', 'Uint8Array'], val, + ); +} + +Buffer.prototype.indexOf = function indexOf(val, byteOffset, encoding) { + return bidirectionalIndexOf(this, val, byteOffset, encoding, true); +}; + +Buffer.prototype.lastIndexOf = function lastIndexOf(val, byteOffset, encoding) { + return bidirectionalIndexOf(this, val, byteOffset, encoding, false); +}; + +Buffer.prototype.includes = function includes(val, byteOffset, encoding) { + return this.indexOf(val, byteOffset, encoding) !== -1; +}; + +// Usage: +// buffer.fill(number[, offset[, end]]) +// buffer.fill(buffer[, offset[, end]]) +// buffer.fill(string[, offset[, end]][, encoding]) +Buffer.prototype.fill = function fill(value, offset, end, encoding) { + return _fill(this, value, offset, end, encoding); +}; + +function _fill(buf, value, offset, end, encoding) { + if (typeof value === 'string') { + if (offset === undefined || typeof offset === 'string') { + encoding = offset; + offset = 0; + end = buf.length; + } else if (typeof end === 'string') { + encoding = end; + end = buf.length; + } + + const normalizedEncoding = normalizeEncoding(encoding); + if (normalizedEncoding === undefined) { + validateString(encoding, 'encoding'); + throw new ERR_UNKNOWN_ENCODING(encoding); + } + + if (value.length === 0) { + // If value === '' default to zero. + value = 0; + } else if (value.length === 1) { + // Fast path: If `value` fits into a single byte, use that numeric value. 
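+      // Illustrative example (added, not part of the original source):
+      //   buf.fill('a') takes this path and becomes a plain numeric fill of
+      //   0x61, rather than repeatedly encoding the one-character string.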
+ if (normalizedEncoding === 'utf8') { + const code = StringPrototypeCharCodeAt(value, 0); + if (code < 128) { + value = code; + } + } else if (normalizedEncoding === 'latin1') { + value = StringPrototypeCharCodeAt(value, 0); + } + } + } else { + encoding = undefined; + } + + if (offset === undefined) { + offset = 0; + end = buf.length; + } else { + validateOffset(offset, 'offset'); + // Invalid ranges are not set to a default, so can range check early. + if (end === undefined) { + end = buf.length; + } else { + validateOffset(end, 'end', 0, buf.length); + } + if (offset >= end) + return buf; + } + + + if (typeof value === 'number') { + // OOB check + const byteLen = TypedArrayPrototypeGetByteLength(buf); + const fillLength = end - offset; + if (offset > end || fillLength + offset > byteLen) + throw new ERR_BUFFER_OUT_OF_BOUNDS(); + + TypedArrayPrototypeFill(buf, value, offset, end); + } else { + const res = bindingFill(buf, value, offset, end, encoding); + if (res < 0) { + if (res === -1) + throw new ERR_INVALID_ARG_VALUE('value', value); + throw new ERR_BUFFER_OUT_OF_BOUNDS(); + } + } + + return buf; +} + +Buffer.prototype.write = function write(string, offset, length, encoding) { + // Buffer#write(string); + if (offset === undefined) { + return this.utf8Write(string, 0, this.length); + } + // Buffer#write(string, encoding) + if (length === undefined && typeof offset === 'string') { + encoding = offset; + length = this.length; + offset = 0; + + // Buffer#write(string, offset[, length][, encoding]) + } else { + validateOffset(offset, 'offset', 0, this.length); + + const remaining = this.length - offset; + + if (length === undefined) { + length = remaining; + } else if (typeof length === 'string') { + encoding = length; + length = remaining; + } else { + validateOffset(length, 'length', 0, this.length); + if (length > remaining) + length = remaining; + } + } + + if (!encoding || encoding === 'utf8') + return this.utf8Write(string, offset, length); + if (encoding === 'ascii') + return this.asciiWrite(string, offset, length); + + const ops = getEncodingOps(encoding); + if (ops === undefined) + throw new ERR_UNKNOWN_ENCODING(encoding); + return ops.write(this, string, offset, length); +}; + +Buffer.prototype.toJSON = function toJSON() { + if (this.length > 0) { + const data = new Array(this.length); + for (let i = 0; i < this.length; ++i) + data[i] = this[i]; + return { type: 'Buffer', data }; + } + return { type: 'Buffer', data: [] }; +}; + +function adjustOffset(offset, length) { + // Use Math.trunc() to convert offset to an integer value that can be larger + // than an Int32. Hence, don't use offset | 0 or similar techniques. + offset = MathTrunc(offset); + if (offset === 0) { + return 0; + } + if (offset < 0) { + offset += length; + return offset > 0 ? offset : 0; + } + if (offset < length) { + return offset; + } + return NumberIsNaN(offset) ? 0 : length; +} + +Buffer.prototype.subarray = function subarray(start, end) { + const srcLength = this.length; + start = adjustOffset(start, srcLength); + end = end !== undefined ? adjustOffset(end, srcLength) : srcLength; + const newLength = end > start ? 
end - start : 0; + return new FastBuffer(this.buffer, this.byteOffset + start, newLength); +}; + +Buffer.prototype.slice = function slice(start, end) { + return this.subarray(start, end); +}; + +function swap(b, n, m) { + const i = b[n]; + b[n] = b[m]; + b[m] = i; +} + +Buffer.prototype.swap16 = function swap16() { + // For Buffer.length < 128, it's generally faster to + // do the swap in javascript. For larger buffers, + // dropping down to the native code is faster. + const len = this.length; + if (len % 2 !== 0) + throw new ERR_INVALID_BUFFER_SIZE('16-bits'); + if (len < 128) { + for (let i = 0; i < len; i += 2) + swap(this, i, i + 1); + return this; + } + return _swap16(this); +}; + +Buffer.prototype.swap32 = function swap32() { + // For Buffer.length < 192, it's generally faster to + // do the swap in javascript. For larger buffers, + // dropping down to the native code is faster. + const len = this.length; + if (len % 4 !== 0) + throw new ERR_INVALID_BUFFER_SIZE('32-bits'); + if (len < 192) { + for (let i = 0; i < len; i += 4) { + swap(this, i, i + 3); + swap(this, i + 1, i + 2); + } + return this; + } + return _swap32(this); +}; + +Buffer.prototype.swap64 = function swap64() { + // For Buffer.length < 192, it's generally faster to + // do the swap in javascript. For larger buffers, + // dropping down to the native code is faster. + const len = this.length; + if (len % 8 !== 0) + throw new ERR_INVALID_BUFFER_SIZE('64-bits'); + if (len < 192) { + for (let i = 0; i < len; i += 8) { + swap(this, i, i + 7); + swap(this, i + 1, i + 6); + swap(this, i + 2, i + 5); + swap(this, i + 3, i + 4); + } + return this; + } + return _swap64(this); +}; + +Buffer.prototype.toLocaleString = Buffer.prototype.toString; + +let transcode; +if (internalBinding('config').hasIntl) { + const { + icuErrName, + transcode: _transcode, + } = internalBinding('icu'); + + // Transcodes the Buffer from one encoding to another, returning a new + // Buffer instance. + transcode = function transcode(source, fromEncoding, toEncoding) { + if (!isUint8Array(source)) { + throw new ERR_INVALID_ARG_TYPE('source', + ['Buffer', 'Uint8Array'], source); + } + if (source.length === 0) return Buffer.alloc(0); + + fromEncoding = normalizeEncoding(fromEncoding) || fromEncoding; + toEncoding = normalizeEncoding(toEncoding) || toEncoding; + const result = _transcode(source, fromEncoding, toEncoding); + if (typeof result !== 'number') + return result; + + const code = icuErrName(result); + const err = genericNodeError( + `Unable to transcode Buffer [${code}]`, + { code: code, errno: result }, + ); + throw err; + }; +} + +function btoa(input) { + // The implementation here has not been performance optimized in any way and + // should not be. + // Refs: https://github.com/nodejs/node/pull/38433#issuecomment-828426932 + if (arguments.length === 0) { + throw new ERR_MISSING_ARGS('input'); + } + const result = _btoa(`${input}`); + if (result === -1) { + throw lazyDOMException('Invalid character', 'InvalidCharacterError'); + } + return result; +} + +function atob(input) { + if (arguments.length === 0) { + throw new ERR_MISSING_ARGS('input'); + } + + const result = _atob(`${input}`); + + switch (result) { + case -2: // Invalid character + throw lazyDOMException('Invalid character', 'InvalidCharacterError'); + case -1: // Single character remained + throw lazyDOMException( + 'The string to be decoded is not correctly encoded.', + 'InvalidCharacterError'); + case -3: // Possible overflow + // TODO(@anonrig): Throw correct error in here. 
+ throw lazyDOMException('The input causes overflow.', 'InvalidCharacterError'); + default: + return result; + } +} + +function isUtf8(input) { + if (isTypedArray(input) || isAnyArrayBuffer(input)) { + return bindingIsUtf8(input); + } + + throw new ERR_INVALID_ARG_TYPE('input', ['ArrayBuffer', 'Buffer', 'TypedArray'], input); +} + +function isAscii(input) { + if (isTypedArray(input) || isAnyArrayBuffer(input)) { + return bindingIsAscii(input); + } + + throw new ERR_INVALID_ARG_TYPE('input', ['ArrayBuffer', 'Buffer', 'TypedArray'], input); +} + +module.exports = { + Buffer, + SlowBuffer: deprecate( + SlowBuffer, + 'SlowBuffer() is deprecated. Please use Buffer.allocUnsafeSlow()', + 'DEP0030'), + transcode, + isUtf8, + isAscii, + + // Legacy + kMaxLength, + kStringMaxLength, + btoa, + atob, +}; + +ObjectDefineProperties(module.exports, { + constants: { + __proto__: null, + configurable: false, + enumerable: true, + value: constants, + }, + INSPECT_MAX_BYTES: { + __proto__: null, + configurable: true, + enumerable: true, + get() { return INSPECT_MAX_BYTES; }, + set(val) { + validateNumber(val, 'INSPECT_MAX_BYTES', 0); + INSPECT_MAX_BYTES = val; + }, + }, +}); + +defineLazyProperties( + module.exports, + 'internal/blob', + ['Blob', 'resolveObjectURL'], +); +defineLazyProperties( + module.exports, + 'internal/file', + ['File'], +); \ No newline at end of file diff --git a/.codesandbox/node/child_process.js b/.codesandbox/node/child_process.js new file mode 100644 index 00000000..f531ed51 --- /dev/null +++ b/.codesandbox/node/child_process.js @@ -0,0 +1,1021 @@ +"use strict"; + +const { + ArrayIsArray, + ArrayPrototypeFilter, + ArrayPrototypeIncludes, + ArrayPrototypeJoin, + ArrayPrototypeLastIndexOf, + ArrayPrototypePush, + ArrayPrototypePushApply, + ArrayPrototypeSlice, + ArrayPrototypeSort, + ArrayPrototypeSplice, + ArrayPrototypeUnshift, + ObjectAssign, + ObjectDefineProperty, + ObjectPrototypeHasOwnProperty, + PromiseWithResolvers, + RegExpPrototypeExec, + SafeSet, + StringPrototypeIncludes, + StringPrototypeSlice, + StringPrototypeToUpperCase, + SymbolDispose, +} = primordials; + +const { + assignFunctionName, + convertToValidSignal, + getSystemErrorName, + kEmptyObject, + promisify, +} = require("internal/util"); +const { isArrayBufferView } = require("internal/util/types"); +let debug = require("internal/util/debuglog").debuglog( + "child_process", + (fn) => { + debug = fn; + } +); +const { Buffer } = require("buffer"); +const { Pipe, constants: PipeConstants } = internalBinding("pipe_wrap"); + +const { + AbortError, + codes: { + ERR_CHILD_PROCESS_IPC_REQUIRED, + ERR_CHILD_PROCESS_STDIO_MAXBUFFER, + ERR_INVALID_ARG_TYPE, + ERR_INVALID_ARG_VALUE, + }, + genericNodeError, +} = require("internal/errors"); +const { clearTimeout, setTimeout } = require("timers"); +const { getValidatedPath } = require("internal/fs/utils"); +const { + validateAbortSignal, + validateArray, + validateBoolean, + validateFunction, + validateInteger, + validateInt32, + validateNumber, + validateObject, + validateString, +} = require("internal/validators"); +const child_process = require("internal/child_process"); +const { getValidStdio, setupChannel, ChildProcess, stdioStringToArray } = + child_process; + +const MAX_BUFFER = 1024 * 1024; + +const permission = require("internal/process/permission"); + +const isZOS = process.platform === "os390"; +let addAbortListener; + +/** + * Spawns a new Node.js process + fork. 
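+ * Illustrative example (added, not part of the original source; the file
+ * name below is hypothetical):
+ *   const child = fork('./worker.js', ['--id', '1'], { silent: true });
+ *   child.on('message', (msg) => console.log(msg));
+ *   child.send({ cmd: 'start' });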
+ * @param {string|URL} modulePath + * @param {string[]} [args] + * @param {{ + * cwd?: string | URL; + * detached?: boolean; + * env?: Record; + * execPath?: string; + * execArgv?: string[]; + * gid?: number; + * serialization?: string; + * signal?: AbortSignal; + * killSignal?: string | number; + * silent?: boolean; + * stdio?: Array | string; + * uid?: number; + * windowsVerbatimArguments?: boolean; + * timeout?: number; + * }} [options] + * @returns {ChildProcess} + */ +function fork(modulePath, args = [], options) { + modulePath = getValidatedPath(modulePath, "modulePath"); + + // Get options and args arguments. + let execArgv; + + if (args == null) { + args = []; + } else if (typeof args === "object" && !ArrayIsArray(args)) { + options = args; + args = []; + } else { + validateArray(args, "args"); + } + + if (options != null) { + validateObject(options, "options"); + } + options = { __proto__: null, ...options, shell: false }; + options.execPath ||= process.execPath; + validateArgumentNullCheck(options.execPath, "options.execPath"); + + // Prepare arguments for fork: + execArgv = options.execArgv || process.execArgv; + validateArgumentsNullCheck(execArgv, "options.execArgv"); + + if (execArgv === process.execArgv && process._eval != null) { + const index = ArrayPrototypeLastIndexOf(execArgv, process._eval); + if (index > 0) { + // Remove the -e switch to avoid fork bombing ourselves. + execArgv = ArrayPrototypeSlice(execArgv); + ArrayPrototypeSplice(execArgv, index - 1, 2); + } + } + + args = [...execArgv, modulePath, ...args]; + + if (typeof options.stdio === "string") { + options.stdio = stdioStringToArray(options.stdio, "ipc"); + } else if (!ArrayIsArray(options.stdio)) { + // Use a separate fd=3 for the IPC channel. Inherit stdin, stdout, + // and stderr from the parent if silent isn't set. + options.stdio = stdioStringToArray( + options.silent ? "pipe" : "inherit", + "ipc" + ); + } else if (!ArrayPrototypeIncludes(options.stdio, "ipc")) { + throw new ERR_CHILD_PROCESS_IPC_REQUIRED("options.stdio"); + } + + return spawn(options.execPath, args, options); +} + +function _forkChild(fd, serializationMode) { + // set process.send() + const p = new Pipe(PipeConstants.IPC); + p.open(fd); + p.unref(); + const control = setupChannel(process, p, serializationMode); + process.on("newListener", function onNewListener(name) { + if (name === "message" || name === "disconnect") control.refCounted(); + }); + process.on("removeListener", function onRemoveListener(name) { + if (name === "message" || name === "disconnect") control.unrefCounted(); + }); +} + +function normalizeExecArgs(command, options, callback) { + validateString(command, "command"); + validateArgumentNullCheck(command, "command"); + + if (typeof options === "function") { + callback = options; + options = undefined; + } + + // Make a shallow copy so we don't clobber the user's options object. + options = { __proto__: null, ...options }; + options.shell = typeof options.shell === "string" ? options.shell : true; + + return { + file: command, + options: options, + callback: callback, + }; +} + +/** + * Spawns a shell executing the given command. 
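+ * Illustrative example (added, not part of the original source):
+ *   exec('ls -la', (error, stdout, stderr) => {
+ *     if (error) throw error;
+ *     console.log(stdout);
+ *   });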
+ * @param {string} command + * @param {{ + * cmd?: string; + * env?: Record; + * encoding?: string; + * shell?: string; + * signal?: AbortSignal; + * timeout?: number; + * maxBuffer?: number; + * killSignal?: string | number; + * uid?: number; + * gid?: number; + * windowsHide?: boolean; + * }} [options] + * @param {( + * error?: Error, + * stdout?: string | Buffer, + * stderr?: string | Buffer + * ) => any} [callback] + * @returns {ChildProcess} + */ +function exec(command, options, callback) { + const opts = normalizeExecArgs(command, options, callback); + return module.exports.execFile(opts.file, opts.options, opts.callback); +} + +const customPromiseExecFunction = (orig) => { + return assignFunctionName(orig.name, function (...args) { + const { promise, resolve, reject } = PromiseWithResolvers(); + + promise.child = orig(...args, (err, stdout, stderr) => { + if (err !== null) { + err.stdout = stdout; + err.stderr = stderr; + reject(err); + } else { + resolve({ stdout, stderr }); + } + }); + + return promise; + }); +}; + +ObjectDefineProperty(exec, promisify.custom, { + __proto__: null, + enumerable: false, + value: customPromiseExecFunction(exec), +}); + +function normalizeExecFileArgs(file, args, options, callback) { + if (ArrayIsArray(args)) { + args = ArrayPrototypeSlice(args); + } else if (args != null && typeof args === "object") { + callback = options; + options = args; + args = null; + } else if (typeof args === "function") { + callback = args; + options = null; + args = null; + } + + args ??= []; + + if (typeof options === "function") { + callback = options; + } else if (options != null) { + validateObject(options, "options"); + } + + options ??= kEmptyObject; + + if (callback != null) { + validateFunction(callback, "callback"); + } + + // Validate argv0, if present. + if (options.argv0 != null) { + validateString(options.argv0, "options.argv0"); + validateArgumentNullCheck(options.argv0, "options.argv0"); + } + + return { file, args, options, callback }; +} + +/** + * Spawns the specified file as a shell. + * @param {string} file + * @param {string[]} [args] + * @param {{ + * cwd?: string | URL; + * env?: Record; + * encoding?: string; + * timeout?: number; + * maxBuffer?: number; + * killSignal?: string | number; + * uid?: number; + * gid?: number; + * windowsHide?: boolean; + * windowsVerbatimArguments?: boolean; + * shell?: boolean | string; + * signal?: AbortSignal; + * }} [options] + * @param {( + * error?: Error, + * stdout?: string | Buffer, + * stderr?: string | Buffer + * ) => any} [callback] + * @returns {ChildProcess} + */ +function execFile(file, args, options, callback) { + ({ file, args, options, callback } = normalizeExecFileArgs( + file, + args, + options, + callback + )); + + options = { + __proto__: null, + encoding: "utf8", + timeout: 0, + maxBuffer: MAX_BUFFER, + killSignal: "SIGTERM", + cwd: null, + env: null, + shell: false, + ...options, + }; + + // Validate the timeout, if present. + validateTimeout(options.timeout); + + // Validate maxBuffer, if present. 
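+  // Added note (not in the original source): when stdout or stderr grows past
+  // maxBuffer, the collected output is truncated, the child is killed, and
+  // the callback receives ERR_CHILD_PROCESS_STDIO_MAXBUFFER (see exithandler
+  // and the data listeners below).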
+ validateMaxBuffer(options.maxBuffer); + + options.killSignal = sanitizeKillSignal(options.killSignal); + + const child = spawn(file, args, { + cwd: options.cwd, + env: options.env, + gid: options.gid, + shell: options.shell, + signal: options.signal, + uid: options.uid, + windowsHide: !!options.windowsHide, + windowsVerbatimArguments: !!options.windowsVerbatimArguments, + }); + + let encoding; + const _stdout = []; + const _stderr = []; + if (options.encoding !== "buffer" && Buffer.isEncoding(options.encoding)) { + encoding = options.encoding; + } else { + encoding = null; + } + let stdoutLen = 0; + let stderrLen = 0; + let killed = false; + let exited = false; + let timeoutId; + + let ex = null; + + let cmd = file; + + function exithandler(code, signal) { + if (exited) return; + exited = true; + + if (timeoutId) { + clearTimeout(timeoutId); + timeoutId = null; + } + + if (!callback) return; + + // merge chunks + let stdout; + let stderr; + if (encoding || child.stdout?.readableEncoding) { + stdout = ArrayPrototypeJoin(_stdout, ""); + } else { + stdout = Buffer.concat(_stdout); + } + if (encoding || child.stderr?.readableEncoding) { + stderr = ArrayPrototypeJoin(_stderr, ""); + } else { + stderr = Buffer.concat(_stderr); + } + + if (!ex && code === 0 && signal === null) { + callback(null, stdout, stderr); + return; + } + + if (args?.length) cmd += ` ${ArrayPrototypeJoin(args, " ")}`; + + ex ||= genericNodeError(`Command failed: ${cmd}\n${stderr}`, { + code: code < 0 ? getSystemErrorName(code) : code, + killed: child.killed || killed, + signal: signal, + }); + + ex.cmd = cmd; + callback(ex, stdout, stderr); + } + + function errorhandler(e) { + ex = e; + + if (child.stdout) child.stdout.destroy(); + + if (child.stderr) child.stderr.destroy(); + + exithandler(); + } + + function kill() { + if (child.stdout) child.stdout.destroy(); + + if (child.stderr) child.stderr.destroy(); + + killed = true; + try { + child.kill(options.killSignal); + } catch (e) { + ex = e; + exithandler(); + } + } + + if (options.timeout > 0) { + timeoutId = setTimeout(function delayedKill() { + kill(); + timeoutId = null; + }, options.timeout); + } + + if (child.stdout) { + if (encoding) child.stdout.setEncoding(encoding); + + child.stdout.on("data", function onChildStdout(chunk) { + // Do not need to count the length + if (options.maxBuffer === Infinity) { + ArrayPrototypePush(_stdout, chunk); + return; + } + const encoding = child.stdout.readableEncoding; + const length = encoding + ? Buffer.byteLength(chunk, encoding) + : chunk.length; + const slice = encoding + ? StringPrototypeSlice + : (buf, ...args) => buf.slice(...args); + stdoutLen += length; + + if (stdoutLen > options.maxBuffer) { + const truncatedLen = options.maxBuffer - (stdoutLen - length); + ArrayPrototypePush(_stdout, slice(chunk, 0, truncatedLen)); + + ex = new ERR_CHILD_PROCESS_STDIO_MAXBUFFER("stdout"); + kill(); + } else { + ArrayPrototypePush(_stdout, chunk); + } + }); + } + + if (child.stderr) { + if (encoding) child.stderr.setEncoding(encoding); + + child.stderr.on("data", function onChildStderr(chunk) { + // Do not need to count the length + if (options.maxBuffer === Infinity) { + ArrayPrototypePush(_stderr, chunk); + return; + } + const encoding = child.stderr.readableEncoding; + const length = encoding + ? 
Buffer.byteLength(chunk, encoding) + : chunk.length; + stderrLen += length; + + if (stderrLen > options.maxBuffer) { + const truncatedLen = options.maxBuffer - (stderrLen - length); + ArrayPrototypePush(_stderr, chunk.slice(0, truncatedLen)); + + ex = new ERR_CHILD_PROCESS_STDIO_MAXBUFFER("stderr"); + kill(); + } else { + ArrayPrototypePush(_stderr, chunk); + } + }); + } + + child.addListener("close", exithandler); + child.addListener("error", errorhandler); + + return child; +} + +ObjectDefineProperty(execFile, promisify.custom, { + __proto__: null, + enumerable: false, + value: customPromiseExecFunction(execFile), +}); + +function copyProcessEnvToEnv(env, name, optionEnv) { + if ( + process.env[name] && + (!optionEnv || !ObjectPrototypeHasOwnProperty(optionEnv, name)) + ) { + env[name] = process.env[name]; + } +} + +let permissionModelFlagsToCopy; + +function getPermissionModelFlagsToCopy() { + if (permissionModelFlagsToCopy === undefined) { + permissionModelFlagsToCopy = [ + ...permission.availableFlags(), + "--permission", + ]; + } + return permissionModelFlagsToCopy; +} + +function copyPermissionModelFlagsToEnv(env, key, args) { + // Do not override if permission was already passed to file + if ( + args.includes("--permission") || + (env[key] && env[key].indexOf("--permission") !== -1) + ) { + return; + } + + const flagsToCopy = getPermissionModelFlagsToCopy(); + for (const arg of process.execArgv) { + for (const flag of flagsToCopy) { + if (arg.startsWith(flag)) { + env[key] = `${env[key] ? env[key] + " " + arg : arg}`; + } + } + } +} + +let emittedDEP0190Already = false; +function normalizeSpawnArguments(file, args, options) { + validateString(file, "file"); + validateArgumentNullCheck(file, "file"); + + if (file.length === 0) + throw new ERR_INVALID_ARG_VALUE("file", file, "cannot be empty"); + + if (ArrayIsArray(args)) { + args = ArrayPrototypeSlice(args); + } else if (args == null) { + args = []; + } else if (typeof args !== "object") { + throw new ERR_INVALID_ARG_TYPE("args", "object", args); + } else { + options = args; + args = []; + } + + validateArgumentsNullCheck(args, "args"); + + if (options === undefined) options = kEmptyObject; + else validateObject(options, "options"); + + options = { __proto__: null, ...options }; + let cwd = options.cwd; + + // Validate the cwd, if present. + if (cwd != null) { + cwd = getValidatedPath(cwd, "options.cwd"); + } + + // Validate detached, if present. + if (options.detached != null) { + validateBoolean(options.detached, "options.detached"); + } + + // Validate the uid, if present. + if (options.uid != null) { + validateInt32(options.uid, "options.uid"); + } + + // Validate the gid, if present. + if (options.gid != null) { + validateInt32(options.gid, "options.gid"); + } + + // Validate the shell, if present. + if ( + options.shell != null && + typeof options.shell !== "boolean" && + typeof options.shell !== "string" + ) { + throw new ERR_INVALID_ARG_TYPE( + "options.shell", + ["boolean", "string"], + options.shell + ); + } + + // Validate argv0, if present. + if (options.argv0 != null) { + validateString(options.argv0, "options.argv0"); + validateArgumentNullCheck(options.argv0, "options.argv0"); + } + + // Validate windowsHide, if present. + if (options.windowsHide != null) { + validateBoolean(options.windowsHide, "options.windowsHide"); + } + + // Validate windowsVerbatimArguments, if present. 
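+  // Added note (not in the original source): when true, arguments are passed
+  // to the child on Windows exactly as written, with no quoting or escaping;
+  // it is also forced on below when the shell resolves to cmd.exe.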
+ let { windowsVerbatimArguments } = options; + if (windowsVerbatimArguments != null) { + validateBoolean( + windowsVerbatimArguments, + "options.windowsVerbatimArguments" + ); + } + + if (options.shell) { + validateArgumentNullCheck(options.shell, "options.shell"); + if (args.length > 0 && !emittedDEP0190Already) { + process.emitWarning( + "Passing args to a child process with shell option true can lead to security " + + "vulnerabilities, as the arguments are not escaped, only concatenated.", + "DeprecationWarning", + "DEP0190" + ); + emittedDEP0190Already = true; + } + + const command = + args.length > 0 ? `${file} ${ArrayPrototypeJoin(args, " ")}` : file; + // Set the shell, switches, and commands. + if (process.platform === "win32") { + if (typeof options.shell === "string") file = options.shell; + else file = process.env.comspec || "cmd.exe"; + // '/d /s /c' is used only for cmd.exe. + if (RegExpPrototypeExec(/^(?:.*\\)?cmd(?:\.exe)?$/i, file) !== null) { + args = ["/d", "/s", "/c", `"${command}"`]; + windowsVerbatimArguments = true; + } else { + args = ["-c", command]; + } + } else { + if (typeof options.shell === "string") file = options.shell; + else if (process.platform === "android") file = "/system/bin/sh"; + else file = "/bin/sh"; + args = ["-c", command]; + } + } + + if (typeof options.argv0 === "string") { + ArrayPrototypeUnshift(args, options.argv0); + } else { + ArrayPrototypeUnshift(args, file); + } + + // Shallow copy to guarantee changes won't impact process.env + const env = options.env || { ...process.env }; + const envPairs = []; + + // process.env.NODE_V8_COVERAGE always propagates, making it possible to + // collect coverage for programs that spawn with white-listed environment. + copyProcessEnvToEnv(env, "NODE_V8_COVERAGE", options.env); + + if (isZOS) { + // The following environment variables must always propagate if set. + copyProcessEnvToEnv(env, "_BPXK_AUTOCVT", options.env); + copyProcessEnvToEnv(env, "_CEE_RUNOPTS", options.env); + copyProcessEnvToEnv(env, "_TAG_REDIR_ERR", options.env); + copyProcessEnvToEnv(env, "_TAG_REDIR_IN", options.env); + copyProcessEnvToEnv(env, "_TAG_REDIR_OUT", options.env); + copyProcessEnvToEnv(env, "STEPLIB", options.env); + copyProcessEnvToEnv(env, "LIBPATH", options.env); + copyProcessEnvToEnv(env, "_EDC_SIG_DFLT", options.env); + copyProcessEnvToEnv(env, "_EDC_SUSV3", options.env); + } + + if (permission.isEnabled()) { + copyPermissionModelFlagsToEnv(env, "NODE_OPTIONS", args); + } + + let envKeys = []; + // Prototype values are intentionally included. + for (const key in env) { + ArrayPrototypePush(envKeys, key); + } + + if (process.platform === "win32") { + // On Windows env keys are case insensitive. Filter out duplicates, + // keeping only the first one (in lexicographic order) + const sawKey = new SafeSet(); + envKeys = ArrayPrototypeFilter(ArrayPrototypeSort(envKeys), (key) => { + const uppercaseKey = StringPrototypeToUpperCase(key); + if (sawKey.has(uppercaseKey)) { + return false; + } + sawKey.add(uppercaseKey); + return true; + }); + } + + for (const key of envKeys) { + const value = env[key]; + if (value !== undefined) { + validateArgumentNullCheck(key, `options.env['${key}']`); + validateArgumentNullCheck(value, `options.env['${key}']`); + ArrayPrototypePush(envPairs, `${key}=${value}`); + } + } + + return { + // Make a shallow copy so we don't clobber the user's options object. 
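+    // Added note (not in the original source): `__proto__: null` gives the
+    // result a null prototype, so user-supplied option names can never
+    // collide with properties inherited from Object.prototype.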
+ __proto__: null, + ...options, + args, + cwd, + detached: !!options.detached, + envPairs, + file, + windowsHide: !!options.windowsHide, + windowsVerbatimArguments: !!windowsVerbatimArguments, + }; +} + +function abortChildProcess(child, killSignal, reason) { + if (!child) return; + try { + if (child.kill(killSignal)) { + child.emit("error", new AbortError(undefined, { cause: reason })); + } + } catch (err) { + child.emit("error", err); + } +} + +/** + * Spawns a new process using the given `file`. + * @param {string} file + * @param {string[]} [args] + * @param {{ + * cwd?: string | URL; + * env?: Record; + * argv0?: string; + * stdio?: Array | string; + * detached?: boolean; + * uid?: number; + * gid?: number; + * serialization?: string; + * shell?: boolean | string; + * windowsVerbatimArguments?: boolean; + * windowsHide?: boolean; + * signal?: AbortSignal; + * timeout?: number; + * killSignal?: string | number; + * }} [options] + * @returns {ChildProcess} + */ +function spawn(file, args, options) { + options = normalizeSpawnArguments(file, args, options); + validateTimeout(options.timeout); + validateAbortSignal(options.signal, "options.signal"); + const killSignal = sanitizeKillSignal(options.killSignal); + const child = new ChildProcess(); + + debug("spawn", options); + child.spawn(options); + + if (options.timeout > 0) { + let timeoutId = setTimeout(() => { + if (timeoutId) { + try { + child.kill(killSignal); + } catch (err) { + child.emit("error", err); + } + timeoutId = null; + } + }, options.timeout); + + child.once("exit", () => { + if (timeoutId) { + clearTimeout(timeoutId); + timeoutId = null; + } + }); + } + + if (options.signal) { + const signal = options.signal; + if (signal.aborted) { + process.nextTick(onAbortListener); + } else { + addAbortListener ??= + require("internal/events/abort_listener").addAbortListener; + const disposable = addAbortListener(signal, onAbortListener); + child.once("exit", disposable[SymbolDispose]); + } + + function onAbortListener() { + abortChildProcess(child, killSignal, options.signal.reason); + } + } + + return child; +} + +/** + * Spawns a new process synchronously using the given `file`. + * @param {string} file + * @param {string[]} [args] + * @param {{ + * cwd?: string | URL; + * input?: string | Buffer | TypedArray | DataView; + * argv0?: string; + * stdio?: string | Array; + * env?: Record; + * uid?: number; + * gid?: number; + * timeout?: number; + * killSignal?: string | number; + * maxBuffer?: number; + * encoding?: string; + * shell?: boolean | string; + * windowsVerbatimArguments?: boolean; + * windowsHide?: boolean; + * }} [options] + * @returns {{ + * pid: number; + * output: Array; + * stdout: Buffer | string; + * stderr: Buffer | string; + * status: number | null; + * signal: string | null; + * error: Error; + * }} + */ +function spawnSync(file, args, options) { + options = { + __proto__: null, + maxBuffer: MAX_BUFFER, + ...normalizeSpawnArguments(file, args, options), + }; + + debug("spawnSync", options); + + // Validate the timeout, if present. + validateTimeout(options.timeout); + + // Validate maxBuffer, if present. + validateMaxBuffer(options.maxBuffer); + + // Validate and translate the kill signal, if present. 
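+  // Added note (not in the original source): killSignal accepts either a
+  // signal name such as 'SIGTERM' or a signal number; sanitizeKillSignal()
+  // normalizes both forms via convertToValidSignal().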
+ options.killSignal = sanitizeKillSignal(options.killSignal); + + options.stdio = getValidStdio(options.stdio || "pipe", true).stdio; + + if (options.input) { + const stdin = (options.stdio[0] = { ...options.stdio[0] }); + stdin.input = options.input; + } + + // We may want to pass data in on any given fd, ensure it is a valid buffer + for (let i = 0; i < options.stdio.length; i++) { + const input = options.stdio[i]?.input; + if (input != null) { + const pipe = (options.stdio[i] = { ...options.stdio[i] }); + if (isArrayBufferView(input)) { + pipe.input = input; + } else if (typeof input === "string") { + pipe.input = Buffer.from(input, options.encoding); + } else { + throw new ERR_INVALID_ARG_TYPE( + `options.stdio[${i}]`, + ["Buffer", "TypedArray", "DataView", "string"], + input + ); + } + } + } + + return child_process.spawnSync(options); +} + +function checkExecSyncError(ret, args, cmd) { + let err; + if (ret.error) { + err = ret.error; + ObjectAssign(err, ret); + } else if (ret.status !== 0) { + let msg = "Command failed: "; + msg += cmd || ArrayPrototypeJoin(args, " "); + if (ret.stderr && ret.stderr.length > 0) + msg += `\n${ret.stderr.toString()}`; + err = genericNodeError(msg, ret); + } + return err; +} + +/** + * Spawns a file as a shell synchronously. + * @param {string} file + * @param {string[]} [args] + * @param {{ + * cwd?: string | URL; + * input?: string | Buffer | TypedArray | DataView; + * stdio?: string | Array; + * env?: Record; + * uid?: number; + * gid?: number; + * timeout?: number; + * killSignal?: string | number; + * maxBuffer?: number; + * encoding?: string; + * windowsHide?: boolean; + * shell?: boolean | string; + * }} [options] + * @returns {Buffer | string} + */ +function execFileSync(file, args, options) { + ({ file, args, options } = normalizeExecFileArgs(file, args, options)); + + const inheritStderr = !options.stdio; + const ret = spawnSync(file, args, options); + + if (inheritStderr && ret.stderr) process.stderr.write(ret.stderr); + + const errArgs = [options.argv0 || file]; + ArrayPrototypePushApply(errArgs, args); + const err = checkExecSyncError(ret, errArgs); + + if (err) throw err; + + return ret.stdout; +} + +/** + * Spawns a shell executing the given `command` synchronously. 
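+ * Illustrative example (added, not part of the original source):
+ *   const listing = execSync('ls -la', { encoding: 'utf8' });
+ * Throws if the command exits with a non-zero status; unless a custom stdio
+ * is given, the child's stderr is forwarded to the parent's stderr (see
+ * checkExecSyncError and the inheritStderr handling below).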
+ * @param {string} command + * @param {{ + * cwd?: string | URL; + * input?: string | Buffer | TypedArray | DataView; + * stdio?: string | Array; + * env?: Record; + * shell?: string; + * uid?: number; + * gid?: number; + * timeout?: number; + * killSignal?: string | number; + * maxBuffer?: number; + * encoding?: string; + * windowsHide?: boolean; + * }} [options] + * @returns {Buffer | string} + */ +function execSync(command, options) { + const opts = normalizeExecArgs(command, options, null); + const inheritStderr = !opts.options.stdio; + + const ret = spawnSync(opts.file, opts.options); + + if (inheritStderr && ret.stderr) process.stderr.write(ret.stderr); + + const err = checkExecSyncError(ret, undefined, command); + + if (err) throw err; + + return ret.stdout; +} + +function validateArgumentNullCheck(arg, propName) { + if (typeof arg === "string" && StringPrototypeIncludes(arg, "\u0000")) { + throw new ERR_INVALID_ARG_VALUE( + propName, + arg, + "must be a string without null bytes" + ); + } +} + +function validateArgumentsNullCheck(args, propName) { + for (let i = 0; i < args.length; ++i) { + validateArgumentNullCheck(args[i], `${propName}[${i}]`); + } +} + +function validateTimeout(timeout) { + if (timeout != null) { + validateInteger(timeout, "timeout", 0); + } +} + +function validateMaxBuffer(maxBuffer) { + if (maxBuffer != null) { + validateNumber(maxBuffer, "options.maxBuffer", 0); + } +} + +function sanitizeKillSignal(killSignal) { + if (typeof killSignal === "string" || typeof killSignal === "number") { + return convertToValidSignal(killSignal); + } else if (killSignal != null) { + throw new ERR_INVALID_ARG_TYPE( + "options.killSignal", + ["string", "number"], + killSignal + ); + } +} + +module.exports = { + _forkChild, + ChildProcess, + exec, + execFile, + execFileSync, + execSync, + fork, + spawn, + spawnSync, +}; diff --git a/.codesandbox/node/cluster.js b/.codesandbox/node/cluster.js new file mode 100644 index 00000000..6f3dc168 --- /dev/null +++ b/.codesandbox/node/cluster.js @@ -0,0 +1,8 @@ +'use strict'; + +const { + ObjectPrototypeHasOwnProperty: ObjectHasOwn, +} = primordials; + +const childOrPrimary = ObjectHasOwn(process.env, 'NODE_UNIQUE_ID') ? 
'child' : 'primary'; +module.exports = require(`internal/cluster/${childOrPrimary}`); \ No newline at end of file diff --git a/.codesandbox/node/console.js b/.codesandbox/node/console.js new file mode 100644 index 00000000..d896d2e3 --- /dev/null +++ b/.codesandbox/node/console.js @@ -0,0 +1,3 @@ +"use strict"; + +module.exports = require("internal/console/global"); diff --git a/.codesandbox/node/crypto.js b/.codesandbox/node/crypto.js new file mode 100644 index 00000000..06bfebc7 --- /dev/null +++ b/.codesandbox/node/crypto.js @@ -0,0 +1,1365 @@ +'use strict'; + +const { + Array, + ArrayBufferIsView, + ArrayIsArray, + ArrayPrototypeForEach, + MathFloor, + MathMin, + MathTrunc, + NumberIsInteger, + NumberIsNaN, + NumberMAX_SAFE_INTEGER, + NumberMIN_SAFE_INTEGER, + ObjectDefineProperties, + ObjectDefineProperty, + ObjectPrototypeHasOwnProperty, + ObjectSetPrototypeOf, + RegExpPrototypeSymbolReplace, + StringPrototypeCharCodeAt, + StringPrototypeSlice, + StringPrototypeToLowerCase, + StringPrototypeTrim, + SymbolSpecies, + SymbolToPrimitive, + TypedArrayPrototypeFill, + TypedArrayPrototypeGetBuffer, + TypedArrayPrototypeGetByteLength, + TypedArrayPrototypeGetByteOffset, + TypedArrayPrototypeGetLength, + TypedArrayPrototypeSet, + TypedArrayPrototypeSlice, + Uint8Array, + Uint8ArrayPrototype, +} = primordials; + +const { + byteLengthUtf8, + compare: _compare, + compareOffset, + copy: _copy, + fill: bindingFill, + isAscii: bindingIsAscii, + isUtf8: bindingIsUtf8, + indexOfBuffer, + indexOfNumber, + indexOfString, + swap16: _swap16, + swap32: _swap32, + swap64: _swap64, + kMaxLength, + kStringMaxLength, + atob: _atob, + btoa: _btoa, +} = internalBinding('buffer'); +const { + constants: { + ALL_PROPERTIES, + ONLY_ENUMERABLE, + }, + getOwnNonIndexProperties, + isInsideNodeModules, +} = internalBinding('util'); +const { + customInspectSymbol, + lazyDOMException, + normalizeEncoding, + kIsEncodingSymbol, + defineLazyProperties, + encodingsMap, + deprecate, +} = require('internal/util'); +const { + isAnyArrayBuffer, + isArrayBufferView, + isUint8Array, + isTypedArray, +} = require('internal/util/types'); +const { + inspect: utilInspect, +} = require('internal/util/inspect'); + +const { + codes: { + ERR_BUFFER_OUT_OF_BOUNDS, + ERR_INVALID_ARG_TYPE, + ERR_INVALID_ARG_VALUE, + ERR_INVALID_BUFFER_SIZE, + ERR_MISSING_ARGS, + ERR_OUT_OF_RANGE, + ERR_UNKNOWN_ENCODING, + }, + genericNodeError, +} = require('internal/errors'); +const { + validateArray, + validateBuffer, + validateInteger, + validateNumber, + validateString, +} = require('internal/validators'); +// Provide validateInteger() but with kMaxLength as the default maximum value. 
+const validateOffset = (value, name, min = 0, max = kMaxLength) => + validateInteger(value, name, min, max); + +const { + FastBuffer, + markAsUntransferable, + addBufferPrototypeMethods, + createUnsafeBuffer, +} = require('internal/buffer'); + +FastBuffer.prototype.constructor = Buffer; +Buffer.prototype = FastBuffer.prototype; +addBufferPrototypeMethods(Buffer.prototype); + +const constants = ObjectDefineProperties({}, { + MAX_LENGTH: { + __proto__: null, + value: kMaxLength, + writable: false, + enumerable: true, + }, + MAX_STRING_LENGTH: { + __proto__: null, + value: kStringMaxLength, + writable: false, + enumerable: true, + }, +}); + +Buffer.poolSize = 8 * 1024; +let poolSize, poolOffset, allocPool, allocBuffer; + +function createPool() { + poolSize = Buffer.poolSize; + allocBuffer = createUnsafeBuffer(poolSize); + allocPool = allocBuffer.buffer; + markAsUntransferable(allocPool); + poolOffset = 0; +} +createPool(); + +function alignPool() { + // Ensure aligned slices + if (poolOffset & 0x7) { + poolOffset |= 0x7; + poolOffset++; + } +} + +let bufferWarningAlreadyEmitted = false; +let nodeModulesCheckCounter = 0; +const bufferWarning = 'Buffer() is deprecated due to security and usability ' + + 'issues. Please use the Buffer.alloc(), ' + + 'Buffer.allocUnsafe(), or Buffer.from() methods instead.'; + +function showFlaggedDeprecation() { + if (bufferWarningAlreadyEmitted || + ++nodeModulesCheckCounter > 10000 || + (!require('internal/options').getOptionValue('--pending-deprecation') && + isInsideNodeModules(100, true))) { + // We don't emit a warning, because we either: + // - Already did so, or + // - Already checked too many times whether a call is coming + // from node_modules and want to stop slowing down things, or + // - We aren't running with `--pending-deprecation` enabled, + // and the code is inside `node_modules`. + // - We found node_modules in up to the topmost 100 frames, or + // there are more than 100 frames and we don't want to search anymore. + return; + } + + process.emitWarning(bufferWarning, 'DeprecationWarning', 'DEP0005'); + bufferWarningAlreadyEmitted = true; +} + +function toInteger(n, defaultVal) { + n = +n; + if (!NumberIsNaN(n) && + n >= NumberMIN_SAFE_INTEGER && + n <= NumberMAX_SAFE_INTEGER) { + return ((n % 1) === 0 ? n : MathFloor(n)); + } + return defaultVal; +} + +function copyImpl(source, target, targetStart, sourceStart, sourceEnd) { + if (!ArrayBufferIsView(source)) + throw new ERR_INVALID_ARG_TYPE('source', ['Buffer', 'Uint8Array'], source); + if (!ArrayBufferIsView(target)) + throw new ERR_INVALID_ARG_TYPE('target', ['Buffer', 'Uint8Array'], target); + + if (targetStart === undefined) { + targetStart = 0; + } else { + targetStart = NumberIsInteger(targetStart) ? targetStart : toInteger(targetStart, 0); + if (targetStart < 0) + throw new ERR_OUT_OF_RANGE('targetStart', '>= 0', targetStart); + } + + if (sourceStart === undefined) { + sourceStart = 0; + } else { + sourceStart = NumberIsInteger(sourceStart) ? sourceStart : toInteger(sourceStart, 0); + if (sourceStart < 0 || sourceStart > source.byteLength) + throw new ERR_OUT_OF_RANGE('sourceStart', `>= 0 && <= ${source.byteLength}`, sourceStart); + } + + if (sourceEnd === undefined) { + sourceEnd = source.byteLength; + } else { + sourceEnd = NumberIsInteger(sourceEnd) ? 
sourceEnd : toInteger(sourceEnd, 0); + if (sourceEnd < 0) + throw new ERR_OUT_OF_RANGE('sourceEnd', '>= 0', sourceEnd); + } + + if (targetStart >= target.byteLength || sourceStart >= sourceEnd) + return 0; + + return _copyActual(source, target, targetStart, sourceStart, sourceEnd); +} + +function _copyActual(source, target, targetStart, sourceStart, sourceEnd) { + if (sourceEnd - sourceStart > target.byteLength - targetStart) + sourceEnd = sourceStart + target.byteLength - targetStart; + + let nb = sourceEnd - sourceStart; + const sourceLen = source.byteLength - sourceStart; + if (nb > sourceLen) + nb = sourceLen; + + if (nb <= 0) + return 0; + + _copy(source, target, targetStart, sourceStart, nb); + + return nb; +} + +/** + * The Buffer() constructor is deprecated in documentation and should not be + * used moving forward. Rather, developers should use one of the three new + * factory APIs: Buffer.from(), Buffer.allocUnsafe() or Buffer.alloc() based on + * their specific needs. There is no runtime deprecation because of the extent + * to which the Buffer constructor is used in the ecosystem currently -- a + * runtime deprecation would introduce too much breakage at this time. It's not + * likely that the Buffer constructors would ever actually be removed. + * Deprecation Code: DEP0005 + * @returns {Buffer} + */ +function Buffer(arg, encodingOrOffset, length) { + showFlaggedDeprecation(); + // Common case. + if (typeof arg === 'number') { + if (typeof encodingOrOffset === 'string') { + throw new ERR_INVALID_ARG_TYPE('string', 'string', arg); + } + return Buffer.alloc(arg); + } + return Buffer.from(arg, encodingOrOffset, length); +} + +ObjectDefineProperty(Buffer, SymbolSpecies, { + __proto__: null, + enumerable: false, + configurable: true, + get() { return FastBuffer; }, +}); + +/** + * Functionally equivalent to Buffer(arg, encoding) but throws a TypeError + * if value is a number. + * Buffer.from(str[, encoding]) + * Buffer.from(array) + * Buffer.from(buffer) + * Buffer.from(arrayBuffer[, byteOffset[, length]]) + * @param {any} value + * @param {BufferEncoding|number} encodingOrOffset + * @param {number} [length] + * @returns {Buffer} + */ +Buffer.from = function from(value, encodingOrOffset, length) { + if (typeof value === 'string') + return fromString(value, encodingOrOffset); + + if (typeof value === 'object' && value !== null) { + if (isAnyArrayBuffer(value)) + return fromArrayBuffer(value, encodingOrOffset, length); + + const valueOf = value.valueOf && value.valueOf(); + if (valueOf != null && + valueOf !== value && + (typeof valueOf === 'string' || typeof valueOf === 'object')) { + return from(valueOf, encodingOrOffset, length); + } + + const b = fromObject(value); + if (b) + return b; + + if (typeof value[SymbolToPrimitive] === 'function') { + const primitive = value[SymbolToPrimitive]('string'); + if (typeof primitive === 'string') { + return fromString(primitive, encodingOrOffset); + } + } + } + + throw new ERR_INVALID_ARG_TYPE( + 'first argument', + ['string', 'Buffer', 'ArrayBuffer', 'Array', 'Array-like Object'], + value, + ); +}; + +/** + * Creates the Buffer as a copy of the underlying ArrayBuffer of the view + * rather than the contents of the view. 
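+ * Illustrative example (added, not part of the original source):
+ *   const u16 = new Uint16Array([0xffff]);
+ *   const buf = Buffer.copyBytesFrom(u16);  // copies the two underlying bytes
+ *   u16[0] = 0;                             // does not change `buf`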
+ * @param {TypedArray} view + * @param {number} [offset] + * @param {number} [length] + * @returns {Buffer} + */ +Buffer.copyBytesFrom = function copyBytesFrom(view, offset, length) { + if (!isTypedArray(view)) { + throw new ERR_INVALID_ARG_TYPE('view', [ 'TypedArray' ], view); + } + + const viewLength = TypedArrayPrototypeGetLength(view); + if (viewLength === 0) { + return Buffer.alloc(0); + } + + if (offset !== undefined || length !== undefined) { + if (offset !== undefined) { + validateInteger(offset, 'offset', 0); + if (offset >= viewLength) return Buffer.alloc(0); + } else { + offset = 0; + } + let end; + if (length !== undefined) { + validateInteger(length, 'length', 0); + end = offset + length; + } else { + end = viewLength; + } + + view = TypedArrayPrototypeSlice(view, offset, end); + } + + return fromArrayLike(new Uint8Array( + TypedArrayPrototypeGetBuffer(view), + TypedArrayPrototypeGetByteOffset(view), + TypedArrayPrototypeGetByteLength(view))); +}; + +// Identical to the built-in %TypedArray%.of(), but avoids using the deprecated +// Buffer() constructor. Must use arrow function syntax to avoid automatically +// adding a `prototype` property and making the function a constructor. +// +// Refs: https://tc39.github.io/ecma262/#sec-%typedarray%.of +// Refs: https://esdiscuss.org/topic/isconstructor#content-11 +const of = (...items) => { + const newObj = createUnsafeBuffer(items.length); + for (let k = 0; k < items.length; k++) + newObj[k] = items[k]; + return newObj; +}; +Buffer.of = of; + +ObjectSetPrototypeOf(Buffer, Uint8Array); + +/** + * Creates a new filled Buffer instance. + * alloc(size[, fill[, encoding]]) + * @returns {FastBuffer} + */ +Buffer.alloc = function alloc(size, fill, encoding) { + validateNumber(size, 'size', 0, kMaxLength); + if (fill !== undefined && fill !== 0 && size > 0) { + const buf = createUnsafeBuffer(size); + return _fill(buf, fill, 0, buf.length, encoding); + } + return new FastBuffer(size); +}; + +/** + * Equivalent to Buffer(num), by default creates a non-zero-filled Buffer + * instance. If `--zero-fill-buffers` is set, will zero-fill the buffer. + * @returns {FastBuffer} + */ +Buffer.allocUnsafe = function allocUnsafe(size) { + validateNumber(size, 'size', 0, kMaxLength); + return allocate(size); +}; + +/** + * Equivalent to SlowBuffer(num), by default creates a non-zero-filled + * Buffer instance that is not allocated off the pre-initialized pool. + * If `--zero-fill-buffers` is set, will zero-fill the buffer. + * @param {number} size + * @returns {FastBuffer|undefined} + */ +Buffer.allocUnsafeSlow = function allocUnsafeSlow(size) { + validateNumber(size, 'size', 0, kMaxLength); + return createUnsafeBuffer(size); +}; + +// If --zero-fill-buffers command line argument is set, a zero-filled +// buffer is returned. 
+function SlowBuffer(size) { + validateNumber(size, 'size', 0, kMaxLength); + return createUnsafeBuffer(size); +} + +ObjectSetPrototypeOf(SlowBuffer.prototype, Uint8ArrayPrototype); +ObjectSetPrototypeOf(SlowBuffer, Uint8Array); + +function allocate(size) { + if (size <= 0) { + return new FastBuffer(); + } + if (size < (Buffer.poolSize >>> 1)) { + if (size > (poolSize - poolOffset)) + createPool(); + const b = new FastBuffer(allocPool, poolOffset, size); + poolOffset += size; + alignPool(); + return b; + } + return createUnsafeBuffer(size); +} + +function fromStringFast(string, ops) { + const maxLength = Buffer.poolSize >>> 1; + + let length = string.length; // Min length + + if (length >= maxLength) + return createFromString(string, ops); + + length *= 4; // Max length (4 bytes per character) + + if (length >= maxLength) + length = ops.byteLength(string); // Actual length + + if (length >= maxLength) + return createFromString(string, ops, length); + + if (length > (poolSize - poolOffset)) + createPool(); + + const actual = ops.write(allocBuffer, string, poolOffset, length); + const b = new FastBuffer(allocPool, poolOffset, actual); + + poolOffset += actual; + alignPool(); + return b; +} + +function createFromString(string, ops, length = ops.byteLength(string)) { + const buf = Buffer.allocUnsafeSlow(length); + const actual = ops.write(buf, string, 0, length); + return actual < length ? new FastBuffer(buf.buffer, 0, actual) : buf; +} + +function fromString(string, encoding) { + let ops; + if (!encoding || encoding === 'utf8' || typeof encoding !== 'string') { + ops = encodingOps.utf8; + } else { + ops = getEncodingOps(encoding); + if (ops === undefined) + throw new ERR_UNKNOWN_ENCODING(encoding); + } + + return string.length === 0 ? new FastBuffer() : fromStringFast(string, ops); +} + +function fromArrayBuffer(obj, byteOffset, length) { + // Convert byteOffset to integer + if (byteOffset === undefined) { + byteOffset = 0; + } else { + byteOffset = +byteOffset; + if (NumberIsNaN(byteOffset)) + byteOffset = 0; + } + + const maxLength = obj.byteLength - byteOffset; + + if (maxLength < 0) + throw new ERR_BUFFER_OUT_OF_BOUNDS('offset'); + + if (length !== undefined) { + // Convert length to non-negative integer. 
+ length = +length; + if (length > 0) { + if (length > maxLength) + throw new ERR_BUFFER_OUT_OF_BOUNDS('length'); + } else { + length = 0; + } + } + + return new FastBuffer(obj, byteOffset, length); +} + +function fromArrayLike(obj) { + if (obj.length <= 0) + return new FastBuffer(); + if (obj.length < (Buffer.poolSize >>> 1)) { + if (obj.length > (poolSize - poolOffset)) + createPool(); + const b = new FastBuffer(allocPool, poolOffset, obj.length); + TypedArrayPrototypeSet(b, obj, 0); + poolOffset += obj.length; + alignPool(); + return b; + } + return new FastBuffer(obj); +} + +function fromObject(obj) { + if (obj.length !== undefined || isAnyArrayBuffer(obj.buffer)) { + if (typeof obj.length !== 'number') { + return new FastBuffer(); + } + return fromArrayLike(obj); + } + + if (obj.type === 'Buffer' && ArrayIsArray(obj.data)) { + return fromArrayLike(obj.data); + } +} + +// Static methods + +Buffer.isBuffer = function isBuffer(b) { + return b instanceof Buffer; +}; + +Buffer.compare = function compare(buf1, buf2) { + if (!isUint8Array(buf1)) { + throw new ERR_INVALID_ARG_TYPE('buf1', ['Buffer', 'Uint8Array'], buf1); + } + + if (!isUint8Array(buf2)) { + throw new ERR_INVALID_ARG_TYPE('buf2', ['Buffer', 'Uint8Array'], buf2); + } + + if (buf1 === buf2) { + return 0; + } + + return _compare(buf1, buf2); +}; + +Buffer.isEncoding = function isEncoding(encoding) { + return typeof encoding === 'string' && encoding.length !== 0 && + normalizeEncoding(encoding) !== undefined; +}; +Buffer[kIsEncodingSymbol] = Buffer.isEncoding; + +Buffer.concat = function concat(list, length) { + validateArray(list, 'list'); + + if (list.length === 0) + return new FastBuffer(); + + if (length === undefined) { + length = 0; + for (let i = 0; i < list.length; i++) { + if (list[i].length) { + length += list[i].length; + } + } + } else { + validateOffset(length, 'length'); + } + + const buffer = Buffer.allocUnsafe(length); + let pos = 0; + for (let i = 0; i < list.length; i++) { + const buf = list[i]; + if (!isUint8Array(buf)) { + // TODO(BridgeAR): This should not be of type ERR_INVALID_ARG_TYPE. + // Instead, find the proper error code for this. + throw new ERR_INVALID_ARG_TYPE( + `list[${i}]`, ['Buffer', 'Uint8Array'], list[i]); + } + pos += _copyActual(buf, buffer, pos, 0, buf.length); + } + + // Note: `length` is always equal to `buffer.length` at this point + if (pos < length) { + // Zero-fill the remaining bytes if the specified `length` was more than + // the actual total length, i.e. if we have some remaining allocated bytes + // there were not initialized. 
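+    // Sketch of the effect: Buffer.concat([Buffer.from([1, 2]), Buffer.from([3])], 5)
+    // copies three bytes and then zero-fills here, yielding <Buffer 01 02 03 00 00>.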
+ TypedArrayPrototypeFill(buffer, 0, pos, length); + } + + return buffer; +}; + +function base64ByteLength(str, bytes) { + // Handle padding + if (StringPrototypeCharCodeAt(str, bytes - 1) === 0x3D) + bytes--; + if (bytes > 1 && StringPrototypeCharCodeAt(str, bytes - 1) === 0x3D) + bytes--; + + // Base64 ratio: 3/4 + return (bytes * 3) >>> 2; +} + +const encodingOps = { + utf8: { + encoding: 'utf8', + encodingVal: encodingsMap.utf8, + byteLength: byteLengthUtf8, + write: (buf, string, offset, len) => buf.utf8Write(string, offset, len), + slice: (buf, start, end) => buf.utf8Slice(start, end), + indexOf: (buf, val, byteOffset, dir) => + indexOfString(buf, val, byteOffset, encodingsMap.utf8, dir), + }, + ucs2: { + encoding: 'ucs2', + encodingVal: encodingsMap.utf16le, + byteLength: (string) => string.length * 2, + write: (buf, string, offset, len) => buf.ucs2Write(string, offset, len), + slice: (buf, start, end) => buf.ucs2Slice(start, end), + indexOf: (buf, val, byteOffset, dir) => + indexOfString(buf, val, byteOffset, encodingsMap.utf16le, dir), + }, + utf16le: { + encoding: 'utf16le', + encodingVal: encodingsMap.utf16le, + byteLength: (string) => string.length * 2, + write: (buf, string, offset, len) => buf.ucs2Write(string, offset, len), + slice: (buf, start, end) => buf.ucs2Slice(start, end), + indexOf: (buf, val, byteOffset, dir) => + indexOfString(buf, val, byteOffset, encodingsMap.utf16le, dir), + }, + latin1: { + encoding: 'latin1', + encodingVal: encodingsMap.latin1, + byteLength: (string) => string.length, + write: (buf, string, offset, len) => buf.latin1Write(string, offset, len), + slice: (buf, start, end) => buf.latin1Slice(start, end), + indexOf: (buf, val, byteOffset, dir) => + indexOfString(buf, val, byteOffset, encodingsMap.latin1, dir), + }, + ascii: { + encoding: 'ascii', + encodingVal: encodingsMap.ascii, + byteLength: (string) => string.length, + write: (buf, string, offset, len) => buf.asciiWrite(string, offset, len), + slice: (buf, start, end) => buf.asciiSlice(start, end), + indexOf: (buf, val, byteOffset, dir) => + indexOfBuffer(buf, + fromStringFast(val, encodingOps.ascii), + byteOffset, + encodingsMap.ascii, + dir), + }, + base64: { + encoding: 'base64', + encodingVal: encodingsMap.base64, + byteLength: (string) => base64ByteLength(string, string.length), + write: (buf, string, offset, len) => buf.base64Write(string, offset, len), + slice: (buf, start, end) => buf.base64Slice(start, end), + indexOf: (buf, val, byteOffset, dir) => + indexOfBuffer(buf, + fromStringFast(val, encodingOps.base64), + byteOffset, + encodingsMap.base64, + dir), + }, + base64url: { + encoding: 'base64url', + encodingVal: encodingsMap.base64url, + byteLength: (string) => base64ByteLength(string, string.length), + write: (buf, string, offset, len) => + buf.base64urlWrite(string, offset, len), + slice: (buf, start, end) => buf.base64urlSlice(start, end), + indexOf: (buf, val, byteOffset, dir) => + indexOfBuffer(buf, + fromStringFast(val, encodingOps.base64url), + byteOffset, + encodingsMap.base64url, + dir), + }, + hex: { + encoding: 'hex', + encodingVal: encodingsMap.hex, + byteLength: (string) => string.length >>> 1, + write: (buf, string, offset, len) => buf.hexWrite(string, offset, len), + slice: (buf, start, end) => buf.hexSlice(start, end), + indexOf: (buf, val, byteOffset, dir) => + indexOfBuffer(buf, + fromStringFast(val, encodingOps.hex), + byteOffset, + encodingsMap.hex, + dir), + }, +}; +function getEncodingOps(encoding) { + encoding += ''; + switch (encoding.length) { + case 4: + 
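+      // The exact (typically already lowercase) spelling is tried first; the string
+      // is only lowercased when that misses, so the common case skips the extra
+      // StringPrototypeToLowerCase call.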
if (encoding === 'utf8') return encodingOps.utf8; + if (encoding === 'ucs2') return encodingOps.ucs2; + encoding = StringPrototypeToLowerCase(encoding); + if (encoding === 'utf8') return encodingOps.utf8; + if (encoding === 'ucs2') return encodingOps.ucs2; + break; + case 5: + if (encoding === 'utf-8') return encodingOps.utf8; + if (encoding === 'ascii') return encodingOps.ascii; + if (encoding === 'ucs-2') return encodingOps.ucs2; + encoding = StringPrototypeToLowerCase(encoding); + if (encoding === 'utf-8') return encodingOps.utf8; + if (encoding === 'ascii') return encodingOps.ascii; + if (encoding === 'ucs-2') return encodingOps.ucs2; + break; + case 7: + if (encoding === 'utf16le' || + StringPrototypeToLowerCase(encoding) === 'utf16le') + return encodingOps.utf16le; + break; + case 8: + if (encoding === 'utf-16le' || + StringPrototypeToLowerCase(encoding) === 'utf-16le') + return encodingOps.utf16le; + break; + case 6: + if (encoding === 'latin1' || encoding === 'binary') + return encodingOps.latin1; + if (encoding === 'base64') return encodingOps.base64; + encoding = StringPrototypeToLowerCase(encoding); + if (encoding === 'latin1' || encoding === 'binary') + return encodingOps.latin1; + if (encoding === 'base64') return encodingOps.base64; + break; + case 3: + if (encoding === 'hex' || StringPrototypeToLowerCase(encoding) === 'hex') + return encodingOps.hex; + break; + case 9: + if (encoding === 'base64url' || + StringPrototypeToLowerCase(encoding) === 'base64url') + return encodingOps.base64url; + break; + } +} + +function byteLength(string, encoding) { + if (typeof string !== 'string') { + if (isArrayBufferView(string) || isAnyArrayBuffer(string)) { + return string.byteLength; + } + + throw new ERR_INVALID_ARG_TYPE( + 'string', ['string', 'Buffer', 'ArrayBuffer'], string, + ); + } + + const len = string.length; + if (len === 0) + return 0; + + if (!encoding || encoding === 'utf8') { + return byteLengthUtf8(string); + } + + if (encoding === 'ascii') { + return len; + } + + const ops = getEncodingOps(encoding); + if (ops === undefined) { + // TODO (ronag): Makes more sense to throw here. + // throw new ERR_UNKNOWN_ENCODING(encoding); + return byteLengthUtf8(string); + } + + return ops.byteLength(string); +} + +Buffer.byteLength = byteLength; + +// For backwards compatibility. +ObjectDefineProperty(Buffer.prototype, 'parent', { + __proto__: null, + enumerable: true, + get() { + if (!(this instanceof Buffer)) + return undefined; + return this.buffer; + }, +}); +ObjectDefineProperty(Buffer.prototype, 'offset', { + __proto__: null, + enumerable: true, + get() { + if (!(this instanceof Buffer)) + return undefined; + return this.byteOffset; + }, +}); + +Buffer.prototype.copy = + function copy(target, targetStart, sourceStart, sourceEnd) { + return copyImpl(this, target, targetStart, sourceStart, sourceEnd); + }; + +// No need to verify that "buf.length <= MAX_UINT32" since it's a read-only +// property of a typed array. +// This behaves neither like String nor Uint8Array in that we set start/end +// to their upper/lower bounds if the value passed is out of range. 
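+// An illustrative sketch of that clamping: Buffer.from('abcdef').toString('utf8', -10, 100)
+// returns 'abcdef' rather than throwing, because start is clamped to 0 and end to length.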
+Buffer.prototype.toString = function toString(encoding, start, end) { + if (arguments.length === 0) { + return this.utf8Slice(0, this.length); + } + + const len = this.length; + + if (start <= 0) + start = 0; + else if (start >= len) + return ''; + else + start = MathTrunc(start) || 0; + + if (end === undefined || end > len) + end = len; + else + end = MathTrunc(end) || 0; + + if (end <= start) + return ''; + + if (encoding === undefined) + return this.utf8Slice(start, end); + + const ops = getEncodingOps(encoding); + if (ops === undefined) + throw new ERR_UNKNOWN_ENCODING(encoding); + + return ops.slice(this, start, end); +}; + +Buffer.prototype.equals = function equals(otherBuffer) { + if (!isUint8Array(otherBuffer)) { + throw new ERR_INVALID_ARG_TYPE( + 'otherBuffer', ['Buffer', 'Uint8Array'], otherBuffer); + } + + if (this === otherBuffer) + return true; + const len = TypedArrayPrototypeGetByteLength(this); + if (len !== TypedArrayPrototypeGetByteLength(otherBuffer)) + return false; + + return len === 0 || _compare(this, otherBuffer) === 0; +}; + +let INSPECT_MAX_BYTES = 50; +// Override how buffers are presented by util.inspect(). +Buffer.prototype[customInspectSymbol] = function inspect(recurseTimes, ctx) { + const max = INSPECT_MAX_BYTES; + const actualMax = MathMin(max, this.length); + const remaining = this.length - max; + let str = StringPrototypeTrim(RegExpPrototypeSymbolReplace( + /(.{2})/g, this.hexSlice(0, actualMax), '$1 ')); + if (remaining > 0) + str += ` ... ${remaining} more byte${remaining > 1 ? 's' : ''}`; + // Inspect special properties as well, if possible. + if (ctx) { + let extras = false; + const filter = ctx.showHidden ? ALL_PROPERTIES : ONLY_ENUMERABLE; + const obj = { __proto__: null }; + ArrayPrototypeForEach(getOwnNonIndexProperties(this, filter), + (key) => { + extras = true; + obj[key] = this[key]; + }); + if (extras) { + if (this.length !== 0) + str += ', '; + // '[Object: null prototype] {'.length === 26 + // This is guarded with a test. + str += StringPrototypeSlice(utilInspect(obj, { + ...ctx, + breakLength: Infinity, + compact: true, + }), 27, -2); + } + } + let constructorName = 'Buffer'; + try { + const { constructor } = this; + if (typeof constructor === 'function' && ObjectPrototypeHasOwnProperty(constructor, 'name')) { + constructorName = constructor.name; + } + } catch { /* Ignore error and use default name */ } + return `<${constructorName} ${str}>`; +}; +Buffer.prototype.inspect = Buffer.prototype[customInspectSymbol]; + +Buffer.prototype.compare = function compare(target, + targetStart, + targetEnd, + sourceStart, + sourceEnd) { + if (!isUint8Array(target)) { + throw new ERR_INVALID_ARG_TYPE('target', ['Buffer', 'Uint8Array'], target); + } + if (arguments.length === 1) + return _compare(this, target); + + if (targetStart === undefined) + targetStart = 0; + else + validateOffset(targetStart, 'targetStart'); + + if (targetEnd === undefined) + targetEnd = target.length; + else + validateOffset(targetEnd, 'targetEnd', 0, target.length); + + if (sourceStart === undefined) + sourceStart = 0; + else + validateOffset(sourceStart, 'sourceStart'); + + if (sourceEnd === undefined) + sourceEnd = this.length; + else + validateOffset(sourceEnd, 'sourceEnd', 0, this.length); + + if (sourceStart >= sourceEnd) + return (targetStart >= targetEnd ? 
0 : -1); + if (targetStart >= targetEnd) + return 1; + + return compareOffset(this, target, targetStart, sourceStart, targetEnd, + sourceEnd); +}; + +// Finds either the first index of `val` in `buffer` at offset >= `byteOffset`, +// OR the last index of `val` in `buffer` at offset <= `byteOffset`. +// +// Arguments: +// - buffer - a Buffer to search +// - val - a string, Buffer, or number +// - byteOffset - an index into `buffer`; will be clamped to an int32 +// - encoding - an optional encoding, relevant if val is a string +// - dir - true for indexOf, false for lastIndexOf +function bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) { + validateBuffer(buffer); + + if (typeof byteOffset === 'string') { + encoding = byteOffset; + byteOffset = undefined; + } else if (byteOffset > 0x7fffffff) { + byteOffset = 0x7fffffff; + } else if (byteOffset < -0x80000000) { + byteOffset = -0x80000000; + } + // Coerce to Number. Values like null and [] become 0. + byteOffset = +byteOffset; + // If the offset is undefined, "foo", {}, coerces to NaN, search whole buffer. + if (NumberIsNaN(byteOffset)) { + byteOffset = dir ? 0 : (buffer.length || buffer.byteLength); + } + dir = !!dir; // Cast to bool. + + if (typeof val === 'number') + return indexOfNumber(buffer, val >>> 0, byteOffset, dir); + + let ops; + if (encoding === undefined) + ops = encodingOps.utf8; + else + ops = getEncodingOps(encoding); + + if (typeof val === 'string') { + if (ops === undefined) + throw new ERR_UNKNOWN_ENCODING(encoding); + return ops.indexOf(buffer, val, byteOffset, dir); + } + + if (isUint8Array(val)) { + const encodingVal = + (ops === undefined ? encodingsMap.utf8 : ops.encodingVal); + return indexOfBuffer(buffer, val, byteOffset, encodingVal, dir); + } + + throw new ERR_INVALID_ARG_TYPE( + 'value', ['number', 'string', 'Buffer', 'Uint8Array'], val, + ); +} + +Buffer.prototype.indexOf = function indexOf(val, byteOffset, encoding) { + return bidirectionalIndexOf(this, val, byteOffset, encoding, true); +}; + +Buffer.prototype.lastIndexOf = function lastIndexOf(val, byteOffset, encoding) { + return bidirectionalIndexOf(this, val, byteOffset, encoding, false); +}; + +Buffer.prototype.includes = function includes(val, byteOffset, encoding) { + return this.indexOf(val, byteOffset, encoding) !== -1; +}; + +// Usage: +// buffer.fill(number[, offset[, end]]) +// buffer.fill(buffer[, offset[, end]]) +// buffer.fill(string[, offset[, end]][, encoding]) +Buffer.prototype.fill = function fill(value, offset, end, encoding) { + return _fill(this, value, offset, end, encoding); +}; + +function _fill(buf, value, offset, end, encoding) { + if (typeof value === 'string') { + if (offset === undefined || typeof offset === 'string') { + encoding = offset; + offset = 0; + end = buf.length; + } else if (typeof end === 'string') { + encoding = end; + end = buf.length; + } + + const normalizedEncoding = normalizeEncoding(encoding); + if (normalizedEncoding === undefined) { + validateString(encoding, 'encoding'); + throw new ERR_UNKNOWN_ENCODING(encoding); + } + + if (value.length === 0) { + // If value === '' default to zero. + value = 0; + } else if (value.length === 1) { + // Fast path: If `value` fits into a single byte, use that numeric value. 
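+      // For example, buf.fill('a') reduces to buf.fill(0x61), the single-byte
+      // fast path handled just below.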
+ if (normalizedEncoding === 'utf8') { + const code = StringPrototypeCharCodeAt(value, 0); + if (code < 128) { + value = code; + } + } else if (normalizedEncoding === 'latin1') { + value = StringPrototypeCharCodeAt(value, 0); + } + } + } else { + encoding = undefined; + } + + if (offset === undefined) { + offset = 0; + end = buf.length; + } else { + validateOffset(offset, 'offset'); + // Invalid ranges are not set to a default, so can range check early. + if (end === undefined) { + end = buf.length; + } else { + validateOffset(end, 'end', 0, buf.length); + } + if (offset >= end) + return buf; + } + + + if (typeof value === 'number') { + // OOB check + const byteLen = TypedArrayPrototypeGetByteLength(buf); + const fillLength = end - offset; + if (offset > end || fillLength + offset > byteLen) + throw new ERR_BUFFER_OUT_OF_BOUNDS(); + + TypedArrayPrototypeFill(buf, value, offset, end); + } else { + const res = bindingFill(buf, value, offset, end, encoding); + if (res < 0) { + if (res === -1) + throw new ERR_INVALID_ARG_VALUE('value', value); + throw new ERR_BUFFER_OUT_OF_BOUNDS(); + } + } + + return buf; +} + +Buffer.prototype.write = function write(string, offset, length, encoding) { + // Buffer#write(string); + if (offset === undefined) { + return this.utf8Write(string, 0, this.length); + } + // Buffer#write(string, encoding) + if (length === undefined && typeof offset === 'string') { + encoding = offset; + length = this.length; + offset = 0; + + // Buffer#write(string, offset[, length][, encoding]) + } else { + validateOffset(offset, 'offset', 0, this.length); + + const remaining = this.length - offset; + + if (length === undefined) { + length = remaining; + } else if (typeof length === 'string') { + encoding = length; + length = remaining; + } else { + validateOffset(length, 'length', 0, this.length); + if (length > remaining) + length = remaining; + } + } + + if (!encoding || encoding === 'utf8') + return this.utf8Write(string, offset, length); + if (encoding === 'ascii') + return this.asciiWrite(string, offset, length); + + const ops = getEncodingOps(encoding); + if (ops === undefined) + throw new ERR_UNKNOWN_ENCODING(encoding); + return ops.write(this, string, offset, length); +}; + +Buffer.prototype.toJSON = function toJSON() { + if (this.length > 0) { + const data = new Array(this.length); + for (let i = 0; i < this.length; ++i) + data[i] = this[i]; + return { type: 'Buffer', data }; + } + return { type: 'Buffer', data: [] }; +}; + +function adjustOffset(offset, length) { + // Use Math.trunc() to convert offset to an integer value that can be larger + // than an Int32. Hence, don't use offset | 0 or similar techniques. + offset = MathTrunc(offset); + if (offset === 0) { + return 0; + } + if (offset < 0) { + offset += length; + return offset > 0 ? offset : 0; + } + if (offset < length) { + return offset; + } + return NumberIsNaN(offset) ? 0 : length; +} + +Buffer.prototype.subarray = function subarray(start, end) { + const srcLength = this.length; + start = adjustOffset(start, srcLength); + end = end !== undefined ? adjustOffset(end, srcLength) : srcLength; + const newLength = end > start ? 
end - start : 0; + return new FastBuffer(this.buffer, this.byteOffset + start, newLength); +}; + +Buffer.prototype.slice = function slice(start, end) { + return this.subarray(start, end); +}; + +function swap(b, n, m) { + const i = b[n]; + b[n] = b[m]; + b[m] = i; +} + +Buffer.prototype.swap16 = function swap16() { + // For Buffer.length < 128, it's generally faster to + // do the swap in javascript. For larger buffers, + // dropping down to the native code is faster. + const len = this.length; + if (len % 2 !== 0) + throw new ERR_INVALID_BUFFER_SIZE('16-bits'); + if (len < 128) { + for (let i = 0; i < len; i += 2) + swap(this, i, i + 1); + return this; + } + return _swap16(this); +}; + +Buffer.prototype.swap32 = function swap32() { + // For Buffer.length < 192, it's generally faster to + // do the swap in javascript. For larger buffers, + // dropping down to the native code is faster. + const len = this.length; + if (len % 4 !== 0) + throw new ERR_INVALID_BUFFER_SIZE('32-bits'); + if (len < 192) { + for (let i = 0; i < len; i += 4) { + swap(this, i, i + 3); + swap(this, i + 1, i + 2); + } + return this; + } + return _swap32(this); +}; + +Buffer.prototype.swap64 = function swap64() { + // For Buffer.length < 192, it's generally faster to + // do the swap in javascript. For larger buffers, + // dropping down to the native code is faster. + const len = this.length; + if (len % 8 !== 0) + throw new ERR_INVALID_BUFFER_SIZE('64-bits'); + if (len < 192) { + for (let i = 0; i < len; i += 8) { + swap(this, i, i + 7); + swap(this, i + 1, i + 6); + swap(this, i + 2, i + 5); + swap(this, i + 3, i + 4); + } + return this; + } + return _swap64(this); +}; + +Buffer.prototype.toLocaleString = Buffer.prototype.toString; + +let transcode; +if (internalBinding('config').hasIntl) { + const { + icuErrName, + transcode: _transcode, + } = internalBinding('icu'); + + // Transcodes the Buffer from one encoding to another, returning a new + // Buffer instance. + transcode = function transcode(source, fromEncoding, toEncoding) { + if (!isUint8Array(source)) { + throw new ERR_INVALID_ARG_TYPE('source', + ['Buffer', 'Uint8Array'], source); + } + if (source.length === 0) return Buffer.alloc(0); + + fromEncoding = normalizeEncoding(fromEncoding) || fromEncoding; + toEncoding = normalizeEncoding(toEncoding) || toEncoding; + const result = _transcode(source, fromEncoding, toEncoding); + if (typeof result !== 'number') + return result; + + const code = icuErrName(result); + const err = genericNodeError( + `Unable to transcode Buffer [${code}]`, + { code: code, errno: result }, + ); + throw err; + }; +} + +function btoa(input) { + // The implementation here has not been performance optimized in any way and + // should not be. + // Refs: https://github.com/nodejs/node/pull/38433#issuecomment-828426932 + if (arguments.length === 0) { + throw new ERR_MISSING_ARGS('input'); + } + const result = _btoa(`${input}`); + if (result === -1) { + throw lazyDOMException('Invalid character', 'InvalidCharacterError'); + } + return result; +} + +function atob(input) { + if (arguments.length === 0) { + throw new ERR_MISSING_ARGS('input'); + } + + const result = _atob(`${input}`); + + switch (result) { + case -2: // Invalid character + throw lazyDOMException('Invalid character', 'InvalidCharacterError'); + case -1: // Single character remained + throw lazyDOMException( + 'The string to be decoded is not correctly encoded.', + 'InvalidCharacterError'); + case -3: // Possible overflow + // TODO(@anonrig): Throw correct error in here. 
+ throw lazyDOMException('The input causes overflow.', 'InvalidCharacterError'); + default: + return result; + } +} + +function isUtf8(input) { + if (isTypedArray(input) || isAnyArrayBuffer(input)) { + return bindingIsUtf8(input); + } + + throw new ERR_INVALID_ARG_TYPE('input', ['ArrayBuffer', 'Buffer', 'TypedArray'], input); +} + +function isAscii(input) { + if (isTypedArray(input) || isAnyArrayBuffer(input)) { + return bindingIsAscii(input); + } + + throw new ERR_INVALID_ARG_TYPE('input', ['ArrayBuffer', 'Buffer', 'TypedArray'], input); +} + +module.exports = { + Buffer, + SlowBuffer: deprecate( + SlowBuffer, + 'SlowBuffer() is deprecated. Please use Buffer.allocUnsafeSlow()', + 'DEP0030'), + transcode, + isUtf8, + isAscii, + + // Legacy + kMaxLength, + kStringMaxLength, + btoa, + atob, +}; + +ObjectDefineProperties(module.exports, { + constants: { + __proto__: null, + configurable: false, + enumerable: true, + value: constants, + }, + INSPECT_MAX_BYTES: { + __proto__: null, + configurable: true, + enumerable: true, + get() { return INSPECT_MAX_BYTES; }, + set(val) { + validateNumber(val, 'INSPECT_MAX_BYTES', 0); + INSPECT_MAX_BYTES = val; + }, + }, +}); + +defineLazyProperties( + module.exports, + 'internal/blob', + ['Blob', 'resolveObjectURL'], +); +defineLazyProperties( + module.exports, + 'internal/file', + ['File'], +); \ No newline at end of file diff --git a/.codesandbox/node/dfn.js b/.codesandbox/node/dfn.js new file mode 100644 index 00000000..29b453dc --- /dev/null +++ b/.codesandbox/node/dfn.js @@ -0,0 +1,118 @@ +var dfnMapTarget = -1; +var dfnMapDone = 0; +var dfnMap = {}; +document.addEventListener('DOMContentLoaded', function (event) { + var links = []; + dfnMapTarget = document.links.length; + for (var i = 0; i < dfnMapTarget; i += 1) + links[i] = document.links[i]; + var inc = 100; + for (var i = 0; i < dfnMapTarget; i += inc) { + setTimeout(function (j) { + for (var k = j; k < j+inc && k < dfnMapTarget; k += 1) { + if (links[k].href.indexOf('#') >= 0) { + if (links[k].className != "no-backref" && + links[k].parentNode.className != "no-backref") { + var s = links[k].href.substr(links[k].href.indexOf('#') + 1); + if (!(s in dfnMap)) + dfnMap[s] = []; + dfnMap[s].push(links[k]); + } + } + dfnMapDone += 1; + } + }, 0, i); + } + document.body.className += " dfnEnabled"; +}, false); + +var dfnPanel; +var dfnUniqueId = 0; +var dfnTimeout; +document.addEventListener('click', dfnShow, false); +function dfnShow(event) { + if (dfnTimeout) { + clearTimeout(dfnTimeout); + dfnTimeout = null; + } + if (dfnPanel) { + dfnPanel.parentNode.removeChild(dfnPanel); + dfnPanel = null; + } + if (dfnMapDone == dfnMapTarget) { + var node = event.target; + while (node && (node.nodeType != event.target.ELEMENT_NODE || node.tagName != "DFN")) + node = node.parentNode; + if (node) { + var panel = document.createElement('div'); + panel.className = 'dfnPanel'; + if (node.id) { + var permalinkP = document.createElement('p'); + var permalinkA = document.createElement('a'); + permalinkA.href = '#' + node.id; + permalinkA.textContent = '#' + node.id; + permalinkP.appendChild(permalinkA); + panel.appendChild(permalinkP); + } + var p = document.createElement('p'); + panel.appendChild(p); + if (node.id in dfnMap || node.parentNode.id in dfnMap) { + p.textContent = 'Referenced in:'; + var ul = document.createElement('ul'); + var lastHeader; + var lastLi; + var n; + var sourceLinks = []; + if (node.id in dfnMap) + for (var i = 0; i < dfnMap[node.id].length; i += 1) + sourceLinks.push(dfnMap[node.id][i]); + if 
(node.parentNode.id in dfnMap) + for (var i = 0; i < dfnMap[node.parentNode.id].length; i += 1) + sourceLinks.push(dfnMap[node.parentNode.id][i]); + for (var i = 0; i < sourceLinks.length; i += 1) { + var link = sourceLinks[i]; + var header = dfnGetCaption(link); + var a = document.createElement('a'); + if (!link.id) + link.id = 'dfnReturnLink-' + dfnUniqueId++; + a.href = '#' + link.id; + if (header != lastHeader) { + lastHeader = header; + n = 1; + var li = document.createElement('li'); + var cloneHeader = header.cloneNode(true); + while (cloneHeader.hasChildNodes()) + if (cloneHeader.firstChild.className == 'section-link') + cloneHeader.removeChild(cloneHeader.firstChild); + else + a.appendChild(cloneHeader.firstChild); + lastLi = li; + li.appendChild(a); + ul.appendChild(li); + } else { + n += 1; + a.appendChild(document.createTextNode('(' + n + ')')); + lastLi.appendChild(document.createTextNode(' ')); + lastLi.appendChild(a); + } + } + panel.appendChild(ul); + } else { + p.textContent = 'No references in this file.'; + } + node.appendChild(panel); + dfnPanel = panel; + } + } else { + dfnTimeout = setTimeout(dfnShow, 250, event); + } +} + +function dfnGetCaption(link) { + var node = link; + while (node && !(node.parentNode.tagName == "DIV" && node.parentNode.className == "section")) + node = node.parentNode; + while (node && (node.nodeType != node.ELEMENT_NODE || !node.tagName.match(/^H[1-6]$/))) + node = node.previousSibling; + return node; +} \ No newline at end of file diff --git a/.codesandbox/node/dgram.js b/.codesandbox/node/dgram.js new file mode 100644 index 00000000..c77ea89c --- /dev/null +++ b/.codesandbox/node/dgram.js @@ -0,0 +1,1111 @@ +'use strict'; + +const { + Array, + ArrayIsArray, + ArrayPrototypePush, + FunctionPrototypeBind, + FunctionPrototypeCall, + ObjectDefineProperty, + ObjectSetPrototypeOf, + ReflectApply, + SymbolAsyncDispose, + SymbolDispose, +} = primordials; + +const { + ErrnoException, + ExceptionWithHostPort, + codes: { + ERR_BUFFER_OUT_OF_BOUNDS, + ERR_INVALID_ARG_TYPE, + ERR_INVALID_FD_TYPE, + ERR_IP_BLOCKED, + ERR_MISSING_ARGS, + ERR_SOCKET_ALREADY_BOUND, + ERR_SOCKET_BAD_BUFFER_SIZE, + ERR_SOCKET_BUFFER_SIZE, + ERR_SOCKET_DGRAM_IS_CONNECTED, + ERR_SOCKET_DGRAM_NOT_CONNECTED, + ERR_SOCKET_DGRAM_NOT_RUNNING, + }, +} = require('internal/errors'); +const { + kStateSymbol, + _createSocketHandle, + newHandle, +} = require('internal/dgram'); +const { isIP } = require('internal/net'); +const { + isInt32, + validateAbortSignal, + validateString, + validateNumber, + validatePort, + validateUint32, +} = require('internal/validators'); +const { Buffer } = require('buffer'); +const { deprecate, guessHandleType, promisify } = require('internal/util'); +const { isArrayBufferView } = require('internal/util/types'); +const EventEmitter = require('events'); +const { addAbortListener } = require('internal/events/abort_listener'); +const { + defaultTriggerAsyncIdScope, + symbols: { async_id_symbol, owner_symbol }, +} = require('internal/async_hooks'); +const { UV_UDP_REUSEADDR } = internalBinding('constants').os; + +const { + constants: { UV_UDP_IPV6ONLY, UV_UDP_REUSEPORT }, + UDP, + SendWrap, +} = internalBinding('udp_wrap'); + +const dc = require('diagnostics_channel'); +const udpSocketChannel = dc.channel('udp.socket'); + +const BIND_STATE_UNBOUND = 0; +const BIND_STATE_BINDING = 1; +const BIND_STATE_BOUND = 2; + +const CONNECT_STATE_DISCONNECTED = 0; +const CONNECT_STATE_CONNECTING = 1; +const CONNECT_STATE_CONNECTED = 2; + +const RECV_BUFFER = true; +const 
SEND_BUFFER = false; + +// Lazily loaded +let _cluster = null; +function lazyLoadCluster() { + return _cluster ??= require('cluster'); +} +let _blockList = null; +function lazyLoadBlockList() { + return _blockList ??= require('internal/blocklist').BlockList; +} + +function Socket(type, listener) { + FunctionPrototypeCall(EventEmitter, this); + let lookup; + let recvBufferSize; + let sendBufferSize; + let receiveBlockList; + let sendBlockList; + + let options; + if (type !== null && typeof type === 'object') { + options = type; + type = options.type; + lookup = options.lookup; + if (options.recvBufferSize) { + validateUint32(options.recvBufferSize, 'options.recvBufferSize'); + } + if (options.sendBufferSize) { + validateUint32(options.sendBufferSize, 'options.sendBufferSize'); + } + recvBufferSize = options.recvBufferSize; + sendBufferSize = options.sendBufferSize; + if (options.receiveBlockList) { + if (!lazyLoadBlockList().isBlockList(options.receiveBlockList)) { + throw new ERR_INVALID_ARG_TYPE('options.receiveBlockList', 'net.BlockList', options.receiveBlockList); + } + receiveBlockList = options.receiveBlockList; + } + if (options.sendBlockList) { + if (!lazyLoadBlockList().isBlockList(options.sendBlockList)) { + throw new ERR_INVALID_ARG_TYPE('options.sendBlockList', 'net.BlockList', options.sendBlockList); + } + sendBlockList = options.sendBlockList; + } + } + + const handle = newHandle(type, lookup); + handle[owner_symbol] = this; + + this[async_id_symbol] = handle.getAsyncId(); + this.type = type; + + if (typeof listener === 'function') + this.on('message', listener); + + this[kStateSymbol] = { + handle, + receiving: false, + bindState: BIND_STATE_UNBOUND, + connectState: CONNECT_STATE_DISCONNECTED, + queue: undefined, + reuseAddr: options?.reuseAddr, // Use UV_UDP_REUSEADDR if true. + reusePort: options?.reusePort, + ipv6Only: options?.ipv6Only, + recvBufferSize, + sendBufferSize, + receiveBlockList, + sendBlockList, + }; + + if (options?.signal !== undefined) { + const { signal } = options; + validateAbortSignal(signal, 'options.signal'); + const onAborted = () => { + if (this[kStateSymbol].handle) this.close(); + }; + if (signal.aborted) { + onAborted(); + } else { + const disposable = addAbortListener(signal, onAborted); + this.once('close', disposable[SymbolDispose]); + } + } + if (udpSocketChannel.hasSubscribers) { + udpSocketChannel.publish({ + socket: this, + }); + } +} +ObjectSetPrototypeOf(Socket.prototype, EventEmitter.prototype); +ObjectSetPrototypeOf(Socket, EventEmitter); + + +function createSocket(type, listener) { + return new Socket(type, listener); +} + + +function startListening(socket) { + const state = socket[kStateSymbol]; + + state.handle.onmessage = onMessage; + state.handle.onerror = onError; + state.handle.recvStart(); + state.receiving = true; + state.bindState = BIND_STATE_BOUND; + + if (state.recvBufferSize) + bufferSize(socket, state.recvBufferSize, RECV_BUFFER); + + if (state.sendBufferSize) + bufferSize(socket, state.sendBufferSize, SEND_BUFFER); + + socket.emit('listening'); +} + +function replaceHandle(self, newHandle) { + const state = self[kStateSymbol]; + const oldHandle = state.handle; + // Sync the old handle state to new handle + if (!oldHandle.hasRef() && typeof newHandle.unref === 'function') { + newHandle.unref(); + } + // Set up the handle that we got from primary. 
+ newHandle.lookup = oldHandle.lookup; + newHandle.bind = oldHandle.bind; + newHandle.send = oldHandle.send; + newHandle[owner_symbol] = self; + + // Replace the existing handle by the handle we got from primary. + oldHandle.close(); + state.handle = newHandle; +} + +function bufferSize(self, size, buffer) { + if (size >>> 0 !== size) + throw new ERR_SOCKET_BAD_BUFFER_SIZE(); + + const ctx = {}; + const ret = self[kStateSymbol].handle.bufferSize(size, buffer, ctx); + if (ret === undefined) { + throw new ERR_SOCKET_BUFFER_SIZE(ctx); + } + return ret; +} + +// Query primary process to get the server handle and utilize it. +function bindServerHandle(self, options, errCb) { + const cluster = lazyLoadCluster(); + + const state = self[kStateSymbol]; + cluster._getServer(self, options, (err, handle) => { + if (err) { + // Do not call callback if socket is closed + if (state.handle) { + errCb(err); + } + return; + } + + if (!state.handle) { + // Handle has been closed in the mean time. + return handle.close(); + } + + replaceHandle(self, handle); + startListening(self); + }); +} + +Socket.prototype.bind = function(port_, address_ /* , callback */) { + let port = port_; + + healthCheck(this); + const state = this[kStateSymbol]; + + if (state.bindState !== BIND_STATE_UNBOUND) + throw new ERR_SOCKET_ALREADY_BOUND(); + + state.bindState = BIND_STATE_BINDING; + + const cb = arguments.length && arguments[arguments.length - 1]; + if (typeof cb === 'function') { + function removeListeners() { + this.removeListener('error', removeListeners); + this.removeListener('listening', onListening); + } + + function onListening() { + FunctionPrototypeCall(removeListeners, this); + FunctionPrototypeCall(cb, this); + } + + this.on('error', removeListeners); + this.on('listening', onListening); + } + + if (port !== null && + typeof port === 'object' && + typeof port.recvStart === 'function') { + replaceHandle(this, port); + startListening(this); + return this; + } + + // Open an existing fd instead of creating a new one. + if (port !== null && typeof port === 'object' && + isInt32(port.fd) && port.fd > 0) { + const fd = port.fd; + const exclusive = !!port.exclusive; + const state = this[kStateSymbol]; + + const cluster = lazyLoadCluster(); + + if (cluster.isWorker && !exclusive) { + bindServerHandle(this, { + address: null, + port: null, + addressType: this.type, + fd, + flags: null, + }, (err) => { + // Callback to handle error. + const ex = new ErrnoException(err, 'open'); + state.bindState = BIND_STATE_UNBOUND; + this.emit('error', ex); + }); + return this; + } + + const type = guessHandleType(fd); + if (type !== 'UDP') + throw new ERR_INVALID_FD_TYPE(type); + const err = state.handle.open(fd); + + if (err) + throw new ErrnoException(err, 'open'); + + startListening(this); + return this; + } + + let address; + let exclusive; + + if (port !== null && typeof port === 'object') { + address = port.address || ''; + exclusive = !!port.exclusive; + port = port.port; + } else { + address = typeof address_ === 'function' ? 
'' : address_; + exclusive = false; + } + + // Defaulting address for bind to all interfaces + if (!address) { + if (this.type === 'udp4') + address = '0.0.0.0'; + else + address = '::'; + } + + // Resolve address first + state.handle.lookup(address, (err, ip) => { + if (!state.handle) + return; // Handle has been closed in the mean time + + if (err) { + state.bindState = BIND_STATE_UNBOUND; + this.emit('error', err); + return; + } + + const cluster = lazyLoadCluster(); + + let flags = 0; + if (state.reuseAddr) + flags |= UV_UDP_REUSEADDR; + if (state.ipv6Only) + flags |= UV_UDP_IPV6ONLY; + if (state.reusePort) { + exclusive = true; + flags |= UV_UDP_REUSEPORT; + } + + if (cluster.isWorker && !exclusive) { + bindServerHandle(this, { + address: ip, + port: port, + addressType: this.type, + fd: -1, + flags: flags, + }, (err) => { + // Callback to handle error. + const ex = new ExceptionWithHostPort(err, 'bind', ip, port); + state.bindState = BIND_STATE_UNBOUND; + this.emit('error', ex); + }); + } else { + const err = state.handle.bind(ip, port || 0, flags); + if (err) { + const ex = new ExceptionWithHostPort(err, 'bind', ip, port); + state.bindState = BIND_STATE_UNBOUND; + this.emit('error', ex); + // Todo: close? + return; + } + + startListening(this); + } + }); + + return this; +}; + +Socket.prototype.connect = function(port, address, callback) { + port = validatePort(port, 'Port', false); + if (typeof address === 'function') { + callback = address; + address = ''; + } else if (address === undefined) { + address = ''; + } + + validateString(address, 'address'); + + const state = this[kStateSymbol]; + + if (state.connectState !== CONNECT_STATE_DISCONNECTED) + throw new ERR_SOCKET_DGRAM_IS_CONNECTED(); + + state.connectState = CONNECT_STATE_CONNECTING; + if (state.bindState === BIND_STATE_UNBOUND) + this.bind({ port: 0, exclusive: true }, null); + + if (state.bindState !== BIND_STATE_BOUND) { + enqueue(this, FunctionPrototypeBind(_connect, this, + port, address, callback)); + return; + } + + ReflectApply(_connect, this, [port, address, callback]); +}; + + +function _connect(port, address, callback) { + const state = this[kStateSymbol]; + if (callback) + this.once('connect', callback); + + const afterDns = (ex, ip) => { + defaultTriggerAsyncIdScope( + this[async_id_symbol], + doConnect, + ex, this, ip, address, port, callback, + ); + }; + + state.handle.lookup(address, afterDns); +} + + +function doConnect(ex, self, ip, address, port, callback) { + const state = self[kStateSymbol]; + if (!state.handle) + return; + if (!ex && state.sendBlockList?.check(ip, `ipv${isIP(ip)}`)) { + ex = new ERR_IP_BLOCKED(ip); + } + if (!ex) { + const err = state.handle.connect(ip, port); + if (err) { + ex = new ExceptionWithHostPort(err, 'connect', address, port); + } + } + + if (ex) { + state.connectState = CONNECT_STATE_DISCONNECTED; + return process.nextTick(() => { + if (callback) { + self.removeListener('connect', callback); + callback(ex); + } else { + self.emit('error', ex); + } + }); + } + + state.connectState = CONNECT_STATE_CONNECTED; + process.nextTick(() => self.emit('connect')); +} + + +Socket.prototype.disconnect = function() { + const state = this[kStateSymbol]; + if (state.connectState !== CONNECT_STATE_CONNECTED) + throw new ERR_SOCKET_DGRAM_NOT_CONNECTED(); + + const err = state.handle.disconnect(); + if (err) + throw new ErrnoException(err, 'connect'); + else + state.connectState = CONNECT_STATE_DISCONNECTED; +}; + + +// Thin wrapper around `send`, here for compatibility with dgram_legacy.js 
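+// A hedged usage sketch (socket, port and address values are illustrative):
+//   const sock = require('dgram').createSocket('udp4');
+//   sock.sendto(Buffer.from('ping'), 0, 4, 41234, '127.0.0.1', (err) => sock.close());
+// Unlike send(), the offset, length, port and address arguments are all required here.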
+Socket.prototype.sendto = function(buffer, + offset, + length, + port, + address, + callback) { + validateNumber(offset, 'offset'); + validateNumber(length, 'length'); + validateNumber(port, 'port'); + validateString(address, 'address'); + + this.send(buffer, offset, length, port, address, callback); +}; + + +function sliceBuffer(buffer, offset, length) { + if (typeof buffer === 'string') { + buffer = Buffer.from(buffer); + } else if (!isArrayBufferView(buffer)) { + throw new ERR_INVALID_ARG_TYPE('buffer', + ['Buffer', + 'TypedArray', + 'DataView', + 'string'], + buffer); + } + + offset = offset >>> 0; + length = length >>> 0; + if (offset > buffer.byteLength) { + throw new ERR_BUFFER_OUT_OF_BOUNDS('offset'); + } + + if (offset + length > buffer.byteLength) { + throw new ERR_BUFFER_OUT_OF_BOUNDS('length'); + } + + return Buffer.from(buffer.buffer, buffer.byteOffset + offset, length); +} + + +function fixBufferList(list) { + const newlist = new Array(list.length); + + for (let i = 0, l = list.length; i < l; i++) { + const buf = list[i]; + if (typeof buf === 'string') + newlist[i] = Buffer.from(buf); + else if (Buffer.isBuffer(buf)) + newlist[i] = buf; + else if (!isArrayBufferView(buf)) + return null; + else + newlist[i] = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength); + } + + return newlist; +} + + +function enqueue(self, toEnqueue) { + const state = self[kStateSymbol]; + + // If the send queue hasn't been initialized yet, do it, and install an + // event handler that flushes the send queue after binding is done. + if (state.queue === undefined) { + state.queue = []; + self.once(EventEmitter.errorMonitor, onListenError); + self.once('listening', onListenSuccess); + } + ArrayPrototypePush(state.queue, toEnqueue); +} + + +function onListenSuccess() { + this.removeListener(EventEmitter.errorMonitor, onListenError); + FunctionPrototypeCall(clearQueue, this); +} + + +function onListenError(err) { + this.removeListener('listening', onListenSuccess); + this[kStateSymbol].queue = undefined; +} + + +function clearQueue() { + const state = this[kStateSymbol]; + const queue = state.queue; + state.queue = undefined; + + // Flush the send queue. 
+ for (const queueEntry of queue) + queueEntry(); +} + +// valid combinations +// For connectionless sockets +// send(buffer, offset, length, port, address, callback) +// send(buffer, offset, length, port, address) +// send(buffer, offset, length, port, callback) +// send(buffer, offset, length, port) +// send(bufferOrList, port, address, callback) +// send(bufferOrList, port, address) +// send(bufferOrList, port, callback) +// send(bufferOrList, port) +// For connected sockets +// send(buffer, offset, length, callback) +// send(buffer, offset, length) +// send(bufferOrList, callback) +// send(bufferOrList) +Socket.prototype.send = function(buffer, + offset, + length, + port, + address, + callback) { + + let list; + const state = this[kStateSymbol]; + const connected = state.connectState === CONNECT_STATE_CONNECTED; + if (!connected) { + if (address || (port && typeof port !== 'function')) { + buffer = sliceBuffer(buffer, offset, length); + } else { + callback = port; + port = offset; + address = length; + } + } else { + if (typeof length === 'number') { + buffer = sliceBuffer(buffer, offset, length); + if (typeof port === 'function') { + callback = port; + port = null; + } + } else { + callback = offset; + } + + if (port || address) + throw new ERR_SOCKET_DGRAM_IS_CONNECTED(); + } + + if (!ArrayIsArray(buffer)) { + if (typeof buffer === 'string') { + list = [ Buffer.from(buffer) ]; + } else if (!isArrayBufferView(buffer)) { + throw new ERR_INVALID_ARG_TYPE('buffer', + ['Buffer', + 'TypedArray', + 'DataView', + 'string'], + buffer); + } else { + list = [ buffer ]; + } + } else if (!(list = fixBufferList(buffer))) { + throw new ERR_INVALID_ARG_TYPE('buffer list arguments', + ['Buffer', + 'TypedArray', + 'DataView', + 'string'], + buffer); + } + + if (!connected) + port = validatePort(port, 'Port', false); + + // Normalize callback so it's either a function or undefined but not anything + // else. + if (typeof callback !== 'function') + callback = undefined; + + if (typeof address === 'function') { + callback = address; + address = undefined; + } else if (address != null) { + validateString(address, 'address'); + } + + healthCheck(this); + + if (state.bindState === BIND_STATE_UNBOUND) + this.bind({ port: 0, exclusive: true }, null); + + if (list.length === 0) + ArrayPrototypePush(list, Buffer.alloc(0)); + + // If the socket hasn't been bound yet, push the outbound packet onto the + // send queue and send after binding is complete. + if (state.bindState !== BIND_STATE_BOUND) { + enqueue(this, FunctionPrototypeBind(this.send, this, + list, port, address, callback)); + return; + } + + const afterDns = (ex, ip) => { + defaultTriggerAsyncIdScope( + this[async_id_symbol], + doSend, + ex, this, ip, list, address, port, callback, + ); + }; + + if (!connected) { + state.handle.lookup(address, afterDns); + } else { + afterDns(null, null); + } +}; + +function doSend(ex, self, ip, list, address, port, callback) { + const state = self[kStateSymbol]; + + if (ex) { + if (typeof callback === 'function') { + process.nextTick(callback, ex); + return; + } + + process.nextTick(() => self.emit('error', ex)); + return; + } else if (!state.handle) { + return; + } + + if (ip && state.sendBlockList?.check(ip, `ipv${isIP(ip)}`)) { + if (callback) { + process.nextTick(callback, new ERR_IP_BLOCKED(ip)); + } + return; + } + + const req = new SendWrap(); + req.list = list; // Keep reference alive. 
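+  // Without this reference the buffers could be garbage-collected before the
+  // asynchronous send completes.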
+ req.address = address; + req.port = port; + if (callback) { + req.callback = callback; + req.oncomplete = afterSend; + } + + let err; + if (port) + err = state.handle.send(req, list, list.length, port, ip, !!callback); + else + err = state.handle.send(req, list, list.length, !!callback); + + if (err >= 1) { + // Synchronous finish. The return code is msg_length + 1 so that we can + // distinguish between synchronous success and asynchronous success. + if (callback) + process.nextTick(callback, null, err - 1); + return; + } + + if (err && callback) { + // Don't emit as error, dgram_legacy.js compatibility + const ex = new ExceptionWithHostPort(err, 'send', address, port); + process.nextTick(callback, ex); + } +} + +function afterSend(err, sent) { + if (err) { + err = new ExceptionWithHostPort(err, 'send', this.address, this.port); + } else { + err = null; + } + + this.callback(err, sent); +} + +Socket.prototype.close = function(callback) { + const state = this[kStateSymbol]; + const queue = state.queue; + + if (typeof callback === 'function') + this.on('close', callback); + + if (queue !== undefined) { + ArrayPrototypePush(queue, FunctionPrototypeBind(this.close, this)); + return this; + } + + healthCheck(this); + stopReceiving(this); + state.handle.close(); + state.handle = null; + defaultTriggerAsyncIdScope(this[async_id_symbol], + process.nextTick, + socketCloseNT, + this); + + return this; +}; + +Socket.prototype[SymbolAsyncDispose] = async function() { + if (!this[kStateSymbol].handle) { + return; + } + await FunctionPrototypeCall(promisify(this.close), this); +}; + + +function socketCloseNT(self) { + self.emit('close'); +} + + +Socket.prototype.address = function() { + healthCheck(this); + + const out = {}; + const err = this[kStateSymbol].handle.getsockname(out); + if (err) { + throw new ErrnoException(err, 'getsockname'); + } + + return out; +}; + +Socket.prototype.remoteAddress = function() { + healthCheck(this); + + const state = this[kStateSymbol]; + if (state.connectState !== CONNECT_STATE_CONNECTED) + throw new ERR_SOCKET_DGRAM_NOT_CONNECTED(); + + const out = {}; + const err = state.handle.getpeername(out); + if (err) + throw new ErrnoException(err, 'getpeername'); + + return out; +}; + + +Socket.prototype.setBroadcast = function(arg) { + const err = this[kStateSymbol].handle.setBroadcast(arg ? 1 : 0); + if (err) { + throw new ErrnoException(err, 'setBroadcast'); + } +}; + + +Socket.prototype.setTTL = function(ttl) { + validateNumber(ttl, 'ttl'); + + const err = this[kStateSymbol].handle.setTTL(ttl); + if (err) { + throw new ErrnoException(err, 'setTTL'); + } + + return ttl; +}; + + +Socket.prototype.setMulticastTTL = function(ttl) { + validateNumber(ttl, 'ttl'); + + const err = this[kStateSymbol].handle.setMulticastTTL(ttl); + if (err) { + throw new ErrnoException(err, 'setMulticastTTL'); + } + + return ttl; +}; + + +Socket.prototype.setMulticastLoopback = function(arg) { + const err = this[kStateSymbol].handle.setMulticastLoopback(arg ? 
1 : 0); + if (err) { + throw new ErrnoException(err, 'setMulticastLoopback'); + } + + return arg; // 0.4 compatibility +}; + + +Socket.prototype.setMulticastInterface = function(interfaceAddress) { + healthCheck(this); + validateString(interfaceAddress, 'interfaceAddress'); + + const err = this[kStateSymbol].handle.setMulticastInterface(interfaceAddress); + if (err) { + throw new ErrnoException(err, 'setMulticastInterface'); + } +}; + +Socket.prototype.addMembership = function(multicastAddress, + interfaceAddress) { + healthCheck(this); + + if (!multicastAddress) { + throw new ERR_MISSING_ARGS('multicastAddress'); + } + + const { handle } = this[kStateSymbol]; + const err = handle.addMembership(multicastAddress, interfaceAddress); + if (err) { + throw new ErrnoException(err, 'addMembership'); + } +}; + + +Socket.prototype.dropMembership = function(multicastAddress, + interfaceAddress) { + healthCheck(this); + + if (!multicastAddress) { + throw new ERR_MISSING_ARGS('multicastAddress'); + } + + const { handle } = this[kStateSymbol]; + const err = handle.dropMembership(multicastAddress, interfaceAddress); + if (err) { + throw new ErrnoException(err, 'dropMembership'); + } +}; + +Socket.prototype.addSourceSpecificMembership = function(sourceAddress, + groupAddress, + interfaceAddress) { + healthCheck(this); + + validateString(sourceAddress, 'sourceAddress'); + validateString(groupAddress, 'groupAddress'); + + const err = + this[kStateSymbol].handle.addSourceSpecificMembership(sourceAddress, + groupAddress, + interfaceAddress); + if (err) { + throw new ErrnoException(err, 'addSourceSpecificMembership'); + } +}; + + +Socket.prototype.dropSourceSpecificMembership = function(sourceAddress, + groupAddress, + interfaceAddress) { + healthCheck(this); + + validateString(sourceAddress, 'sourceAddress'); + validateString(groupAddress, 'groupAddress'); + + const err = + this[kStateSymbol].handle.dropSourceSpecificMembership(sourceAddress, + groupAddress, + interfaceAddress); + if (err) { + throw new ErrnoException(err, 'dropSourceSpecificMembership'); + } +}; + + +function healthCheck(socket) { + if (!socket[kStateSymbol].handle) { + // Error message from dgram_legacy.js. 
+ throw new ERR_SOCKET_DGRAM_NOT_RUNNING(); + } +} + + +function stopReceiving(socket) { + const state = socket[kStateSymbol]; + + if (!state.receiving) + return; + + state.handle.recvStop(); + state.receiving = false; +} + + +function onMessage(nread, handle, buf, rinfo) { + const self = handle[owner_symbol]; + if (nread < 0) { + return self.emit('error', new ErrnoException(nread, 'recvmsg')); + } + if (self[kStateSymbol]?.receiveBlockList?.check(rinfo.address, + rinfo.family?.toLocaleLowerCase())) { + return; + } + rinfo.size = buf.length; // compatibility + self.emit('message', buf, rinfo); +} + + +function onError(nread, handle, error) { + const self = handle[owner_symbol]; + return self.emit('error', error); +} + + +Socket.prototype.ref = function() { + const handle = this[kStateSymbol].handle; + + if (handle) + handle.ref(); + + return this; +}; + + +Socket.prototype.unref = function() { + const handle = this[kStateSymbol].handle; + + if (handle) + handle.unref(); + + return this; +}; + + +Socket.prototype.setRecvBufferSize = function(size) { + bufferSize(this, size, RECV_BUFFER); +}; + + +Socket.prototype.setSendBufferSize = function(size) { + bufferSize(this, size, SEND_BUFFER); +}; + + +Socket.prototype.getRecvBufferSize = function() { + return bufferSize(this, 0, RECV_BUFFER); +}; + + +Socket.prototype.getSendBufferSize = function() { + return bufferSize(this, 0, SEND_BUFFER); +}; + +Socket.prototype.getSendQueueSize = function() { + return this[kStateSymbol].handle.getSendQueueSize(); +}; + +Socket.prototype.getSendQueueCount = function() { + return this[kStateSymbol].handle.getSendQueueCount(); +}; + +// Deprecated private APIs. +ObjectDefineProperty(Socket.prototype, '_handle', { + __proto__: null, + get: deprecate(function() { + return this[kStateSymbol].handle; + }, 'Socket.prototype._handle is deprecated', 'DEP0112'), + set: deprecate(function(val) { + this[kStateSymbol].handle = val; + }, 'Socket.prototype._handle is deprecated', 'DEP0112'), +}); + + +ObjectDefineProperty(Socket.prototype, '_receiving', { + __proto__: null, + get: deprecate(function() { + return this[kStateSymbol].receiving; + }, 'Socket.prototype._receiving is deprecated', 'DEP0112'), + set: deprecate(function(val) { + this[kStateSymbol].receiving = val; + }, 'Socket.prototype._receiving is deprecated', 'DEP0112'), +}); + + +ObjectDefineProperty(Socket.prototype, '_bindState', { + __proto__: null, + get: deprecate(function() { + return this[kStateSymbol].bindState; + }, 'Socket.prototype._bindState is deprecated', 'DEP0112'), + set: deprecate(function(val) { + this[kStateSymbol].bindState = val; + }, 'Socket.prototype._bindState is deprecated', 'DEP0112'), +}); + + +ObjectDefineProperty(Socket.prototype, '_queue', { + __proto__: null, + get: deprecate(function() { + return this[kStateSymbol].queue; + }, 'Socket.prototype._queue is deprecated', 'DEP0112'), + set: deprecate(function(val) { + this[kStateSymbol].queue = val; + }, 'Socket.prototype._queue is deprecated', 'DEP0112'), +}); + + +ObjectDefineProperty(Socket.prototype, '_reuseAddr', { + __proto__: null, + get: deprecate(function() { + return this[kStateSymbol].reuseAddr; + }, 'Socket.prototype._reuseAddr is deprecated', 'DEP0112'), + set: deprecate(function(val) { + this[kStateSymbol].reuseAddr = val; + }, 'Socket.prototype._reuseAddr is deprecated', 'DEP0112'), +}); + + +Socket.prototype._healthCheck = deprecate(function() { + healthCheck(this); +}, 'Socket.prototype._healthCheck() is deprecated', 'DEP0112'); + + +Socket.prototype._stopReceiving = 
deprecate(function() { + stopReceiving(this); +}, 'Socket.prototype._stopReceiving() is deprecated', 'DEP0112'); + + +// Legacy alias on the C++ wrapper object. This is not public API, so we may +// want to runtime-deprecate it at some point. There's no hurry, though. +ObjectDefineProperty(UDP.prototype, 'owner', { + __proto__: null, + get() { return this[owner_symbol]; }, + set(v) { return this[owner_symbol] = v; }, +}); + + +module.exports = { + _createSocketHandle: deprecate( + _createSocketHandle, + 'dgram._createSocketHandle() is deprecated', + 'DEP0112', + ), + createSocket, + Socket, +}; \ No newline at end of file diff --git a/.codesandbox/node/diagnostics_channel.js b/.codesandbox/node/diagnostics_channel.js new file mode 100644 index 00000000..1422b0db --- /dev/null +++ b/.codesandbox/node/diagnostics_channel.js @@ -0,0 +1,439 @@ +"use strict"; + +const { + ArrayPrototypeAt, + ArrayPrototypeIndexOf, + ArrayPrototypePush, + ArrayPrototypePushApply, + ArrayPrototypeSlice, + ArrayPrototypeSplice, + ObjectDefineProperty, + ObjectGetPrototypeOf, + ObjectSetPrototypeOf, + Promise, + PromisePrototypeThen, + PromiseReject, + PromiseResolve, + ReflectApply, + SafeFinalizationRegistry, + SafeMap, + SymbolHasInstance, +} = primordials; + +const { + codes: { ERR_INVALID_ARG_TYPE }, +} = require("internal/errors"); +const { validateFunction } = require("internal/validators"); + +const { triggerUncaughtException } = internalBinding("errors"); + +const { WeakReference } = require("internal/util"); + +// Can't delete when weakref count reaches 0 as it could increment again. +// Only GC can be used as a valid time to clean up the channels map. +class WeakRefMap extends SafeMap { + #finalizers = new SafeFinalizationRegistry((key) => { + // Check that the key doesn't have any value before deleting, as the WeakRef for the key + // may have been replaced since finalization callbacks aren't synchronous with GC. + if (!this.has(key)) this.delete(key); + }); + + set(key, value) { + this.#finalizers.register(value, key); + return super.set(key, new WeakReference(value)); + } + + get(key) { + return super.get(key)?.get(); + } + + has(key) { + return !!this.get(key); + } + + incRef(key) { + return super.get(key)?.incRef(); + } + + decRef(key) { + return super.get(key)?.decRef(); + } +} + +function markActive(channel) { + // eslint-disable-next-line no-use-before-define + ObjectSetPrototypeOf(channel, ActiveChannel.prototype); + channel._subscribers = []; + channel._stores = new SafeMap(); +} + +function maybeMarkInactive(channel) { + // When there are no more active subscribers or bound, restore to fast prototype. + if (!channel._subscribers.length && !channel._stores.size) { + // eslint-disable-next-line no-use-before-define + ObjectSetPrototypeOf(channel, Channel.prototype); + channel._subscribers = undefined; + channel._stores = undefined; + } +} + +function defaultTransform(data) { + return data; +} + +function wrapStoreRun(store, data, next, transform = defaultTransform) { + return () => { + let context; + try { + context = transform(data); + } catch (err) { + process.nextTick(() => { + triggerUncaughtException(err, false); + }); + return next(); + } + + return store.run(context, next); + }; +} + +// TODO(qard): should there be a C++ channel interface? 
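+// A hedged usage sketch of the public API these classes back (channel name and
+// payload are illustrative):
+//   const dc = require("diagnostics_channel");
+//   const ch = dc.channel("my-app:query");
+//   dc.subscribe("my-app:query", (message, name) => { /* observe message */ });
+//   if (ch.hasSubscribers) ch.publish({ sql: "SELECT 1" });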
+class ActiveChannel { + subscribe(subscription) { + validateFunction(subscription, "subscription"); + this._subscribers = ArrayPrototypeSlice(this._subscribers); + ArrayPrototypePush(this._subscribers, subscription); + channels.incRef(this.name); + } + + unsubscribe(subscription) { + const index = ArrayPrototypeIndexOf(this._subscribers, subscription); + if (index === -1) return false; + + const before = ArrayPrototypeSlice(this._subscribers, 0, index); + const after = ArrayPrototypeSlice(this._subscribers, index + 1); + this._subscribers = before; + ArrayPrototypePushApply(this._subscribers, after); + + channels.decRef(this.name); + maybeMarkInactive(this); + + return true; + } + + bindStore(store, transform) { + const replacing = this._stores.has(store); + if (!replacing) channels.incRef(this.name); + this._stores.set(store, transform); + } + + unbindStore(store) { + if (!this._stores.has(store)) { + return false; + } + + this._stores.delete(store); + + channels.decRef(this.name); + maybeMarkInactive(this); + + return true; + } + + get hasSubscribers() { + return true; + } + + publish(data) { + const subscribers = this._subscribers; + for (let i = 0; i < (subscribers?.length || 0); i++) { + try { + const onMessage = subscribers[i]; + onMessage(data, this.name); + } catch (err) { + process.nextTick(() => { + triggerUncaughtException(err, false); + }); + } + } + } + + runStores(data, fn, thisArg, ...args) { + let run = () => { + this.publish(data); + return ReflectApply(fn, thisArg, args); + }; + + for (const entry of this._stores.entries()) { + const store = entry[0]; + const transform = entry[1]; + run = wrapStoreRun(store, data, run, transform); + } + + return run(); + } +} + +class Channel { + constructor(name) { + this._subscribers = undefined; + this._stores = undefined; + this.name = name; + + channels.set(name, this); + } + + static [SymbolHasInstance](instance) { + const prototype = ObjectGetPrototypeOf(instance); + return ( + prototype === Channel.prototype || prototype === ActiveChannel.prototype + ); + } + + subscribe(subscription) { + markActive(this); + this.subscribe(subscription); + } + + unsubscribe() { + return false; + } + + bindStore(store, transform) { + markActive(this); + this.bindStore(store, transform); + } + + unbindStore() { + return false; + } + + get hasSubscribers() { + return false; + } + + publish() {} + + runStores(data, fn, thisArg, ...args) { + return ReflectApply(fn, thisArg, args); + } +} + +const channels = new WeakRefMap(); + +function channel(name) { + const channel = channels.get(name); + if (channel) return channel; + + if (typeof name !== "string" && typeof name !== "symbol") { + throw new ERR_INVALID_ARG_TYPE("channel", ["string", "symbol"], name); + } + + return new Channel(name); +} + +function subscribe(name, subscription) { + return channel(name).subscribe(subscription); +} + +function unsubscribe(name, subscription) { + return channel(name).unsubscribe(subscription); +} + +function hasSubscribers(name) { + const channel = channels.get(name); + if (!channel) return false; + + return channel.hasSubscribers; +} + +const traceEvents = ["start", "end", "asyncStart", "asyncEnd", "error"]; + +function assertChannel(value, name) { + if (!(value instanceof Channel)) { + throw new ERR_INVALID_ARG_TYPE(name, ["Channel"], value); + } +} + +function tracingChannelFrom(nameOrChannels, name) { + if (typeof nameOrChannels === "string") { + return channel(`tracing:${nameOrChannels}:${name}`); + } + + if (typeof nameOrChannels === "object" && nameOrChannels 
!== null) { + const channel = nameOrChannels[name]; + assertChannel(channel, `nameOrChannels.${name}`); + return channel; + } + + throw new ERR_INVALID_ARG_TYPE( + "nameOrChannels", + ["string", "object", "TracingChannel"], + nameOrChannels + ); +} + +class TracingChannel { + constructor(nameOrChannels) { + for (let i = 0; i < traceEvents.length; ++i) { + const eventName = traceEvents[i]; + ObjectDefineProperty(this, eventName, { + __proto__: null, + value: tracingChannelFrom(nameOrChannels, eventName), + }); + } + } + + get hasSubscribers() { + return ( + this.start?.hasSubscribers || + this.end?.hasSubscribers || + this.asyncStart?.hasSubscribers || + this.asyncEnd?.hasSubscribers || + this.error?.hasSubscribers + ); + } + + subscribe(handlers) { + for (let i = 0; i < traceEvents.length; ++i) { + const name = traceEvents[i]; + if (!handlers[name]) continue; + + this[name]?.subscribe(handlers[name]); + } + } + + unsubscribe(handlers) { + let done = true; + + for (let i = 0; i < traceEvents.length; ++i) { + const name = traceEvents[i]; + if (!handlers[name]) continue; + + if (!this[name]?.unsubscribe(handlers[name])) { + done = false; + } + } + + return done; + } + + traceSync(fn, context = {}, thisArg, ...args) { + if (!this.hasSubscribers) { + return ReflectApply(fn, thisArg, args); + } + + const { start, end, error } = this; + + return start.runStores(context, () => { + try { + const result = ReflectApply(fn, thisArg, args); + context.result = result; + return result; + } catch (err) { + context.error = err; + error.publish(context); + throw err; + } finally { + end.publish(context); + } + }); + } + + tracePromise(fn, context = {}, thisArg, ...args) { + if (!this.hasSubscribers) { + return ReflectApply(fn, thisArg, args); + } + + const { start, end, asyncStart, asyncEnd, error } = this; + + function reject(err) { + context.error = err; + error.publish(context); + asyncStart.publish(context); + // TODO: Is there a way to have asyncEnd _after_ the continuation? + asyncEnd.publish(context); + return PromiseReject(err); + } + + function resolve(result) { + context.result = result; + asyncStart.publish(context); + // TODO: Is there a way to have asyncEnd _after_ the continuation? 
+ asyncEnd.publish(context); + return result; + } + + return start.runStores(context, () => { + try { + let promise = ReflectApply(fn, thisArg, args); + // Convert thenables to native promises + if (!(promise instanceof Promise)) { + promise = PromiseResolve(promise); + } + return PromisePrototypeThen(promise, resolve, reject); + } catch (err) { + context.error = err; + error.publish(context); + throw err; + } finally { + end.publish(context); + } + }); + } + + traceCallback(fn, position = -1, context = {}, thisArg, ...args) { + if (!this.hasSubscribers) { + return ReflectApply(fn, thisArg, args); + } + + const { start, end, asyncStart, asyncEnd, error } = this; + + function wrappedCallback(err, res) { + if (err) { + context.error = err; + error.publish(context); + } else { + context.result = res; + } + + // Using runStores here enables manual context failure recovery + asyncStart.runStores(context, () => { + try { + return ReflectApply(callback, this, arguments); + } finally { + asyncEnd.publish(context); + } + }); + } + + const callback = ArrayPrototypeAt(args, position); + validateFunction(callback, "callback"); + ArrayPrototypeSplice(args, position, 1, wrappedCallback); + + return start.runStores(context, () => { + try { + return ReflectApply(fn, thisArg, args); + } catch (err) { + context.error = err; + error.publish(context); + throw err; + } finally { + end.publish(context); + } + }); + } +} + +function tracingChannel(nameOrChannels) { + return new TracingChannel(nameOrChannels); +} + +module.exports = { + channel, + hasSubscribers, + subscribe, + tracingChannel, + unsubscribe, + Channel, +}; diff --git a/.codesandbox/node/dns.js b/.codesandbox/node/dns.js new file mode 100644 index 00000000..22d23dea --- /dev/null +++ b/.codesandbox/node/dns.js @@ -0,0 +1,345 @@ +"use strict"; + +const { ObjectDefineProperties, ObjectDefineProperty, Symbol } = primordials; + +const cares = internalBinding("cares_wrap"); +const { isIP } = require("internal/net"); +const { customPromisifyArgs } = require("internal/util"); +const { + DNSException, + codes: { ERR_INVALID_ARG_TYPE, ERR_INVALID_ARG_VALUE, ERR_MISSING_ARGS }, +} = require("internal/errors"); +const { + bindDefaultResolver, + setDefaultResolver, + validateHints, + getDefaultResultOrder, + setDefaultResultOrder, + errorCodes: dnsErrorCodes, + validDnsOrders, + validFamilies, +} = require("internal/dns/utils"); +const { Resolver } = require("internal/dns/callback_resolver"); +const { + NODATA, + FORMERR, + SERVFAIL, + NOTFOUND, + NOTIMP, + REFUSED, + BADQUERY, + BADNAME, + BADFAMILY, + BADRESP, + CONNREFUSED, + TIMEOUT, + EOF, + FILE, + NOMEM, + DESTRUCTION, + BADSTR, + BADFLAGS, + NONAME, + BADHINTS, + NOTINITIALIZED, + LOADIPHLPAPI, + ADDRGETNETWORKPARAMS, + CANCELLED, +} = dnsErrorCodes; +const { + validateBoolean, + validateFunction, + validateNumber, + validateOneOf, + validatePort, + validateString, +} = require("internal/validators"); + +const { + GetAddrInfoReqWrap, + GetNameInfoReqWrap, + DNS_ORDER_VERBATIM, + DNS_ORDER_IPV4_FIRST, + DNS_ORDER_IPV6_FIRST, +} = cares; + +const kPerfHooksDnsLookupContext = Symbol("kPerfHooksDnsLookupContext"); +const kPerfHooksDnsLookupServiceContext = Symbol( + "kPerfHooksDnsLookupServiceContext" +); + +const { hasObserver, startPerf, stopPerf } = require("internal/perf/observe"); + +let promises = null; // Lazy loaded + +function onlookup(err, addresses) { + if (err) { + return this.callback(new DNSException(err, "getaddrinfo", this.hostname)); + } + this.callback(null, addresses[0], this.family || 
isIP(addresses[0])); + if (this[kPerfHooksDnsLookupContext] && hasObserver("dns")) { + stopPerf(this, kPerfHooksDnsLookupContext, { detail: { addresses } }); + } +} + +function onlookupall(err, addresses) { + if (err) { + return this.callback(new DNSException(err, "getaddrinfo", this.hostname)); + } + + const family = this.family; + for (let i = 0; i < addresses.length; i++) { + const addr = addresses[i]; + addresses[i] = { + address: addr, + family: family || isIP(addr), + }; + } + + this.callback(null, addresses); + if (this[kPerfHooksDnsLookupContext] && hasObserver("dns")) { + stopPerf(this, kPerfHooksDnsLookupContext, { detail: { addresses } }); + } +} + +// Easy DNS A/AAAA look up +// lookup(hostname, [options,] callback) +function lookup(hostname, options, callback) { + let hints = 0; + let family = 0; + let all = false; + let dnsOrder = getDefaultResultOrder(); + + // Parse arguments + if (hostname) { + validateString(hostname, "hostname"); + } + + if (typeof options === "function") { + callback = options; + family = 0; + } else if (typeof options === "number") { + validateFunction(callback, "callback"); + + validateOneOf(options, "family", validFamilies); + family = options; + } else if (options !== undefined && typeof options !== "object") { + validateFunction(arguments.length === 2 ? options : callback, "callback"); + throw new ERR_INVALID_ARG_TYPE("options", ["integer", "object"], options); + } else { + validateFunction(callback, "callback"); + + if (options?.hints != null) { + validateNumber(options.hints, "options.hints"); + hints = options.hints >>> 0; + validateHints(hints); + } + if (options?.family != null) { + switch (options.family) { + case "IPv4": + family = 4; + break; + case "IPv6": + family = 6; + break; + default: + validateOneOf(options.family, "options.family", validFamilies); + family = options.family; + break; + } + } + if (options?.all != null) { + validateBoolean(options.all, "options.all"); + all = options.all; + } + if (options?.verbatim != null) { + validateBoolean(options.verbatim, "options.verbatim"); + dnsOrder = options.verbatim ? "verbatim" : "ipv4first"; + } + if (options?.order != null) { + validateOneOf(options.order, "options.order", validDnsOrders); + dnsOrder = options.order; + } + } + + if (!hostname) { + throw new ERR_INVALID_ARG_VALUE( + "hostname", + hostname, + "must be a non-empty string" + ); + } + + const matchedFamily = isIP(hostname); + if (matchedFamily) { + if (all) { + process.nextTick(callback, null, [ + { address: hostname, family: matchedFamily }, + ]); + } else { + process.nextTick(callback, null, hostname, matchedFamily); + } + return {}; + } + + const req = new GetAddrInfoReqWrap(); + req.callback = callback; + req.family = family; + req.hostname = hostname; + req.oncomplete = all ? 
onlookupall : onlookup; + + let order = DNS_ORDER_VERBATIM; + + if (dnsOrder === "ipv4first") { + order = DNS_ORDER_IPV4_FIRST; + } else if (dnsOrder === "ipv6first") { + order = DNS_ORDER_IPV6_FIRST; + } + + const err = cares.getaddrinfo(req, hostname, family, hints, order); + if (err) { + process.nextTick(callback, new DNSException(err, "getaddrinfo", hostname)); + return {}; + } + if (hasObserver("dns")) { + const detail = { + hostname, + family, + hints, + verbatim: order === DNS_ORDER_VERBATIM, + order: dnsOrder, + }; + + startPerf(req, kPerfHooksDnsLookupContext, { + type: "dns", + name: "lookup", + detail, + }); + } + return req; +} + +ObjectDefineProperty(lookup, customPromisifyArgs, { + __proto__: null, + value: ["address", "family"], + enumerable: false, +}); + +function onlookupservice(err, hostname, service) { + if (err) + return this.callback(new DNSException(err, "getnameinfo", this.hostname)); + + this.callback(null, hostname, service); + if (this[kPerfHooksDnsLookupServiceContext] && hasObserver("dns")) { + stopPerf(this, kPerfHooksDnsLookupServiceContext, { + detail: { hostname, service }, + }); + } +} + +function lookupService(address, port, callback) { + if (arguments.length !== 3) + throw new ERR_MISSING_ARGS("address", "port", "callback"); + + if (isIP(address) === 0) throw new ERR_INVALID_ARG_VALUE("address", address); + + validatePort(port); + + validateFunction(callback, "callback"); + + port = +port; + + const req = new GetNameInfoReqWrap(); + req.callback = callback; + req.hostname = address; + req.port = port; + req.oncomplete = onlookupservice; + + const err = cares.getnameinfo(req, address, port); + if (err) throw new DNSException(err, "getnameinfo", address); + if (hasObserver("dns")) { + startPerf(req, kPerfHooksDnsLookupServiceContext, { + type: "dns", + name: "lookupService", + detail: { + host: address, + port, + }, + }); + } + return req; +} + +ObjectDefineProperty(lookupService, customPromisifyArgs, { + __proto__: null, + value: ["hostname", "service"], + enumerable: false, +}); + +function defaultResolverSetServers(servers) { + const resolver = new Resolver(); + + resolver.setServers(servers); + setDefaultResolver(resolver); + bindDefaultResolver(module.exports, Resolver.prototype); + + if (promises !== null) + bindDefaultResolver(promises, promises.Resolver.prototype); +} + +module.exports = { + lookup, + lookupService, + + Resolver, + getDefaultResultOrder, + setDefaultResultOrder, + setServers: defaultResolverSetServers, + + // uv_getaddrinfo flags + ADDRCONFIG: cares.AI_ADDRCONFIG, + ALL: cares.AI_ALL, + V4MAPPED: cares.AI_V4MAPPED, + + // ERROR CODES + NODATA, + FORMERR, + SERVFAIL, + NOTFOUND, + NOTIMP, + REFUSED, + BADQUERY, + BADNAME, + BADFAMILY, + BADRESP, + CONNREFUSED, + TIMEOUT, + EOF, + FILE, + NOMEM, + DESTRUCTION, + BADSTR, + BADFLAGS, + NONAME, + BADHINTS, + NOTINITIALIZED, + LOADIPHLPAPI, + ADDRGETNETWORKPARAMS, + CANCELLED, +}; + +bindDefaultResolver(module.exports, Resolver.prototype); + +ObjectDefineProperties(module.exports, { + promises: { + __proto__: null, + configurable: true, + enumerable: true, + get() { + if (promises === null) { + promises = require("internal/dns/promises"); + } + return promises; + }, + }, +}); diff --git a/.codesandbox/node/domain.js b/.codesandbox/node/domain.js new file mode 100644 index 00000000..29aefdb9 --- /dev/null +++ b/.codesandbox/node/domain.js @@ -0,0 +1,529 @@ +"use strict"; + +// WARNING: THIS MODULE IS PENDING DEPRECATION. 
+// +// No new pull requests targeting this module will be accepted +// unless they address existing, critical bugs. + +const { + ArrayPrototypeEvery, + ArrayPrototypeIndexOf, + ArrayPrototypeLastIndexOf, + ArrayPrototypePush, + ArrayPrototypeSlice, + ArrayPrototypeSplice, + Error, + FunctionPrototypeCall, + ObjectDefineProperty, + Promise, + ReflectApply, + SafeMap, + SafeWeakMap, + StringPrototypeRepeat, + Symbol, +} = primordials; + +const EventEmitter = require("events"); +const { + ERR_DOMAIN_CALLBACK_NOT_AVAILABLE, + ERR_DOMAIN_CANNOT_SET_UNCAUGHT_EXCEPTION_CAPTURE, + ERR_UNHANDLED_ERROR, +} = require("internal/errors").codes; +const { createHook } = require("async_hooks"); +const { useDomainTrampoline } = require("internal/async_hooks"); + +const kWeak = Symbol("kWeak"); +const { WeakReference } = require("internal/util"); + +// Overwrite process.domain with a getter/setter that will allow for more +// effective optimizations +const _domain = [null]; +ObjectDefineProperty(process, "domain", { + __proto__: null, + enumerable: true, + get: function () { + return _domain[0]; + }, + set: function (arg) { + return (_domain[0] = arg); + }, +}); + +const vmPromises = new SafeWeakMap(); +const pairing = new SafeMap(); +const asyncHook = createHook({ + init(asyncId, type, triggerAsyncId, resource) { + if (process.domain !== null && process.domain !== undefined) { + // If this operation is created while in a domain, let's mark it + pairing.set(asyncId, process.domain[kWeak]); + // Promises from other contexts, such as with the VM module, should not + // have a domain property as it can be used to escape the sandbox. + if (type !== "PROMISE" || resource instanceof Promise) { + ObjectDefineProperty(resource, "domain", { + __proto__: null, + configurable: true, + enumerable: false, + value: process.domain, + writable: true, + }); + // Because promises from other contexts don't get a domain field, + // the domain needs to be held alive another way. Stuffing it in a + // weakmap connected to the promise lifetime can fix that. + } else { + vmPromises.set(resource, process.domain); + } + } + }, + before(asyncId) { + const current = pairing.get(asyncId); + if (current !== undefined) { + // Enter domain for this cb + // We will get the domain through current.get(), because the resource + // object's .domain property makes sure it is not garbage collected. + // However, we do need to make the reference to the domain non-weak, + // so that it cannot be garbage collected before the after() hook. + current.incRef(); + current.get().enter(); + } + }, + after(asyncId) { + const current = pairing.get(asyncId); + if (current !== undefined) { + // Exit domain for this cb + const domain = current.get(); + current.decRef(); + domain.exit(); + } + }, + destroy(asyncId) { + pairing.delete(asyncId); // cleaning up + }, +}); + +// When domains are in use, they claim full ownership of the +// uncaught exception capture callback. +if (process.hasUncaughtExceptionCaptureCallback()) { + throw new ERR_DOMAIN_CALLBACK_NOT_AVAILABLE(); +} + +// Get the stack trace at the point where `domain` was required. 
+// eslint-disable-next-line no-restricted-syntax +const domainRequireStack = new Error("require(`domain`) at this point").stack; + +const { setUncaughtExceptionCaptureCallback } = process; +process.setUncaughtExceptionCaptureCallback = function (fn) { + const err = new ERR_DOMAIN_CANNOT_SET_UNCAUGHT_EXCEPTION_CAPTURE(); + err.stack += `\n${StringPrototypeRepeat("-", 40)}\n${domainRequireStack}`; + throw err; +}; + +let sendMakeCallbackDeprecation = false; +function emitMakeCallbackDeprecation({ target, method }) { + if (!sendMakeCallbackDeprecation) { + process.emitWarning( + "Using a domain property in MakeCallback is deprecated. Use the " + + "async_context variant of MakeCallback or the AsyncResource class " + + "instead. " + + `(Triggered by calling ${method?.name || ""} ` + + `on ${target?.constructor?.name}.)`, + "DeprecationWarning", + "DEP0097" + ); + sendMakeCallbackDeprecation = true; + } +} + +function topLevelDomainCallback(cb, ...args) { + const domain = this.domain; + if (exports.active && domain) + emitMakeCallbackDeprecation({ target: this, method: cb }); + + if (domain) domain.enter(); + const ret = ReflectApply(cb, this, args); + if (domain) domain.exit(); + + return ret; +} + +// It's possible to enter one domain while already inside +// another one. The stack is each entered domain. +let stack = []; +exports._stack = stack; +useDomainTrampoline(topLevelDomainCallback); + +function updateExceptionCapture() { + if ( + ArrayPrototypeEvery(stack, (domain) => domain.listenerCount("error") === 0) + ) { + setUncaughtExceptionCaptureCallback(null); + } else { + setUncaughtExceptionCaptureCallback(null); + setUncaughtExceptionCaptureCallback((er) => { + return process.domain._errorHandler(er); + }); + } +} + +process.on("newListener", (name, listener) => { + if ( + name === "uncaughtException" && + listener !== domainUncaughtExceptionClear + ) { + // Make sure the first listener for `uncaughtException` always clears + // the domain stack. + process.removeListener(name, domainUncaughtExceptionClear); + process.prependListener(name, domainUncaughtExceptionClear); + } +}); + +process.on("removeListener", (name, listener) => { + if ( + name === "uncaughtException" && + listener !== domainUncaughtExceptionClear + ) { + // If the domain listener would be the only remaining one, remove it. + const listeners = process.listeners("uncaughtException"); + if (listeners.length === 1 && listeners[0] === domainUncaughtExceptionClear) + process.removeListener(name, domainUncaughtExceptionClear); + } +}); + +function domainUncaughtExceptionClear() { + stack.length = 0; + exports.active = process.domain = null; + updateExceptionCapture(); +} + +class Domain extends EventEmitter { + constructor() { + super(); + + this.members = []; + this[kWeak] = new WeakReference(this); + asyncHook.enable(); + + this.on("removeListener", updateExceptionCapture); + this.on("newListener", updateExceptionCapture); + } +} + +exports.Domain = Domain; + +exports.create = exports.createDomain = function createDomain() { + return new Domain(); +}; + +// The active domain is always the one that we're currently in. +exports.active = null; +Domain.prototype.members = undefined; + +// Called by process._fatalException in case an error was thrown. 
+Domain.prototype._errorHandler = function (er) { + let caught = false; + + if ((typeof er === "object" && er !== null) || typeof er === "function") { + ObjectDefineProperty(er, "domain", { + __proto__: null, + configurable: true, + enumerable: false, + value: this, + writable: true, + }); + er.domainThrown = true; + } + // Pop all adjacent duplicates of the currently active domain from the stack. + // This is done to prevent a domain's error handler to run within the context + // of itself, and re-entering itself recursively handler as a result of an + // exception thrown in its context. + while (exports.active === this) { + this.exit(); + } + + // The top-level domain-handler is handled separately. + // + // The reason is that if V8 was passed a command line option + // asking it to abort on an uncaught exception (currently + // "--abort-on-uncaught-exception"), we want an uncaught exception + // in the top-level domain error handler to make the + // process abort. Using try/catch here would always make V8 think + // that these exceptions are caught, and thus would prevent it from + // aborting in these cases. + if (stack.length === 0) { + // If there's no error handler, do not emit an 'error' event + // as this would throw an error, make the process exit, and thus + // prevent the process 'uncaughtException' event from being emitted + // if a listener is set. + if (this.listenerCount("error") > 0) { + // Clear the uncaughtExceptionCaptureCallback so that we know that, since + // the top-level domain is not active anymore, it would be ok to abort on + // an uncaught exception at this point + setUncaughtExceptionCaptureCallback(null); + try { + caught = this.emit("error", er); + } finally { + updateExceptionCapture(); + } + } + } else { + // Wrap this in a try/catch so we don't get infinite throwing + try { + // One of three things will happen here. + // + // 1. There is a handler, caught = true + // 2. There is no handler, caught = false + // 3. It throws, caught = false + // + // If caught is false after this, then there's no need to exit() + // the domain, because we're going to crash the process anyway. + caught = this.emit("error", er); + } catch (er2) { + // The domain error handler threw! oh no! + // See if another domain can catch THIS error, + // or else crash on the original one. + updateExceptionCapture(); + if (stack.length) { + exports.active = process.domain = stack[stack.length - 1]; + caught = process.domain._errorHandler(er2); + } else { + // Pass on to the next exception handler. + throw er2; + } + } + } + + // Exit all domains on the stack. Uncaught exceptions end the + // current tick and no domains should be left on the stack + // between ticks. + domainUncaughtExceptionClear(); + + return caught; +}; + +Domain.prototype.enter = function () { + // Note that this might be a no-op, but we still need + // to push it onto the stack so that we can pop it later. + exports.active = process.domain = this; + ArrayPrototypePush(stack, this); + updateExceptionCapture(); +}; + +Domain.prototype.exit = function () { + // Don't do anything if this domain is not on the stack. + const index = ArrayPrototypeLastIndexOf(stack, this); + if (index === -1) return; + + // Exit all domains until this one. + ArrayPrototypeSplice(stack, index); + + exports.active = stack.length === 0 ? undefined : stack[stack.length - 1]; + process.domain = exports.active; + updateExceptionCapture(); +}; + +// note: this works for timers as well. 
+Domain.prototype.add = function (ee) { + // If the domain is already added, then nothing left to do. + if (ee.domain === this) return; + + // Has a domain already - remove it first. + if (ee.domain) ee.domain.remove(ee); + + // Check for circular Domain->Domain links. + // They cause big issues. + // + // For example: + // var d = domain.create(); + // var e = domain.create(); + // d.add(e); + // e.add(d); + // e.emit('error', er); // RangeError, stack overflow! + if (this.domain && ee instanceof Domain) { + for (let d = this.domain; d; d = d.domain) { + if (ee === d) return; + } + } + + ObjectDefineProperty(ee, "domain", { + __proto__: null, + configurable: true, + enumerable: false, + value: this, + writable: true, + }); + ArrayPrototypePush(this.members, ee); +}; + +Domain.prototype.remove = function (ee) { + ee.domain = null; + const index = ArrayPrototypeIndexOf(this.members, ee); + if (index !== -1) ArrayPrototypeSplice(this.members, index, 1); +}; + +Domain.prototype.run = function (fn) { + this.enter(); + const ret = ReflectApply(fn, this, ArrayPrototypeSlice(arguments, 1)); + this.exit(); + + return ret; +}; + +function intercepted(_this, self, cb, fnargs) { + if (fnargs[0] && fnargs[0] instanceof Error) { + const er = fnargs[0]; + er.domainBound = cb; + er.domainThrown = false; + ObjectDefineProperty(er, "domain", { + __proto__: null, + configurable: true, + enumerable: false, + value: self, + writable: true, + }); + self.emit("error", er); + return; + } + + self.enter(); + const ret = ReflectApply(cb, _this, ArrayPrototypeSlice(fnargs, 1)); + self.exit(); + + return ret; +} + +Domain.prototype.intercept = function (cb) { + const self = this; + + function runIntercepted() { + return intercepted(this, self, cb, arguments); + } + + return runIntercepted; +}; + +function bound(_this, self, cb, fnargs) { + self.enter(); + const ret = ReflectApply(cb, _this, fnargs); + self.exit(); + + return ret; +} + +Domain.prototype.bind = function (cb) { + const self = this; + + function runBound() { + return bound(this, self, cb, arguments); + } + + ObjectDefineProperty(runBound, "domain", { + __proto__: null, + configurable: true, + enumerable: false, + value: this, + writable: true, + }); + + return runBound; +}; + +// Override EventEmitter methods to make it domain-aware. +EventEmitter.usingDomains = true; + +const eventInit = EventEmitter.init; +EventEmitter.init = function (opts) { + ObjectDefineProperty(this, "domain", { + __proto__: null, + configurable: true, + enumerable: false, + value: null, + writable: true, + }); + if (exports.active && !(this instanceof exports.Domain)) { + this.domain = exports.active; + } + + return FunctionPrototypeCall(eventInit, this, opts); +}; + +const eventEmit = EventEmitter.prototype.emit; +EventEmitter.prototype.emit = function emit(...args) { + const domain = this.domain; + + const type = args[0]; + const shouldEmitError = type === "error" && this.listenerCount(type) > 0; + + // Just call original `emit` if current EE instance has `error` + // handler, there's no active domain or this is process + if ( + shouldEmitError || + domain === null || + domain === undefined || + this === process + ) { + return ReflectApply(eventEmit, this, args); + } + + if (type === "error") { + const er = args.length > 1 && args[1] ? 
args[1] : new ERR_UNHANDLED_ERROR(); + + if (typeof er === "object") { + er.domainEmitter = this; + ObjectDefineProperty(er, "domain", { + __proto__: null, + configurable: true, + enumerable: false, + value: domain, + writable: true, + }); + er.domainThrown = false; + } + + // Remove the current domain (and its duplicates) from the domains stack and + // set the active domain to its parent (if any) so that the domain's error + // handler doesn't run in its own context. This prevents any event emitter + // created or any exception thrown in that error handler from recursively + // executing that error handler. + const origDomainsStack = ArrayPrototypeSlice(stack); + const origActiveDomain = process.domain; + + // Travel the domains stack from top to bottom to find the first domain + // instance that is not a duplicate of the current active domain. + let idx = stack.length - 1; + while (idx > -1 && process.domain === stack[idx]) { + --idx; + } + + // Change the stack to not contain the current active domain, and only the + // domains above it on the stack. + if (idx < 0) { + stack.length = 0; + } else { + ArrayPrototypeSplice(stack, idx + 1); + } + + // Change the current active domain + if (stack.length > 0) { + exports.active = process.domain = stack[stack.length - 1]; + } else { + exports.active = process.domain = null; + } + + updateExceptionCapture(); + + domain.emit("error", er); + + // Now that the domain's error handler has completed, restore the domains + // stack and the active domain to their original values. + exports._stack = stack = origDomainsStack; + exports.active = process.domain = origActiveDomain; + updateExceptionCapture(); + + return false; + } + + domain.enter(); + const ret = ReflectApply(eventEmit, this, args); + domain.exit(); + + return ret; +}; diff --git a/.codesandbox/node/events.js b/.codesandbox/node/events.js new file mode 100644 index 00000000..fdb1605e --- /dev/null +++ b/.codesandbox/node/events.js @@ -0,0 +1,1244 @@ +"use strict"; + +const { + ArrayPrototypeJoin, + ArrayPrototypePop, + ArrayPrototypePush, + ArrayPrototypeSlice, + ArrayPrototypeSplice, + ArrayPrototypeUnshift, + AsyncIteratorPrototype, + Boolean, + Error, + ErrorCaptureStackTrace, + FunctionPrototypeBind, + NumberMAX_SAFE_INTEGER, + ObjectDefineProperties, + ObjectDefineProperty, + ObjectGetPrototypeOf, + ObjectSetPrototypeOf, + Promise, + PromiseReject, + PromiseResolve, + ReflectApply, + ReflectOwnKeys, + String, + StringPrototypeSplit, + Symbol, + SymbolAsyncIterator, + SymbolDispose, + SymbolFor, +} = primordials; +const kRejection = SymbolFor("nodejs.rejection"); + +const { kEmptyObject, spliceOne } = require("internal/util"); + +const { inspect, identicalSequenceRange } = require("internal/util/inspect"); + +let FixedQueue; +let kFirstEventParam; +let kResistStopPropagation; + +const { + AbortError, + codes: { ERR_INVALID_ARG_TYPE, ERR_UNHANDLED_ERROR }, + genericNodeError, + kEnhanceStackBeforeInspector, +} = require("internal/errors"); + +const { + validateInteger, + validateAbortSignal, + validateBoolean, + validateFunction, + validateNumber, + validateObject, + validateString, +} = require("internal/validators"); +const { addAbortListener } = require("internal/events/abort_listener"); + +const kCapture = Symbol("kCapture"); +const kErrorMonitor = Symbol("events.errorMonitor"); +const kShapeMode = Symbol("shapeMode"); +const kMaxEventTargetListeners = Symbol("events.maxEventTargetListeners"); +const kMaxEventTargetListenersWarned = Symbol( + "events.maxEventTargetListenersWarned" 
+); +const kWatermarkData = SymbolFor("nodejs.watermarkData"); + +let EventEmitterAsyncResource; +// The EventEmitterAsyncResource has to be initialized lazily because event.js +// is loaded so early in the bootstrap process, before async_hooks is available. +// +// This implementation was adapted straight from addaleax's +// eventemitter-asyncresource MIT-licensed userland module. +// https://github.com/addaleax/eventemitter-asyncresource +function lazyEventEmitterAsyncResource() { + if (EventEmitterAsyncResource === undefined) { + const { AsyncResource } = require("async_hooks"); + + class EventEmitterReferencingAsyncResource extends AsyncResource { + #eventEmitter; + + /** + * @param {EventEmitter} ee + * @param {string} [type] + * @param {{ + * triggerAsyncId?: number, + * requireManualDestroy?: boolean, + * }} [options] + */ + constructor(ee, type, options) { + super(type, options); + this.#eventEmitter = ee; + } + + /** + * @type {EventEmitter} + */ + get eventEmitter() { + return this.#eventEmitter; + } + } + + EventEmitterAsyncResource = class EventEmitterAsyncResource extends ( + EventEmitter + ) { + #asyncResource; + + /** + * @param {{ + * name?: string, + * triggerAsyncId?: number, + * requireManualDestroy?: boolean, + * }} [options] + */ + constructor(options = undefined) { + let name; + if (typeof options === "string") { + name = options; + options = undefined; + } else { + if (new.target === EventEmitterAsyncResource) { + validateString(options?.name, "options.name"); + } + name = options?.name || new.target.name; + } + super(options); + + this.#asyncResource = new EventEmitterReferencingAsyncResource( + this, + name, + options + ); + } + + /** + * @param {symbol|string} event + * @param {any[]} args + * @returns {boolean} + */ + emit(event, ...args) { + const asyncResource = this.#asyncResource; + ArrayPrototypeUnshift(args, super.emit, this, event); + return ReflectApply(asyncResource.runInAsyncScope, asyncResource, args); + } + + /** + * @returns {void} + */ + emitDestroy() { + this.#asyncResource.emitDestroy(); + } + + /** + * @type {number} + */ + get asyncId() { + return this.#asyncResource.asyncId(); + } + + /** + * @type {number} + */ + get triggerAsyncId() { + return this.#asyncResource.triggerAsyncId(); + } + + /** + * @type {EventEmitterReferencingAsyncResource} + */ + get asyncResource() { + return this.#asyncResource; + } + }; + } + return EventEmitterAsyncResource; +} + +/** + * Creates a new `EventEmitter` instance. 
+ * @param {{ captureRejections?: boolean; }} [opts] + * @constructs EventEmitter + */ +function EventEmitter(opts) { + EventEmitter.init.call(this, opts); +} +module.exports = EventEmitter; +module.exports.addAbortListener = addAbortListener; +module.exports.once = once; +module.exports.on = on; +module.exports.getEventListeners = getEventListeners; +module.exports.getMaxListeners = getMaxListeners; +module.exports.listenerCount = listenerCount; +// Backwards-compat with node 0.10.x +EventEmitter.EventEmitter = EventEmitter; + +EventEmitter.usingDomains = false; + +EventEmitter.captureRejectionSymbol = kRejection; +ObjectDefineProperty(EventEmitter, "captureRejections", { + __proto__: null, + get() { + return EventEmitter.prototype[kCapture]; + }, + set(value) { + validateBoolean(value, "EventEmitter.captureRejections"); + + EventEmitter.prototype[kCapture] = value; + }, + enumerable: true, +}); + +ObjectDefineProperty(EventEmitter, "EventEmitterAsyncResource", { + __proto__: null, + enumerable: true, + get: lazyEventEmitterAsyncResource, + set: undefined, + configurable: true, +}); + +EventEmitter.errorMonitor = kErrorMonitor; + +// The default for captureRejections is false +ObjectDefineProperty(EventEmitter.prototype, kCapture, { + __proto__: null, + value: false, + writable: true, + enumerable: false, +}); + +EventEmitter.prototype._events = undefined; +EventEmitter.prototype._eventsCount = 0; +EventEmitter.prototype._maxListeners = undefined; + +// By default EventEmitters will print a warning if more than 10 listeners are +// added to it. This is a useful default which helps finding memory leaks. +let defaultMaxListeners = 10; +let isEventTarget; + +function checkListener(listener) { + validateFunction(listener, "listener"); +} + +ObjectDefineProperty(EventEmitter, "defaultMaxListeners", { + __proto__: null, + enumerable: true, + get: function () { + return defaultMaxListeners; + }, + set: function (arg) { + validateNumber(arg, "defaultMaxListeners", 0); + defaultMaxListeners = arg; + }, +}); + +ObjectDefineProperties(EventEmitter, { + kMaxEventTargetListeners: { + __proto__: null, + value: kMaxEventTargetListeners, + enumerable: false, + configurable: false, + writable: false, + }, + kMaxEventTargetListenersWarned: { + __proto__: null, + value: kMaxEventTargetListenersWarned, + enumerable: false, + configurable: false, + writable: false, + }, +}); + +/** + * Sets the max listeners. + * @param {number} n + * @param {EventTarget[] | EventEmitter[]} [eventTargets] + * @returns {void} + */ +EventEmitter.setMaxListeners = function ( + n = defaultMaxListeners, + ...eventTargets +) { + validateNumber(n, "setMaxListeners", 0); + if (eventTargets.length === 0) { + defaultMaxListeners = n; + } else { + if (isEventTarget === undefined) + isEventTarget = require("internal/event_target").isEventTarget; + + for (let i = 0; i < eventTargets.length; i++) { + const target = eventTargets[i]; + if (isEventTarget(target)) { + target[kMaxEventTargetListeners] = n; + target[kMaxEventTargetListenersWarned] = false; + } else if (typeof target.setMaxListeners === "function") { + target.setMaxListeners(n); + } else { + throw new ERR_INVALID_ARG_TYPE( + "eventTargets", + ["EventEmitter", "EventTarget"], + target + ); + } + } + } +}; + +// If you're updating this function definition, please also update any +// re-definitions, such as the one in the Domain module (lib/domain.js). 
+EventEmitter.init = function (opts) { + if ( + this._events === undefined || + this._events === ObjectGetPrototypeOf(this)._events + ) { + this._events = { __proto__: null }; + this._eventsCount = 0; + this[kShapeMode] = false; + } else { + this[kShapeMode] = true; + } + + this._maxListeners ||= undefined; + + if (opts?.captureRejections) { + validateBoolean(opts.captureRejections, "options.captureRejections"); + this[kCapture] = Boolean(opts.captureRejections); + } else { + // Assigning the kCapture property directly saves an expensive + // prototype lookup in a very sensitive hot path. + this[kCapture] = EventEmitter.prototype[kCapture]; + } +}; + +function addCatch(that, promise, type, args) { + if (!that[kCapture]) { + return; + } + + // Handle Promises/A+ spec, then could be a getter + // that throws on second use. + try { + const then = promise.then; + + if (typeof then === "function") { + then.call(promise, undefined, function (err) { + // The callback is called with nextTick to avoid a follow-up + // rejection from this promise. + process.nextTick(emitUnhandledRejectionOrErr, that, err, type, args); + }); + } + } catch (err) { + that.emit("error", err); + } +} + +function emitUnhandledRejectionOrErr(ee, err, type, args) { + if (typeof ee[kRejection] === "function") { + ee[kRejection](err, type, ...args); + } else { + // We have to disable the capture rejections mechanism, otherwise + // we might end up in an infinite loop. + const prev = ee[kCapture]; + + // If the error handler throws, it is not catchable and it + // will end up in 'uncaughtException'. We restore the previous + // value of kCapture in case the uncaughtException is present + // and the exception is handled. + try { + ee[kCapture] = false; + ee.emit("error", err); + } finally { + ee[kCapture] = prev; + } + } +} + +/** + * Increases the max listeners of the event emitter. + * @param {number} n + * @returns {EventEmitter} + */ +EventEmitter.prototype.setMaxListeners = function setMaxListeners(n) { + validateNumber(n, "setMaxListeners", 0); + this._maxListeners = n; + return this; +}; + +function _getMaxListeners(that) { + if (that._maxListeners === undefined) return EventEmitter.defaultMaxListeners; + return that._maxListeners; +} + +/** + * Returns the current max listener value for the event emitter. + * @returns {number} + */ +EventEmitter.prototype.getMaxListeners = function getMaxListeners() { + return _getMaxListeners(this); +}; + +function enhanceStackTrace(err, own) { + let ctorInfo = ""; + try { + const { name } = this.constructor; + if (name !== "EventEmitter") ctorInfo = ` on ${name} instance`; + } catch { + // Continue regardless of error. + } + const sep = `\nEmitted 'error' event${ctorInfo} at:\n`; + + const errStack = ArrayPrototypeSlice( + StringPrototypeSplit(err.stack, "\n"), + 1 + ); + const ownStack = ArrayPrototypeSlice( + StringPrototypeSplit(own.stack, "\n"), + 1 + ); + + const { len, offset } = identicalSequenceRange(ownStack, errStack); + if (len > 0) { + ArrayPrototypeSplice( + ownStack, + offset + 1, + len - 2, + " [... lines matching original stack trace ...]" + ); + } + + return err.stack + sep + ArrayPrototypeJoin(ownStack, "\n"); +} + +/** + * Synchronously calls each of the listeners registered + * for the event. 
+ * @param {string | symbol} type + * @param {...any} [args] + * @returns {boolean} + */ +EventEmitter.prototype.emit = function emit(type, ...args) { + let doError = type === "error"; + + const events = this._events; + if (events !== undefined) { + if (doError && events[kErrorMonitor] !== undefined) + this.emit(kErrorMonitor, ...args); + doError &&= events.error === undefined; + } else if (!doError) return false; + + // If there is no 'error' event listener then throw. + if (doError) { + let er; + if (args.length > 0) er = args[0]; + if (er instanceof Error) { + try { + const capture = {}; + ErrorCaptureStackTrace(capture, EventEmitter.prototype.emit); + ObjectDefineProperty(er, kEnhanceStackBeforeInspector, { + __proto__: null, + value: FunctionPrototypeBind(enhanceStackTrace, this, er, capture), + configurable: true, + }); + } catch { + // Continue regardless of error. + } + + // Note: The comments on the `throw` lines are intentional, they show + // up in Node's output if this results in an unhandled exception. + throw er; // Unhandled 'error' event + } + + let stringifiedEr; + try { + stringifiedEr = inspect(er); + } catch { + stringifiedEr = er; + } + + // At least give some kind of context to the user + const err = new ERR_UNHANDLED_ERROR(stringifiedEr); + err.context = er; + throw err; // Unhandled 'error' event + } + + const handler = events[type]; + + if (handler === undefined) return false; + + if (typeof handler === "function") { + const result = ReflectApply(handler, this, args); + + // We check if result is undefined first because that + // is the most common case so we do not pay any perf + // penalty + if (result !== undefined && result !== null) { + addCatch(this, result, type, args); + } + } else { + const len = handler.length; + const listeners = arrayClone(handler); + for (let i = 0; i < len; ++i) { + const result = ReflectApply(listeners[i], this, args); + + // We check if result is undefined first because that + // is the most common case so we do not pay any perf + // penalty. + // This code is duplicated because extracting it away + // would make it non-inlineable. + if (result !== undefined && result !== null) { + addCatch(this, result, type, args); + } + } + } + + return true; +}; + +function _addListener(target, type, listener, prepend) { + let m; + let events; + let existing; + + checkListener(listener); + + events = target._events; + if (events === undefined) { + events = target._events = { __proto__: null }; + target._eventsCount = 0; + } else { + // To avoid recursion in the case that type === "newListener"! Before + // adding it to the listeners, first emit "newListener". + if (events.newListener !== undefined) { + target.emit("newListener", type, listener.listener ?? listener); + + // Re-assign `events` because a newListener handler could have caused the + // this._events to be assigned to a new object + events = target._events; + } + existing = events[type]; + } + + if (existing === undefined) { + // Optimize the case of one listener. Don't need the extra array object. + events[type] = listener; + ++target._eventsCount; + } else { + if (typeof existing === "function") { + // Adding the second element, need to change to array. + existing = events[type] = prepend + ? [listener, existing] + : [existing, listener]; + // If we've already got an array, just append. 
+ } else if (prepend) { + existing.unshift(listener); + } else { + existing.push(listener); + } + + // Check for listener leak + m = _getMaxListeners(target); + if (m > 0 && existing.length > m && !existing.warned) { + existing.warned = true; + // No error code for this since it is a Warning + const w = genericNodeError( + `Possible EventEmitter memory leak detected. ${ + existing.length + } ${String(type)} listeners ` + + `added to ${inspect(target, { + depth: -1, + })}. MaxListeners is ${m}. Use emitter.setMaxListeners() to increase limit`, + { + name: "MaxListenersExceededWarning", + emitter: target, + type: type, + count: existing.length, + } + ); + process.emitWarning(w); + } + } + + return target; +} + +/** + * Adds a listener to the event emitter. + * @param {string | symbol} type + * @param {Function} listener + * @returns {EventEmitter} + */ +EventEmitter.prototype.addListener = function addListener(type, listener) { + return _addListener(this, type, listener, false); +}; + +EventEmitter.prototype.on = EventEmitter.prototype.addListener; + +/** + * Adds the `listener` function to the beginning of + * the listeners array. + * @param {string | symbol} type + * @param {Function} listener + * @returns {EventEmitter} + */ +EventEmitter.prototype.prependListener = function prependListener( + type, + listener +) { + return _addListener(this, type, listener, true); +}; + +function onceWrapper() { + if (!this.fired) { + this.target.removeListener(this.type, this.wrapFn); + this.fired = true; + if (arguments.length === 0) return this.listener.call(this.target); + return ReflectApply(this.listener, this.target, arguments); + } +} + +function _onceWrap(target, type, listener) { + const state = { fired: false, wrapFn: undefined, target, type, listener }; + const wrapped = onceWrapper.bind(state); + wrapped.listener = listener; + state.wrapFn = wrapped; + return wrapped; +} + +/** + * Adds a one-time `listener` function to the event emitter. + * @param {string | symbol} type + * @param {Function} listener + * @returns {EventEmitter} + */ +EventEmitter.prototype.once = function once(type, listener) { + checkListener(listener); + + this.on(type, _onceWrap(this, type, listener)); + return this; +}; + +/** + * Adds a one-time `listener` function to the beginning of + * the listeners array. + * @param {string | symbol} type + * @param {Function} listener + * @returns {EventEmitter} + */ +EventEmitter.prototype.prependOnceListener = function prependOnceListener( + type, + listener +) { + checkListener(listener); + + this.prependListener(type, _onceWrap(this, type, listener)); + return this; +}; + +/** + * Removes the specified `listener` from the listeners array. 
+ * @param {string | symbol} type + * @param {Function} listener + * @returns {EventEmitter} + */ +EventEmitter.prototype.removeListener = function removeListener( + type, + listener +) { + checkListener(listener); + + const events = this._events; + if (events === undefined) return this; + + const list = events[type]; + if (list === undefined) return this; + + if (list === listener || list.listener === listener) { + this._eventsCount -= 1; + + if (this[kShapeMode]) { + events[type] = undefined; + } else if (this._eventsCount === 0) { + this._events = { __proto__: null }; + } else { + delete events[type]; + if (events.removeListener) + this.emit("removeListener", type, list.listener || listener); + } + } else if (typeof list !== "function") { + let position = -1; + + for (let i = list.length - 1; i >= 0; i--) { + if (list[i] === listener || list[i].listener === listener) { + position = i; + break; + } + } + + if (position < 0) return this; + + if (position === 0) list.shift(); + else { + spliceOne(list, position); + } + + if (list.length === 1) events[type] = list[0]; + + if (events.removeListener !== undefined) + this.emit("removeListener", type, listener); + } + + return this; +}; + +EventEmitter.prototype.off = EventEmitter.prototype.removeListener; + +/** + * Removes all listeners from the event emitter. (Only + * removes listeners for a specific event name if specified + * as `type`). + * @param {string | symbol} [type] + * @returns {EventEmitter} + */ +EventEmitter.prototype.removeAllListeners = function removeAllListeners(type) { + const events = this._events; + if (events === undefined) return this; + + // Not listening for removeListener, no need to emit + if (events.removeListener === undefined) { + if (arguments.length === 0) { + this._events = { __proto__: null }; + this._eventsCount = 0; + } else if (events[type] !== undefined) { + if (--this._eventsCount === 0) this._events = { __proto__: null }; + else delete events[type]; + } + this[kShapeMode] = false; + return this; + } + + // Emit removeListener for all listeners on all events + if (arguments.length === 0) { + for (const key of ReflectOwnKeys(events)) { + if (key === "removeListener") continue; + this.removeAllListeners(key); + } + this.removeAllListeners("removeListener"); + this._events = { __proto__: null }; + this._eventsCount = 0; + this[kShapeMode] = false; + return this; + } + + const listeners = events[type]; + + if (typeof listeners === "function") { + this.removeListener(type, listeners); + } else if (listeners !== undefined) { + // LIFO order + for (let i = listeners.length - 1; i >= 0; i--) { + this.removeListener(type, listeners[i]); + } + } + + return this; +}; + +function _listeners(target, type, unwrap) { + const events = target._events; + + if (events === undefined) return []; + + const evlistener = events[type]; + if (evlistener === undefined) return []; + + if (typeof evlistener === "function") + return unwrap ? [evlistener.listener || evlistener] : [evlistener]; + + return unwrap ? unwrapListeners(evlistener) : arrayClone(evlistener); +} + +/** + * Returns a copy of the array of listeners for the event name + * specified as `type`. + * @param {string | symbol} type + * @returns {Function[]} + */ +EventEmitter.prototype.listeners = function listeners(type) { + return _listeners(this, type, true); +}; + +/** + * Returns a copy of the array of listeners and wrappers for + * the event name specified as `type`. 
+ * @param {string | symbol} type + * @returns {Function[]} + */ +EventEmitter.prototype.rawListeners = function rawListeners(type) { + return _listeners(this, type, false); +}; + +/** + * Returns the number of listeners listening to event name + * specified as `type`. + * @param {string | symbol} type + * @param {Function} [listener] + * @returns {number} + */ +EventEmitter.prototype.listenerCount = function listenerCount(type, listener) { + const events = this._events; + + if (events !== undefined) { + const evlistener = events[type]; + + if (typeof evlistener === "function") { + if (listener != null) { + return listener === evlistener || listener === evlistener.listener + ? 1 + : 0; + } + + return 1; + } else if (evlistener !== undefined) { + if (listener != null) { + let matching = 0; + + for (let i = 0, l = evlistener.length; i < l; i++) { + if ( + evlistener[i] === listener || + evlistener[i].listener === listener + ) { + matching++; + } + } + + return matching; + } + + return evlistener.length; + } + } + + return 0; +}; + +/** + * Returns an array listing the events for which + * the emitter has registered listeners. + * @returns {(string | symbol)[]} + */ +EventEmitter.prototype.eventNames = function eventNames() { + return this._eventsCount > 0 ? ReflectOwnKeys(this._events) : []; +}; + +function arrayClone(arr) { + // At least since V8 8.3, this implementation is faster than the previous + // which always used a simple for-loop + switch (arr.length) { + case 2: + return [arr[0], arr[1]]; + case 3: + return [arr[0], arr[1], arr[2]]; + case 4: + return [arr[0], arr[1], arr[2], arr[3]]; + case 5: + return [arr[0], arr[1], arr[2], arr[3], arr[4]]; + case 6: + return [arr[0], arr[1], arr[2], arr[3], arr[4], arr[5]]; + } + return ArrayPrototypeSlice(arr); +} + +function unwrapListeners(arr) { + const ret = arrayClone(arr); + for (let i = 0; i < ret.length; ++i) { + const orig = ret[i].listener; + if (typeof orig === "function") ret[i] = orig; + } + return ret; +} + +/** + * Returns a copy of the array of listeners for the event name + * specified as `type`. + * @param {EventEmitter | EventTarget} emitterOrTarget + * @param {string | symbol} type + * @returns {Function[]} + */ +function getEventListeners(emitterOrTarget, type) { + // First check if EventEmitter + if (typeof emitterOrTarget.listeners === "function") { + return emitterOrTarget.listeners(type); + } + // Require event target lazily to avoid always loading it + const { isEventTarget, kEvents } = require("internal/event_target"); + if (isEventTarget(emitterOrTarget)) { + const root = emitterOrTarget[kEvents].get(type); + const listeners = []; + let handler = root?.next; + while (handler?.listener !== undefined) { + const listener = handler.listener?.deref + ? handler.listener.deref() + : handler.listener; + listeners.push(listener); + handler = handler.next; + } + return listeners; + } + throw new ERR_INVALID_ARG_TYPE( + "emitter", + ["EventEmitter", "EventTarget"], + emitterOrTarget + ); +} + +/** + * Returns the max listeners set. 
+ * @param {EventEmitter | EventTarget} emitterOrTarget + * @returns {number} + */ +function getMaxListeners(emitterOrTarget) { + if (typeof emitterOrTarget?.getMaxListeners === "function") { + return _getMaxListeners(emitterOrTarget); + } else if (typeof emitterOrTarget?.[kMaxEventTargetListeners] === "number") { + return emitterOrTarget[kMaxEventTargetListeners]; + } + + throw new ERR_INVALID_ARG_TYPE( + "emitter", + ["EventEmitter", "EventTarget"], + emitterOrTarget + ); +} + +/** + * Returns the number of registered listeners for `type`. + * @param {EventEmitter | EventTarget} emitterOrTarget + * @param {string | symbol} type + * @returns {number} + */ +function listenerCount(emitterOrTarget, type) { + if (typeof emitterOrTarget.listenerCount === "function") { + return emitterOrTarget.listenerCount(type); + } + const { isEventTarget, kEvents } = require("internal/event_target"); + if (isEventTarget(emitterOrTarget)) { + return emitterOrTarget[kEvents].get(type)?.size ?? 0; + } + throw new ERR_INVALID_ARG_TYPE( + "emitter", + ["EventEmitter", "EventTarget"], + emitterOrTarget + ); +} + +/** + * Creates a `Promise` that is fulfilled when the emitter + * emits the given event. + * @param {EventEmitter} emitter + * @param {string | symbol} name + * @param {{ signal: AbortSignal; }} [options] + * @returns {Promise} + */ +async function once(emitter, name, options = kEmptyObject) { + validateObject(options, "options"); + const { signal } = options; + validateAbortSignal(signal, "options.signal"); + if (signal?.aborted) + throw new AbortError(undefined, { cause: signal.reason }); + return new Promise((resolve, reject) => { + const errorListener = (err) => { + emitter.removeListener(name, resolver); + if (signal != null) { + eventTargetAgnosticRemoveListener(signal, "abort", abortListener); + } + reject(err); + }; + const resolver = (...args) => { + if (typeof emitter.removeListener === "function") { + emitter.removeListener("error", errorListener); + } + if (signal != null) { + eventTargetAgnosticRemoveListener(signal, "abort", abortListener); + } + resolve(args); + }; + + kResistStopPropagation ??= + require("internal/event_target").kResistStopPropagation; + const opts = { + __proto__: null, + once: true, + [kResistStopPropagation]: true, + }; + eventTargetAgnosticAddListener(emitter, name, resolver, opts); + if (name !== "error" && typeof emitter.once === "function") { + // EventTarget does not have `error` event semantics like Node + // EventEmitters, we listen to `error` events only on EventEmitters. 
+ emitter.once("error", errorListener); + } + function abortListener() { + eventTargetAgnosticRemoveListener(emitter, name, resolver); + eventTargetAgnosticRemoveListener(emitter, "error", errorListener); + reject(new AbortError(undefined, { cause: signal?.reason })); + } + if (signal != null) { + eventTargetAgnosticAddListener(signal, "abort", abortListener, { + __proto__: null, + once: true, + [kResistStopPropagation]: true, + }); + } + }); +} + +function createIterResult(value, done) { + return { value, done }; +} + +function eventTargetAgnosticRemoveListener(emitter, name, listener, flags) { + if (typeof emitter.removeListener === "function") { + emitter.removeListener(name, listener); + } else if (typeof emitter.removeEventListener === "function") { + emitter.removeEventListener(name, listener, flags); + } else { + throw new ERR_INVALID_ARG_TYPE("emitter", "EventEmitter", emitter); + } +} + +function eventTargetAgnosticAddListener(emitter, name, listener, flags) { + if (typeof emitter.on === "function") { + if (flags?.once) { + emitter.once(name, listener); + } else { + emitter.on(name, listener); + } + } else if (typeof emitter.addEventListener === "function") { + emitter.addEventListener(name, listener, flags); + } else { + throw new ERR_INVALID_ARG_TYPE("emitter", "EventEmitter", emitter); + } +} + +/** + * Returns an `AsyncIterator` that iterates `event` events. + * @param {EventEmitter} emitter + * @param {string | symbol} event + * @param {{ + * signal: AbortSignal; + * close?: string[]; + * highWaterMark?: number, + * lowWaterMark?: number + * }} [options] + * @returns {AsyncIterator} + */ +function on(emitter, event, options = kEmptyObject) { + // Parameters validation + validateObject(options, "options"); + const signal = options.signal; + validateAbortSignal(signal, "options.signal"); + if (signal?.aborted) + throw new AbortError(undefined, { cause: signal.reason }); + // Support both highWaterMark and highWatermark for backward compatibility + const highWatermark = + options.highWaterMark ?? options.highWatermark ?? NumberMAX_SAFE_INTEGER; + validateInteger(highWatermark, "options.highWaterMark", 1); + // Support both lowWaterMark and lowWatermark for backward compatibility + const lowWatermark = options.lowWaterMark ?? options.lowWatermark ?? 
1; + validateInteger(lowWatermark, "options.lowWaterMark", 1); + + // Preparing controlling queues and variables + FixedQueue ??= require("internal/fixed_queue"); + const unconsumedEvents = new FixedQueue(); + const unconsumedPromises = new FixedQueue(); + let paused = false; + let error = null; + let finished = false; + let size = 0; + + const iterator = ObjectSetPrototypeOf( + { + next() { + // First, we consume all unread events + if (size) { + const value = unconsumedEvents.shift(); + size--; + if (paused && size < lowWatermark) { + emitter.resume(); + paused = false; + } + return PromiseResolve(createIterResult(value, false)); + } + + // Then we error, if an error happened + // This happens one time if at all, because after 'error' + // we stop listening + if (error) { + const p = PromiseReject(error); + // Only the first element errors + error = null; + return p; + } + + // If the iterator is finished, resolve to done + if (finished) return closeHandler(); + + // Wait until an event happens + return new Promise(function (resolve, reject) { + unconsumedPromises.push({ resolve, reject }); + }); + }, + + return() { + return closeHandler(); + }, + + throw(err) { + if (!err || !(err instanceof Error)) { + throw new ERR_INVALID_ARG_TYPE( + "EventEmitter.AsyncIterator", + "Error", + err + ); + } + errorHandler(err); + }, + [SymbolAsyncIterator]() { + return this; + }, + [kWatermarkData]: { + /** + * The current queue size + * @returns {number} + */ + get size() { + return size; + }, + /** + * The low watermark. The emitter is resumed every time size is lower than it + * @returns {number} + */ + get low() { + return lowWatermark; + }, + /** + * The high watermark. The emitter is paused every time size is higher than it + * @returns {number} + */ + get high() { + return highWatermark; + }, + /** + * It checks whether the emitter is paused by the watermark controller or not + * @returns {boolean} + */ + get isPaused() { + return paused; + }, + }, + }, + AsyncIteratorPrototype + ); + + // Adding event handlers + const { addEventListener, removeAll } = listenersController(); + kFirstEventParam ??= require("internal/events/symbols").kFirstEventParam; + addEventListener( + emitter, + event, + options[kFirstEventParam] + ? eventHandler + : function (...args) { + return eventHandler(args); + } + ); + if (event !== "error" && typeof emitter.on === "function") { + addEventListener(emitter, "error", errorHandler); + } + const closeEvents = options?.close; + if (closeEvents?.length) { + for (let i = 0; i < closeEvents.length; i++) { + addEventListener(emitter, closeEvents[i], closeHandler); + } + } + + const abortListenerDisposable = signal + ? 
addAbortListener(signal, abortListener) + : null; + + return iterator; + + function abortListener() { + errorHandler(new AbortError(undefined, { cause: signal?.reason })); + } + + function eventHandler(value) { + if (unconsumedPromises.isEmpty()) { + size++; + if (!paused && size > highWatermark) { + paused = true; + emitter.pause(); + } + unconsumedEvents.push(value); + } else unconsumedPromises.shift().resolve(createIterResult(value, false)); + } + + function errorHandler(err) { + if (unconsumedPromises.isEmpty()) error = err; + else unconsumedPromises.shift().reject(err); + + closeHandler(); + } + + function closeHandler() { + abortListenerDisposable?.[SymbolDispose](); + removeAll(); + finished = true; + const doneResult = createIterResult(undefined, true); + while (!unconsumedPromises.isEmpty()) { + unconsumedPromises.shift().resolve(doneResult); + } + + return PromiseResolve(doneResult); + } +} + +function listenersController() { + const listeners = []; + + return { + addEventListener(emitter, event, handler, flags) { + eventTargetAgnosticAddListener(emitter, event, handler, flags); + ArrayPrototypePush(listeners, [emitter, event, handler, flags]); + }, + removeAll() { + while (listeners.length > 0) { + ReflectApply( + eventTargetAgnosticRemoveListener, + undefined, + ArrayPrototypePop(listeners) + ); + } + }, + }; +} diff --git a/.codesandbox/node/fs.js b/.codesandbox/node/fs.js new file mode 100644 index 00000000..c505db73 --- /dev/null +++ b/.codesandbox/node/fs.js @@ -0,0 +1,3397 @@ +"use strict"; + +const { + ArrayFromAsync, + ArrayPrototypePush, + BigIntPrototypeToString, + Boolean, + FunctionPrototypeCall, + MathMax, + Number, + ObjectDefineProperties, + ObjectDefineProperty, + Promise, + PromisePrototypeThen, + PromiseResolve, + ReflectApply, + SafeMap, + SafeSet, + StringPrototypeCharCodeAt, + StringPrototypeIndexOf, + StringPrototypeSlice, + SymbolDispose, + uncurryThis, +} = primordials; + +const { fs: constants } = internalBinding("constants"); +const { + S_IFIFO, + S_IFLNK, + S_IFMT, + S_IFREG, + S_IFSOCK, + F_OK, + O_WRONLY, + O_SYMLINK, +} = constants; + +const pathModule = require("path"); +const { isArrayBufferView } = require("internal/util/types"); + +const binding = internalBinding("fs"); + +const { createBlobFromFilePath } = require("internal/blob"); + +const { Buffer } = require("buffer"); +const { isBuffer: BufferIsBuffer } = Buffer; +const BufferToString = uncurryThis(Buffer.prototype.toString); +const { + AbortError, + aggregateTwoErrors, + codes: { ERR_ACCESS_DENIED, ERR_FS_FILE_TOO_LARGE, ERR_INVALID_ARG_VALUE }, +} = require("internal/errors"); + +const { FSReqCallback, statValues } = binding; +const { toPathIfFileURL } = require("internal/url"); +const { + customPromisifyArgs: kCustomPromisifyArgsSymbol, + getLazy, + kEmptyObject, + promisify: { custom: kCustomPromisifiedSymbol }, + SideEffectFreeRegExpPrototypeExec, + defineLazyProperties, + isWindows, + isMacOS, +} = require("internal/util"); +const { + constants: { kIoMaxLength, kMaxUserId }, + copyObject, + Dirent, + getDirent, + getDirents, + getOptions, + getValidatedFd, + getValidatedPath, + handleErrorFromBinding, + preprocessSymlinkDestination, + Stats, + getStatFsFromBinding, + getStatsFromBinding, + realpathCacheKey, + stringToFlags, + stringToSymlinkType, + toUnixTimestamp, + validateBufferArray, + validateCpOptions, + validateOffsetLengthRead, + validateOffsetLengthWrite, + validatePath, + validatePosition, + validateRmOptions, + validateRmOptionsSync, + validateRmdirOptions, + 
validateStringAfterArrayBufferView, + warnOnNonPortableTemplate, +} = require("internal/fs/utils"); +const { + CHAR_FORWARD_SLASH, + CHAR_BACKWARD_SLASH, +} = require("internal/constants"); +const { + isInt32, + parseFileMode, + validateBoolean, + validateBuffer, + validateEncoding, + validateFunction, + validateInteger, + validateObject, + validateOneOf, + validateString, + kValidateObjectAllowNullable, +} = require("internal/validators"); + +const permission = require("internal/process/permission"); + +let fs; + +// Lazy loaded +let cpFn; +let cpSyncFn; +let promises = null; +let ReadStream; +let WriteStream; +let rimraf; +let kResistStopPropagation; +let ReadFileContext; + +// These have to be separate because of how graceful-fs happens to do it's +// monkeypatching. +let FileReadStream; +let FileWriteStream; +let Utf8Stream; + +function lazyLoadUtf8Stream() { + Utf8Stream ??= require("internal/streams/fast-utf8-stream"); +} + +// Ensure that callbacks run in the global context. Only use this function +// for callbacks that are passed to the binding layer, callbacks that are +// invoked from JS already run in the proper scope. +function makeCallback(cb) { + validateFunction(cb, "cb"); + + return (...args) => ReflectApply(cb, this, args); +} + +// Special case of `makeCallback()` that is specific to async `*stat()` calls as +// an optimization, since the data passed back to the callback needs to be +// transformed anyway. +function makeStatsCallback(cb) { + validateFunction(cb, "cb"); + + return (err, stats) => { + if (err) return cb(err); + cb(err, getStatsFromBinding(stats)); + }; +} + +const isFd = isInt32; + +function isFileType(stats, fileType) { + // Use stats array directly to avoid creating an fs.Stats instance just for + // our internal use. + let mode = stats[1]; + if (typeof mode === "bigint") mode = Number(mode); + return (mode & S_IFMT) === fileType; +} + +/** + * Tests a user's permissions for the file or directory + * specified by `path`. + * @param {string | Buffer | URL} path + * @param {number} [mode] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function access(path, mode, callback) { + if (typeof mode === "function") { + callback = mode; + mode = F_OK; + } + + path = getValidatedPath(path); + callback = makeCallback(callback); + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.access(path, mode, req); +} + +/** + * Synchronously tests a user's permissions for the file or + * directory specified by `path`. + * @param {string | Buffer | URL} path + * @param {number} [mode] + * @returns {void} + */ +function accessSync(path, mode) { + binding.access(getValidatedPath(path), mode); +} + +/** + * Tests whether or not the given path exists. + * @param {string | Buffer | URL} path + * @param {(exists?: boolean) => any} callback + * @returns {void} + */ +function exists(path, callback) { + validateFunction(callback, "cb"); + + function suppressedCallback(err) { + callback(!err); + } + + try { + fs.access(path, F_OK, suppressedCallback); + } catch { + return callback(false); + } +} + +ObjectDefineProperty(exists, kCustomPromisifiedSymbol, { + __proto__: null, + value: function exists(path) { + // eslint-disable-line func-name-matching + return new Promise((resolve) => fs.exists(path, resolve)); + }, +}); + +let showExistsDeprecation = true; +/** + * Synchronously tests whether or not the given path exists. 
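+ *
+ * A minimal usage sketch (the path is illustrative):
+ *
+ *   const fs = require('node:fs');
+ *   if (fs.existsSync('/tmp/example.txt')) {
+ *     console.log('file is present');
+ *   }
+ *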
+ * @param {string | Buffer | URL} path + * @returns {boolean} + */ +function existsSync(path) { + try { + path = getValidatedPath(path); + } catch (err) { + if (showExistsDeprecation && err?.code === "ERR_INVALID_ARG_TYPE") { + process.emitWarning( + "Passing invalid argument types to fs.existsSync is deprecated", + "DeprecationWarning", + "DEP0187" + ); + showExistsDeprecation = false; + } + return false; + } + + return binding.existsSync(path); +} + +function readFileAfterOpen(err, fd) { + const context = this.context; + + if (err) { + context.callback(err); + return; + } + + context.fd = fd; + + const req = new FSReqCallback(); + req.oncomplete = readFileAfterStat; + req.context = context; + binding.fstat(fd, false, req); +} + +function readFileAfterStat(err, stats) { + const context = this.context; + + if (err) return context.close(err); + + // TODO(BridgeAR): Check if allocating a smaller chunk is better performance + // wise, similar to the promise based version (less peak memory and chunked + // stringify operations vs multiple C++/JS boundary crossings). + const size = (context.size = isFileType(stats, S_IFREG) ? stats[8] : 0); + + if (size > kIoMaxLength) { + err = new ERR_FS_FILE_TOO_LARGE(size); + return context.close(err); + } + + try { + if (size === 0) { + // TODO(BridgeAR): If an encoding is set, use the StringDecoder to concat + // the result and reuse the buffer instead of allocating a new one. + context.buffers = []; + } else { + context.buffer = Buffer.allocUnsafeSlow(size); + } + } catch (err) { + return context.close(err); + } + context.read(); +} + +function checkAborted(signal, callback) { + if (signal?.aborted) { + callback(new AbortError(undefined, { cause: signal.reason })); + return true; + } + return false; +} + +/** + * Asynchronously reads the entire contents of a file. 
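+ *
+ * For example (path and encoding are illustrative):
+ *
+ *   const fs = require('node:fs');
+ *   fs.readFile('/tmp/example.txt', 'utf8', (err, data) => {
+ *     if (err) throw err;
+ *     console.log(data);
+ *   });
+ *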
+ * @param {string | Buffer | URL | number} path + * @param {{ + * encoding?: string | null; + * flag?: string; + * signal?: AbortSignal; + * } | string} [options] + * @param {( + * err?: Error, + * data?: string | Buffer + * ) => any} callback + * @returns {void} + */ +function readFile(path, options, callback) { + callback ||= options; + validateFunction(callback, "cb"); + options = getOptions(options, { flag: "r" }); + ReadFileContext ??= require("internal/fs/read/context"); + const context = new ReadFileContext(callback, options.encoding); + context.isUserFd = isFd(path); // File descriptor ownership + + if (options.signal) { + context.signal = options.signal; + } + if (context.isUserFd) { + process.nextTick(function tick(context) { + FunctionPrototypeCall(readFileAfterOpen, { context }, null, path); + }, context); + return; + } + + if (checkAborted(options.signal, callback)) return; + + const flagsNumber = stringToFlags(options.flag, "options.flag"); + const req = new FSReqCallback(); + req.context = context; + req.oncomplete = readFileAfterOpen; + binding.open(getValidatedPath(path), flagsNumber, 0o666, req); +} + +function tryStatSync(fd, isUserFd) { + const stats = binding.fstat(fd, false, undefined, true /* shouldNotThrow */); + if (stats === undefined && !isUserFd) { + fs.closeSync(fd); + } + return stats; +} + +function tryCreateBuffer(size, fd, isUserFd) { + let threw = true; + let buffer; + try { + if (size > kIoMaxLength) { + throw new ERR_FS_FILE_TOO_LARGE(size); + } + buffer = Buffer.allocUnsafe(size); + threw = false; + } finally { + if (threw && !isUserFd) fs.closeSync(fd); + } + return buffer; +} + +function tryReadSync(fd, isUserFd, buffer, pos, len) { + let threw = true; + let bytesRead; + try { + bytesRead = fs.readSync(fd, buffer, pos, len); + threw = false; + } finally { + if (threw && !isUserFd) fs.closeSync(fd); + } + return bytesRead; +} + +/** + * Synchronously reads the entire contents of a file. + * @param {string | Buffer | URL | number} path + * @param {{ + * encoding?: string | null; + * flag?: string; + * }} [options] + * @returns {string | Buffer} + */ +function readFileSync(path, options) { + options = getOptions(options, { flag: "r" }); + + if (options.encoding === "utf8" || options.encoding === "utf-8") { + if (!isInt32(path)) { + path = getValidatedPath(path); + } + return binding.readFileUtf8(path, stringToFlags(options.flag)); + } + + const isUserFd = isFd(path); // File descriptor ownership + const fd = isUserFd ? path : fs.openSync(path, options.flag, 0o666); + + const stats = tryStatSync(fd, isUserFd); + const size = isFileType(stats, S_IFREG) ? stats[8] : 0; + let pos = 0; + let buffer; // Single buffer with file data + let buffers; // List for when size is unknown + + if (size === 0) { + buffers = []; + } else { + buffer = tryCreateBuffer(size, fd, isUserFd); + } + + let bytesRead; + + if (size !== 0) { + do { + bytesRead = tryReadSync(fd, isUserFd, buffer, pos, size - pos); + pos += bytesRead; + } while (bytesRead !== 0 && pos < size); + } else { + do { + // The kernel lies about many files. + // Go ahead and try to read some bytes. + buffer = Buffer.allocUnsafe(8192); + bytesRead = tryReadSync(fd, isUserFd, buffer, 0, 8192); + if (bytesRead !== 0) { + ArrayPrototypePush(buffers, buffer.slice(0, bytesRead)); + } + pos += bytesRead; + } while (bytesRead !== 0); + } + + if (!isUserFd) fs.closeSync(fd); + + if (size === 0) { + // Data was collected into the buffers list. 
+ buffer = Buffer.concat(buffers, pos); + } else if (pos < size) { + buffer = buffer.slice(0, pos); + } + + if (options.encoding) buffer = buffer.toString(options.encoding); + return buffer; +} + +function defaultCloseCallback(err) { + if (err != null) throw err; +} + +/** + * Closes the file descriptor. + * @param {number} fd + * @param {(err?: Error) => any} [callback] + * @returns {void} + */ +function close(fd, callback = defaultCloseCallback) { + if (callback !== defaultCloseCallback) callback = makeCallback(callback); + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.close(fd, req); +} + +/** + * Synchronously closes the file descriptor. + * @param {number} fd + * @returns {void} + */ +function closeSync(fd) { + binding.close(fd); +} + +/** + * Asynchronously opens a file. + * @param {string | Buffer | URL} path + * @param {string | number} [flags] + * @param {string | number} [mode] + * @param {( + * err?: Error, + * fd?: number + * ) => any} callback + * @returns {void} + */ +function open(path, flags, mode, callback) { + path = getValidatedPath(path); + if (arguments.length < 3) { + callback = flags; + flags = "r"; + mode = 0o666; + } else if (typeof mode === "function") { + callback = mode; + mode = 0o666; + } else { + mode = parseFileMode(mode, "mode", 0o666); + } + const flagsNumber = stringToFlags(flags); + callback = makeCallback(callback); + + const req = new FSReqCallback(); + req.oncomplete = callback; + + binding.open(path, flagsNumber, mode, req); +} + +/** + * Synchronously opens a file. + * @param {string | Buffer | URL} path + * @param {string | number} [flags] + * @param {string | number} [mode] + * @returns {number} + */ +function openSync(path, flags, mode) { + return binding.open( + getValidatedPath(path), + stringToFlags(flags), + parseFileMode(mode, "mode", 0o666) + ); +} + +/** + * @param {string | Buffer | URL } path + * @param {{ + * type?: string; + * }} [options] + * @returns {Promise} + */ +function openAsBlob(path, options = kEmptyObject) { + validateObject(options, "options"); + const type = options.type || ""; + validateString(type, "options.type"); + // The underlying implementation here returns the Blob synchronously for now. + // To give ourselves flexibility to maybe return the Blob asynchronously, + // this API returns a Promise. + path = getValidatedPath(path); + return PromiseResolve(createBlobFromFilePath(path, { type })); +} + +/** + * Reads file from the specified `fd` (file descriptor). + * @param {number} fd + * @param {Buffer | TypedArray | DataView} buffer + * @param {number | { + * offset?: number; + * length?: number; + * position?: number | bigint | null; + * }} [offsetOrOptions] + * @param {number} length + * @param {number | bigint | null} position + * @param {( + * err?: Error, + * bytesRead?: number, + * buffer?: Buffer + * ) => any} callback + * @returns {void} + */ +function read(fd, buffer, offsetOrOptions, length, position, callback) { + fd = getValidatedFd(fd); + + let offset = offsetOrOptions; + let params = null; + if (arguments.length <= 4) { + if (arguments.length === 4) { + // This is fs.read(fd, buffer, options, callback) + validateObject(offsetOrOptions, "options", kValidateObjectAllowNullable); + callback = length; + params = offsetOrOptions; + } else if (arguments.length === 3) { + // This is fs.read(fd, bufferOrParams, callback) + if (!isArrayBufferView(buffer)) { + // This is fs.read(fd, params, callback) + params = buffer; + ({ buffer = Buffer.alloc(16384) } = params ?? 
kEmptyObject); + } + callback = offsetOrOptions; + } else { + // This is fs.read(fd, callback) + callback = buffer; + buffer = Buffer.alloc(16384); + } + + if (params !== undefined) { + validateObject(params, "options", kValidateObjectAllowNullable); + } + ({ + offset = 0, + length = buffer?.byteLength - offset, + position = null, + } = params ?? kEmptyObject); + } + + validateBuffer(buffer); + validateFunction(callback, "cb"); + + if (offset == null) { + offset = 0; + } else { + validateInteger(offset, "offset", 0); + } + + length |= 0; + + if (length === 0) { + return process.nextTick(function tick() { + callback(null, 0, buffer); + }); + } + + if (buffer.byteLength === 0) { + throw new ERR_INVALID_ARG_VALUE( + "buffer", + buffer, + "is empty and cannot be written" + ); + } + + validateOffsetLengthRead(offset, length, buffer.byteLength); + + if (position == null) { + position = -1; + } else { + validatePosition(position, "position", length); + } + + function wrapper(err, bytesRead) { + // Retain a reference to buffer so that it can't be GC'ed too soon. + callback(err, bytesRead || 0, buffer); + } + + const req = new FSReqCallback(); + req.oncomplete = wrapper; + + binding.read(fd, buffer, offset, length, position, req); +} + +ObjectDefineProperty(read, kCustomPromisifyArgsSymbol, { + __proto__: null, + value: ["bytesRead", "buffer"], + enumerable: false, +}); + +/** + * Synchronously reads the file from the + * specified `fd` (file descriptor). + * @param {number} fd + * @param {Buffer | TypedArray | DataView} buffer + * @param {number | { + * offset?: number; + * length?: number; + * position?: number | bigint | null; + * }} [offsetOrOptions] + * @param {number} [length] + * @param {number} [position] + * @returns {number} + */ +function readSync(fd, buffer, offsetOrOptions, length, position) { + fd = getValidatedFd(fd); + + validateBuffer(buffer); + + let offset = offsetOrOptions; + if (arguments.length <= 3 || typeof offsetOrOptions === "object") { + if (offsetOrOptions !== undefined) { + validateObject(offsetOrOptions, "options", kValidateObjectAllowNullable); + } + + ({ + offset = 0, + length = buffer.byteLength - offset, + position = null, + } = offsetOrOptions ?? kEmptyObject); + } + + if (offset === undefined) { + offset = 0; + } else { + validateInteger(offset, "offset", 0); + } + + length |= 0; + + if (length === 0) { + return 0; + } + + if (buffer.byteLength === 0) { + throw new ERR_INVALID_ARG_VALUE( + "buffer", + buffer, + "is empty and cannot be written" + ); + } + + validateOffsetLengthRead(offset, length, buffer.byteLength); + + if (position == null) { + position = -1; + } else { + validatePosition(position, "position", length); + } + + return binding.read(fd, buffer, offset, length, position); +} + +/** + * Reads file from the specified `fd` (file descriptor) + * and writes to an array of `ArrayBufferView`s. 
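+ *
+ * A small sketch of reading into multiple buffers (the file name and buffer
+ * sizes are illustrative):
+ *
+ *   const fs = require('node:fs');
+ *   const fd = fs.openSync('/tmp/example.bin', 'r');
+ *   const buffers = [Buffer.alloc(16), Buffer.alloc(16)];
+ *   fs.readv(fd, buffers, 0, (err, bytesRead) => {
+ *     if (err) throw err;
+ *     console.log(`read ${bytesRead} bytes`);
+ *     fs.closeSync(fd);
+ *   });
+ *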
+ * @param {number} fd + * @param {ArrayBufferView[]} buffers + * @param {number | null} [position] + * @param {( + * err?: Error, + * bytesRead?: number, + * buffers?: ArrayBufferView[] + * ) => any} callback + * @returns {void} + */ +function readv(fd, buffers, position, callback) { + function wrapper(err, read) { + callback(err, read || 0, buffers); + } + + fd = getValidatedFd(fd); + validateBufferArray(buffers); + callback ||= position; + validateFunction(callback, "cb"); + + const req = new FSReqCallback(); + req.oncomplete = wrapper; + + if (typeof position !== "number") position = null; + + binding.readBuffers(fd, buffers, position, req); +} + +ObjectDefineProperty(readv, kCustomPromisifyArgsSymbol, { + __proto__: null, + value: ["bytesRead", "buffers"], + enumerable: false, +}); + +/** + * Synchronously reads file from the + * specified `fd` (file descriptor) and writes to an array + * of `ArrayBufferView`s. + * @param {number} fd + * @param {ArrayBufferView[]} buffers + * @param {number | null} [position] + * @returns {number} + */ +function readvSync(fd, buffers, position) { + fd = getValidatedFd(fd); + validateBufferArray(buffers); + + if (typeof position !== "number") position = null; + + return binding.readBuffers(fd, buffers, position); +} + +/** + * Writes `buffer` to the specified `fd` (file descriptor). + * @param {number} fd + * @param {Buffer | TypedArray | DataView | string} buffer + * @param {number | object} [offsetOrOptions] + * @param {number} [length] + * @param {number | null} [position] + * @param {( + * err?: Error, + * bytesWritten?: number, + * buffer?: Buffer | TypedArray | DataView + * ) => any} callback + * @returns {void} + */ +function write(fd, buffer, offsetOrOptions, length, position, callback) { + function wrapper(err, written) { + // Retain a reference to buffer so that it can't be GC'ed too soon. + callback(err, written || 0, buffer); + } + + fd = getValidatedFd(fd); + + let offset = offsetOrOptions; + if (isArrayBufferView(buffer)) { + callback ||= position || length || offset; + validateFunction(callback, "cb"); + + if (typeof offset === "object") { + ({ + offset = 0, + length = buffer.byteLength - offset, + position = null, + } = offsetOrOptions ?? kEmptyObject); + } + + if (offset == null || typeof offset === "function") { + offset = 0; + } else { + validateInteger(offset, "offset", 0); + } + if (typeof length !== "number") length = buffer.byteLength - offset; + if (typeof position !== "number") position = null; + validateOffsetLengthWrite(offset, length, buffer.byteLength); + + const req = new FSReqCallback(); + req.oncomplete = wrapper; + binding.writeBuffer(fd, buffer, offset, length, position, req); + return; + } + + validateStringAfterArrayBufferView(buffer, "buffer"); + + if (typeof position !== "function") { + if (typeof offset === "function") { + position = offset; + offset = null; + } else { + position = length; + } + length = "utf8"; + } + + const str = buffer; + validateEncoding(str, length); + callback = position; + validateFunction(callback, "cb"); + + const req = new FSReqCallback(); + req.oncomplete = wrapper; + binding.writeString(fd, str, offset, length, req); +} + +ObjectDefineProperty(write, kCustomPromisifyArgsSymbol, { + __proto__: null, + value: ["bytesWritten", "buffer"], + enumerable: false, +}); + +/** + * Synchronously writes `buffer` to the + * specified `fd` (file descriptor). 
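+ *
+ * For example (file name and contents are illustrative):
+ *
+ *   const fs = require('node:fs');
+ *   const fd = fs.openSync('/tmp/example.txt', 'w');
+ *   const written = fs.writeSync(fd, Buffer.from('hello'), 0, 5, 0);
+ *   fs.closeSync(fd); // written === 5
+ *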
+ * @param {number} fd + * @param {Buffer | TypedArray | DataView | string} buffer + * @param {{ + * offset?: number; + * length?: number; + * position?: number | null; + * }} [offsetOrOptions] + * @param {number} [length] + * @param {number} [position] + * @returns {number} + */ +function writeSync(fd, buffer, offsetOrOptions, length, position) { + fd = getValidatedFd(fd); + const ctx = {}; + let result; + + let offset = offsetOrOptions; + if (isArrayBufferView(buffer)) { + if (typeof offset === "object") { + ({ + offset = 0, + length = buffer.byteLength - offset, + position = null, + } = offsetOrOptions ?? kEmptyObject); + } + if (position === undefined) position = null; + if (offset == null) { + offset = 0; + } else { + validateInteger(offset, "offset", 0); + } + if (typeof length !== "number") length = buffer.byteLength - offset; + validateOffsetLengthWrite(offset, length, buffer.byteLength); + result = binding.writeBuffer( + fd, + buffer, + offset, + length, + position, + undefined, + ctx + ); + } else { + validateStringAfterArrayBufferView(buffer, "buffer"); + validateEncoding(buffer, length); + + if (offset === undefined) offset = null; + result = binding.writeString(fd, buffer, offset, length, undefined, ctx); + } + handleErrorFromBinding(ctx); + return result; +} + +/** + * Writes an array of `ArrayBufferView`s to the + * specified `fd` (file descriptor). + * @param {number} fd + * @param {ArrayBufferView[]} buffers + * @param {number | null} [position] + * @param {( + * err?: Error, + * bytesWritten?: number, + * buffers?: ArrayBufferView[] + * ) => any} callback + * @returns {void} + */ +function writev(fd, buffers, position, callback) { + function wrapper(err, written) { + callback(err, written || 0, buffers); + } + + fd = getValidatedFd(fd); + validateBufferArray(buffers); + callback ||= position; + validateFunction(callback, "cb"); + + if (buffers.length === 0) { + process.nextTick(callback, null, 0, buffers); + return; + } + + const req = new FSReqCallback(); + req.oncomplete = wrapper; + + if (typeof position !== "number") position = null; + + binding.writeBuffers(fd, buffers, position, req); +} + +ObjectDefineProperty(writev, kCustomPromisifyArgsSymbol, { + __proto__: null, + value: ["bytesWritten", "buffer"], + enumerable: false, +}); + +/** + * Synchronously writes an array of `ArrayBufferView`s + * to the specified `fd` (file descriptor). + * @param {number} fd + * @param {ArrayBufferView[]} buffers + * @param {number | null} [position] + * @returns {number} + */ +function writevSync(fd, buffers, position) { + fd = getValidatedFd(fd); + validateBufferArray(buffers); + + if (buffers.length === 0) { + return 0; + } + + if (typeof position !== "number") position = null; + + return binding.writeBuffers(fd, buffers, position); +} + +/** + * Asynchronously renames file at `oldPath` to + * the pathname provided as `newPath`. + * @param {string | Buffer | URL} oldPath + * @param {string | Buffer | URL} newPath + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function rename(oldPath, newPath, callback) { + callback = makeCallback(callback); + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.rename( + getValidatedPath(oldPath, "oldPath"), + getValidatedPath(newPath, "newPath"), + req + ); +} + +/** + * Synchronously renames file at `oldPath` to + * the pathname provided as `newPath`. 
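+ *
+ * For example (both paths are illustrative):
+ *
+ *   const fs = require('node:fs');
+ *   fs.renameSync('/tmp/old-name.txt', '/tmp/new-name.txt');
+ *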
+ * @param {string | Buffer | URL} oldPath + * @param {string | Buffer | URL} newPath + * @returns {void} + */ +function renameSync(oldPath, newPath) { + binding.rename( + getValidatedPath(oldPath, "oldPath"), + getValidatedPath(newPath, "newPath") + ); +} + +/** + * Truncates the file. + * @param {string | Buffer | URL} path + * @param {number} [len] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function truncate(path, len, callback) { + if (typeof len === "function") { + callback = len; + len = 0; + } else if (len === undefined) { + len = 0; + } + + validateInteger(len, "len"); + len = MathMax(0, len); + validateFunction(callback, "cb"); + fs.open(path, "r+", (er, fd) => { + if (er) return callback(er); + const req = new FSReqCallback(); + req.oncomplete = function oncomplete(er) { + fs.close(fd, (er2) => { + callback(aggregateTwoErrors(er2, er)); + }); + }; + binding.ftruncate(fd, len, req); + }); +} + +/** + * Synchronously truncates the file. + * @param {string | Buffer | URL} path + * @param {number} [len] + * @returns {void} + */ +function truncateSync(path, len) { + if (len === undefined) { + len = 0; + } + // Allow error to be thrown, but still close fd. + const fd = fs.openSync(path, "r+"); + try { + fs.ftruncateSync(fd, len); + } finally { + fs.closeSync(fd); + } +} + +/** + * Truncates the file descriptor. + * @param {number} fd + * @param {number} [len] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function ftruncate(fd, len = 0, callback) { + if (typeof len === "function") { + callback = len; + len = 0; + } + validateInteger(len, "len"); + len = MathMax(0, len); + callback = makeCallback(callback); + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.ftruncate(fd, len, req); +} + +/** + * Synchronously truncates the file descriptor. + * @param {number} fd + * @param {number} [len] + * @returns {void} + */ +function ftruncateSync(fd, len = 0) { + validateInteger(len, "len"); + binding.ftruncate(fd, len < 0 ? 0 : len); +} + +function lazyLoadCp() { + if (cpFn === undefined) { + ({ cpFn } = require("internal/fs/cp/cp")); + cpFn = require("util").callbackify(cpFn); + ({ cpSyncFn } = require("internal/fs/cp/cp-sync")); + } +} + +function lazyLoadRimraf() { + if (rimraf === undefined) ({ rimraf } = require("internal/fs/rimraf")); +} + +/** + * Asynchronously removes a directory. + * @param {string | Buffer | URL} path + * @param {object} [options] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function rmdir(path, options, callback) { + if (typeof options === "function") { + callback = options; + options = undefined; + } + + if (options?.recursive !== undefined) { + // This API previously accepted a `recursive` option that was deprecated + // and removed. However, in order to make the change more visible, we + // opted to throw an error if recursive is specified rather than removing it + // entirely. + throw new ERR_INVALID_ARG_VALUE( + "options.recursive", + options.recursive, + "is no longer supported" + ); + } + + callback = makeCallback(callback); + path = getValidatedPath(path); + + validateRmdirOptions(options); + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.rmdir(path, req); +} + +/** + * Synchronously removes a directory. 
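+ *
+ * For example (the directory path is illustrative and must be empty):
+ *
+ *   const fs = require('node:fs');
+ *   fs.rmdirSync('/tmp/empty-dir');
+ *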
+ * @param {string | Buffer | URL} path + * @param {object} [options] + * @returns {void} + */ +function rmdirSync(path, options) { + path = getValidatedPath(path); + + if (options?.recursive !== undefined) { + throw new ERR_INVALID_ARG_VALUE( + "options.recursive", + options.recursive, + "is no longer supported" + ); + } + + validateRmdirOptions(options); + binding.rmdir(path); +} + +/** + * Asynchronously removes files and + * directories (modeled on the standard POSIX `rm` utility). + * @param {string | Buffer | URL} path + * @param {{ + * force?: boolean; + * maxRetries?: number; + * recursive?: boolean; + * retryDelay?: number; + * }} [options] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function rm(path, options, callback) { + if (typeof options === "function") { + callback = options; + options = undefined; + } + path = getValidatedPath(path); + + validateRmOptions(path, options, false, (err, options) => { + if (err) { + return callback(err); + } + lazyLoadRimraf(); + return rimraf(path, options, callback); + }); +} + +/** + * Synchronously removes files and + * directories (modeled on the standard POSIX `rm` utility). + * @param {string | Buffer | URL} path + * @param {{ + * force?: boolean; + * maxRetries?: number; + * recursive?: boolean; + * retryDelay?: number; + * }} [options] + * @returns {void} + */ +function rmSync(path, options) { + const opts = validateRmOptionsSync(path, options, false); + return binding.rmSync( + getValidatedPath(path), + opts.maxRetries, + opts.recursive, + opts.retryDelay + ); +} + +/** + * Forces all currently queued I/O operations associated + * with the file to the operating system's synchronized + * I/O completion state. + * @param {number} fd + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function fdatasync(fd, callback) { + const req = new FSReqCallback(); + req.oncomplete = makeCallback(callback); + + if (permission.isEnabled()) { + callback( + new ERR_ACCESS_DENIED( + "fdatasync API is disabled when Permission Model is enabled." + ) + ); + return; + } + binding.fdatasync(fd, req); +} + +/** + * Synchronously forces all currently queued I/O operations + * associated with the file to the operating + * system's synchronized I/O completion state. + * @param {number} fd + * @returns {void} + */ +function fdatasyncSync(fd) { + if (permission.isEnabled()) { + throw new ERR_ACCESS_DENIED( + "fdatasync API is disabled when Permission Model is enabled." + ); + } + binding.fdatasync(fd); +} + +/** + * Requests for all data for the open file descriptor + * to be flushed to the storage device. + * @param {number} fd + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function fsync(fd, callback) { + const req = new FSReqCallback(); + req.oncomplete = makeCallback(callback); + if (permission.isEnabled()) { + callback( + new ERR_ACCESS_DENIED( + "fsync API is disabled when Permission Model is enabled." + ) + ); + return; + } + binding.fsync(fd, req); +} + +/** + * Synchronously requests for all data for the open + * file descriptor to be flushed to the storage device. + * @param {number} fd + * @returns {void} + */ +function fsyncSync(fd) { + if (permission.isEnabled()) { + throw new ERR_ACCESS_DENIED( + "fsync API is disabled when Permission Model is enabled." + ); + } + binding.fsync(fd); +} + +/** + * Asynchronously creates a directory. 
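+ *
+ * For example, creating nested directories (the path is illustrative):
+ *
+ *   const fs = require('node:fs');
+ *   fs.mkdir('/tmp/a/b/c', { recursive: true }, (err) => {
+ *     if (err) throw err;
+ *   });
+ *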
+ * @param {string | Buffer | URL} path + * @param {{ + * recursive?: boolean; + * mode?: string | number; + * } | number} [options] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function mkdir(path, options, callback) { + let mode = 0o777; + let recursive = false; + if (typeof options === "function") { + callback = options; + } else if (typeof options === "number" || typeof options === "string") { + mode = parseFileMode(options, "mode"); + } else if (options) { + if (options.recursive !== undefined) { + recursive = options.recursive; + validateBoolean(recursive, "options.recursive"); + } + if (options.mode !== undefined) { + mode = parseFileMode(options.mode, "options.mode"); + } + } + callback = makeCallback(callback); + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.mkdir(getValidatedPath(path), mode, recursive, req); +} + +/** + * Synchronously creates a directory. + * @param {string | Buffer | URL} path + * @param {{ + * recursive?: boolean; + * mode?: string | number; + * } | number} [options] + * @returns {string | void} + */ +function mkdirSync(path, options) { + let mode = 0o777; + let recursive = false; + if (typeof options === "number" || typeof options === "string") { + mode = parseFileMode(options, "mode"); + } else if (options) { + if (options.recursive !== undefined) { + recursive = options.recursive; + validateBoolean(recursive, "options.recursive"); + } + if (options.mode !== undefined) { + mode = parseFileMode(options.mode, "options.mode"); + } + } + + const result = binding.mkdir(getValidatedPath(path), mode, recursive); + + if (recursive) { + return result; + } +} + +/* + * An recursive algorithm for reading the entire contents of the `basePath` directory. + * This function does not validate `basePath` as a directory. It is passed directly to + * `binding.readdir`. + * @param {string} basePath + * @param {{ encoding: string, withFileTypes: boolean }} options + * @param {( + * err?: Error, + * files?: string[] | Buffer[] | Dirent[] + * ) => any} callback + * @returns {void} + */ +function readdirRecursive(basePath, options, callback) { + const context = { + withFileTypes: Boolean(options.withFileTypes), + encoding: options.encoding, + basePath, + readdirResults: [], + pathsQueue: [basePath], + }; + + let i = 0; + + function read(path) { + const req = new FSReqCallback(); + req.oncomplete = (err, result) => { + if (err) { + callback(err); + return; + } + + if (result === undefined) { + callback(null, context.readdirResults); + return; + } + + processReaddirResult({ + result, + currentPath: path, + context, + }); + + if (i < context.pathsQueue.length) { + read(context.pathsQueue[i++]); + } else { + callback(null, context.readdirResults); + } + }; + + binding.readdir(path, context.encoding, context.withFileTypes, req); + } + + read(context.pathsQueue[i++]); +} + +// Calling `readdir` with `withFileTypes=true`, the result is an array of arrays. +// The first array is the names, and the second array is the types. +// They are guaranteed to be the same length; hence, setting `length` to the length +// of the first array within the result. +const processReaddirResult = (args) => + args.context.withFileTypes ? handleDirents(args) : handleFilePaths(args); + +function handleDirents({ result, currentPath, context }) { + const { 0: names, 1: types } = result; + const { length } = names; + + for (let i = 0; i < length; i++) { + // Avoid excluding symlinks, as they are not directories. 
+ // Refs: https://github.com/nodejs/node/issues/52663 + const fullPath = pathModule.join(currentPath, names[i]); + const dirent = getDirent(currentPath, names[i], types[i]); + ArrayPrototypePush(context.readdirResults, dirent); + + if (dirent.isDirectory() || binding.internalModuleStat(fullPath) === 1) { + ArrayPrototypePush(context.pathsQueue, fullPath); + } + } +} + +function handleFilePaths({ result, currentPath, context }) { + for (let i = 0; i < result.length; i++) { + const resultPath = pathModule.join(currentPath, result[i]); + const relativeResultPath = pathModule.relative( + context.basePath, + resultPath + ); + const stat = binding.internalModuleStat(resultPath); + ArrayPrototypePush(context.readdirResults, relativeResultPath); + + if (stat === 1) { + ArrayPrototypePush(context.pathsQueue, resultPath); + } + } +} + +/** + * An iterative algorithm for reading the entire contents of the `basePath` directory. + * This function does not validate `basePath` as a directory. It is passed directly to + * `binding.readdir`. + * @param {string} basePath + * @param {{ encoding: string, withFileTypes: boolean }} options + * @returns {string[] | Dirent[]} + */ +function readdirSyncRecursive(basePath, options) { + const context = { + withFileTypes: Boolean(options.withFileTypes), + encoding: options.encoding, + basePath, + readdirResults: [], + pathsQueue: [basePath], + }; + + function read(path) { + const readdirResult = binding.readdir( + path, + context.encoding, + context.withFileTypes + ); + + if (readdirResult === undefined) { + return; + } + + processReaddirResult({ + result: readdirResult, + currentPath: path, + context, + }); + } + + for (let i = 0; i < context.pathsQueue.length; i++) { + read(context.pathsQueue[i]); + } + + return context.readdirResults; +} + +/** + * Reads the contents of a directory. + * @param {string | Buffer | URL} path + * @param {string | { + * encoding?: string; + * withFileTypes?: boolean; + * recursive?: boolean; + * }} [options] + * @param {( + * err?: Error, + * files?: string[] | Buffer[] | Dirent[] + * ) => any} callback + * @returns {void} + */ +function readdir(path, options, callback) { + callback = makeCallback(typeof options === "function" ? options : callback); + options = getOptions(options); + path = getValidatedPath(path); + if (options.recursive != null) { + validateBoolean(options.recursive, "options.recursive"); + } + + if (options.recursive) { + // Make shallow copy to prevent mutating options from affecting results + options = copyObject(options); + + readdirRecursive(path, options, callback); + return; + } + + const req = new FSReqCallback(); + if (!options.withFileTypes) { + req.oncomplete = callback; + } else { + req.oncomplete = (err, result) => { + if (err) { + callback(err); + return; + } + getDirents(path, result, callback); + }; + } + binding.readdir(path, options.encoding, !!options.withFileTypes, req); +} + +/** + * Synchronously reads the contents of a directory. 
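+ *
+ * For example, listing entries with their types (the path is illustrative):
+ *
+ *   const fs = require('node:fs');
+ *   for (const entry of fs.readdirSync('/tmp', { withFileTypes: true })) {
+ *     console.log(entry.name, entry.isDirectory());
+ *   }
+ *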
+ * @param {string | Buffer | URL} path + * @param {string | { + * encoding?: string; + * withFileTypes?: boolean; + * recursive?: boolean; + * }} [options] + * @returns {string | Buffer[] | Dirent[]} + */ +function readdirSync(path, options) { + options = getOptions(options); + path = getValidatedPath(path); + if (options.recursive != null) { + validateBoolean(options.recursive, "options.recursive"); + } + + if (options.recursive) { + return readdirSyncRecursive(path, options); + } + + const result = binding.readdir( + path, + options.encoding, + !!options.withFileTypes + ); + + return result !== undefined && options.withFileTypes + ? getDirents(path, result) + : result; +} + +/** + * Invokes the callback with the `fs.Stats` + * for the file descriptor. + * @param {number} fd + * @param {{ bigint?: boolean; }} [options] + * @param {( + * err?: Error, + * stats?: Stats + * ) => any} [callback] + * @returns {void} + */ +function fstat(fd, options = { bigint: false }, callback) { + if (typeof options === "function") { + callback = options; + options = kEmptyObject; + } + callback = makeStatsCallback(callback); + + const req = new FSReqCallback(options.bigint); + req.oncomplete = callback; + binding.fstat(fd, options.bigint, req); +} + +/** + * Retrieves the `fs.Stats` for the symbolic link + * referred to by the `path`. + * @param {string | Buffer | URL} path + * @param {{ bigint?: boolean; }} [options] + * @param {( + * err?: Error, + * stats?: Stats + * ) => any} callback + * @returns {void} + */ +function lstat(path, options = { bigint: false }, callback) { + if (typeof options === "function") { + callback = options; + options = kEmptyObject; + } + callback = makeStatsCallback(callback); + path = getValidatedPath(path); + if (permission.isEnabled() && !permission.has("fs.read", path)) { + const resource = BufferIsBuffer(path) ? BufferToString(path) : path; + callback( + new ERR_ACCESS_DENIED( + "Access to this API has been restricted", + "FileSystemRead", + resource + ) + ); + return; + } + + const req = new FSReqCallback(options.bigint); + req.oncomplete = callback; + binding.lstat(path, options.bigint, req); +} + +/** + * Asynchronously gets the stats of a file. + * @param {string | Buffer | URL} path + * @param {{ bigint?: boolean; }} [options] + * @param {( + * err?: Error, + * stats?: Stats + * ) => any} callback + * @returns {void} + */ +function stat(path, options = { bigint: false }, callback) { + if (typeof options === "function") { + callback = options; + options = kEmptyObject; + } + callback = makeStatsCallback(callback); + + const req = new FSReqCallback(options.bigint); + req.oncomplete = callback; + binding.stat(getValidatedPath(path), options.bigint, req); +} + +function statfs(path, options = { bigint: false }, callback) { + if (typeof options === "function") { + callback = options; + options = kEmptyObject; + } + validateFunction(callback, "cb"); + path = getValidatedPath(path); + const req = new FSReqCallback(options.bigint); + req.oncomplete = (err, stats) => { + if (err) { + return callback(err); + } + + callback(err, getStatFsFromBinding(stats)); + }; + binding.statfs(getValidatedPath(path), options.bigint, req); +} + +/** + * Synchronously retrieves the `fs.Stats` for + * the file descriptor. 
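+ *
+ * For example (the path is illustrative):
+ *
+ *   const fs = require('node:fs');
+ *   const fd = fs.openSync('/tmp/example.txt', 'r');
+ *   console.log(fs.fstatSync(fd).size);
+ *   fs.closeSync(fd);
+ *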
+ * @param {number} fd + * @param {{ bigint?: boolean; }} [options] + * @returns {Stats | undefined} + */ +function fstatSync(fd, options = { bigint: false }) { + const stats = binding.fstat(fd, options.bigint, undefined, false); + if (stats === undefined) { + return; + } + return getStatsFromBinding(stats); +} + +/** + * Synchronously retrieves the `fs.Stats` for + * the symbolic link referred to by the `path`. + * @param {string | Buffer | URL} path + * @param {{ + * bigint?: boolean; + * throwIfNoEntry?: boolean; + * }} [options] + * @returns {Stats | undefined} + */ +function lstatSync(path, options = { bigint: false, throwIfNoEntry: true }) { + path = getValidatedPath(path); + if (permission.isEnabled() && !permission.has("fs.read", path)) { + const resource = BufferIsBuffer(path) ? BufferToString(path) : path; + throw new ERR_ACCESS_DENIED( + "Access to this API has been restricted", + "FileSystemRead", + resource + ); + } + const stats = binding.lstat( + getValidatedPath(path), + options.bigint, + undefined, + options.throwIfNoEntry + ); + + if (stats === undefined) { + return; + } + return getStatsFromBinding(stats); +} + +/** + * Synchronously retrieves the `fs.Stats` + * for the `path`. + * @param {string | Buffer | URL} path + * @param {{ + * bigint?: boolean; + * throwIfNoEntry?: boolean; + * }} [options] + * @returns {Stats} + */ +function statSync(path, options = { bigint: false, throwIfNoEntry: true }) { + const stats = binding.stat( + getValidatedPath(path), + options.bigint, + undefined, + options.throwIfNoEntry + ); + if (stats === undefined) { + return undefined; + } + return getStatsFromBinding(stats); +} + +function statfsSync(path, options = { bigint: false }) { + const stats = binding.statfs(getValidatedPath(path), options.bigint); + return getStatFsFromBinding(stats); +} + +/** + * Reads the contents of a symbolic link + * referred to by `path`. + * @param {string | Buffer | URL} path + * @param {{ encoding?: string; } | string} [options] + * @param {( + * err?: Error, + * linkString?: string | Buffer + * ) => any} callback + * @returns {void} + */ +function readlink(path, options, callback) { + callback = makeCallback(typeof options === "function" ? options : callback); + options = getOptions(options); + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.readlink(getValidatedPath(path), options.encoding, req); +} + +/** + * Synchronously reads the contents of a symbolic link + * referred to by `path`. + * @param {string | Buffer | URL} path + * @param {{ encoding?: string; } | string} [options] + * @returns {string | Buffer} + */ +function readlinkSync(path, options) { + options = getOptions(options); + return binding.readlink(getValidatedPath(path), options.encoding); +} + +/** + * Creates the link called `path` pointing to `target`. + * @param {string | Buffer | URL} target + * @param {string | Buffer | URL} path + * @param {string | null} [type] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function symlink(target, path, type, callback) { + if (callback === undefined) { + callback = makeCallback(type); + type = undefined; + } else { + validateOneOf(type, "type", ["dir", "file", "junction", null, undefined]); + } + + // Due to the nature of Node.js runtime, symlinks has different edge cases that can bypass + // the permission model security guarantees. Thus, this API is disabled unless fs.read + // and fs.write permission has been given. 
+ if (permission.isEnabled() && !permission.has("fs")) { + callback( + new ERR_ACCESS_DENIED( + "fs.symlink API requires full fs.read and fs.write permissions." + ) + ); + return; + } + + target = getValidatedPath(target, "target"); + path = getValidatedPath(path); + + if (isWindows && type == null) { + let absoluteTarget; + try { + // Symlinks targets can be relative to the newly created path. + // Calculate absolute file name of the symlink target, and check + // if it is a directory. Ignore resolve error to keep symlink + // errors consistent between platforms if invalid path is + // provided. + absoluteTarget = pathModule.resolve(path, "..", target); + } catch { + // Continue regardless of error. + } + if (absoluteTarget !== undefined) { + stat(absoluteTarget, (err, stat) => { + const resolvedType = !err && stat.isDirectory() ? "dir" : "file"; + const resolvedFlags = stringToSymlinkType(resolvedType); + const destination = preprocessSymlinkDestination( + target, + resolvedType, + path + ); + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.symlink(destination, path, resolvedFlags, req); + }); + return; + } + } + + const destination = preprocessSymlinkDestination(target, type, path); + + const flags = stringToSymlinkType(type); + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.symlink(destination, path, flags, req); +} + +/** + * Synchronously creates the link called `path` + * pointing to `target`. + * @param {string | Buffer | URL} target + * @param {string | Buffer | URL} path + * @param {string | null} [type] + * @returns {void} + */ +function symlinkSync(target, path, type) { + validateOneOf(type, "type", ["dir", "file", "junction", null, undefined]); + if (isWindows && type == null) { + const absoluteTarget = pathModule.resolve(`${path}`, "..", `${target}`); + if (statSync(absoluteTarget, { throwIfNoEntry: false })?.isDirectory()) { + type = "dir"; + } + } + + // Due to the nature of Node.js runtime, symlinks has different edge cases that can bypass + // the permission model security guarantees. Thus, this API is disabled unless fs.read + // and fs.write permission has been given. + if (permission.isEnabled() && !permission.has("fs")) { + throw new ERR_ACCESS_DENIED( + "fs.symlink API requires full fs.read and fs.write permissions." + ); + } + + target = getValidatedPath(target, "target"); + path = getValidatedPath(path); + + binding.symlink( + preprocessSymlinkDestination(target, type, path), + path, + stringToSymlinkType(type) + ); +} + +/** + * Creates a new link from the `existingPath` + * to the `newPath`. + * @param {string | Buffer | URL} existingPath + * @param {string | Buffer | URL} newPath + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function link(existingPath, newPath, callback) { + callback = makeCallback(callback); + + existingPath = getValidatedPath(existingPath, "existingPath"); + newPath = getValidatedPath(newPath, "newPath"); + + const req = new FSReqCallback(); + req.oncomplete = callback; + + binding.link(existingPath, newPath, req); +} + +/** + * Synchronously creates a new link from the `existingPath` + * to the `newPath`. 
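+ *
+ * For example, creating a hard link (both paths are illustrative):
+ *
+ *   const fs = require('node:fs');
+ *   fs.linkSync('/tmp/original.txt', '/tmp/hard-link.txt');
+ *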
+ * @param {string | Buffer | URL} existingPath + * @param {string | Buffer | URL} newPath + * @returns {void} + */ +function linkSync(existingPath, newPath) { + existingPath = getValidatedPath(existingPath, "existingPath"); + newPath = getValidatedPath(newPath, "newPath"); + + binding.link(existingPath, newPath); +} + +/** + * Asynchronously removes a file or symbolic link. + * @param {string | Buffer | URL} path + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function unlink(path, callback) { + callback = makeCallback(callback); + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.unlink(getValidatedPath(path), req); +} + +/** + * Synchronously removes a file or symbolic link. + * @param {string | Buffer | URL} path + * @returns {void} + */ +function unlinkSync(path) { + binding.unlink(getValidatedPath(path)); +} + +/** + * Sets the permissions on the file. + * @param {number} fd + * @param {string | number} mode + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function fchmod(fd, mode, callback) { + mode = parseFileMode(mode, "mode"); + callback = makeCallback(callback); + + if (permission.isEnabled()) { + callback( + new ERR_ACCESS_DENIED( + "fchmod API is disabled when Permission Model is enabled." + ) + ); + return; + } + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.fchmod(fd, mode, req); +} + +/** + * Synchronously sets the permissions on the file. + * @param {number} fd + * @param {string | number} mode + * @returns {void} + */ +function fchmodSync(fd, mode) { + if (permission.isEnabled()) { + throw new ERR_ACCESS_DENIED( + "fchmod API is disabled when Permission Model is enabled." + ); + } + binding.fchmod(fd, parseFileMode(mode, "mode")); +} + +/** + * Changes the permissions on a symbolic link. + * @param {string | Buffer | URL} path + * @param {number} mode + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function lchmod(path, mode, callback) { + validateFunction(callback, "cb"); + mode = parseFileMode(mode, "mode"); + fs.open(path, O_WRONLY | O_SYMLINK, (err, fd) => { + if (err) { + callback(err); + return; + } + // Prefer to return the chmod error, if one occurs, + // but still try to close, and report closing errors if they occur. + fs.fchmod(fd, mode, (err) => { + fs.close(fd, (err2) => { + callback(aggregateTwoErrors(err2, err)); + }); + }); + }); +} + +/** + * Synchronously changes the permissions on a symbolic link. + * @param {string | Buffer | URL} path + * @param {number} mode + * @returns {void} + */ +function lchmodSync(path, mode) { + const fd = fs.openSync(path, O_WRONLY | O_SYMLINK); + + // Prefer to return the chmod error, if one occurs, + // but still try to close, and report closing errors if they occur. + try { + fs.fchmodSync(fd, mode); + } finally { + fs.closeSync(fd); + } +} + +/** + * Asynchronously changes the permissions of a file. + * @param {string | Buffer | URL} path + * @param {string | number} mode + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function chmod(path, mode, callback) { + path = getValidatedPath(path); + mode = parseFileMode(mode, "mode"); + callback = makeCallback(callback); + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.chmod(path, mode, req); +} + +/** + * Synchronously changes the permissions of a file. 
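+ *
+ * For example (the path and mode are illustrative):
+ *
+ *   const fs = require('node:fs');
+ *   fs.chmodSync('/tmp/example.sh', 0o755);
+ *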
+ * @param {string | Buffer | URL} path + * @param {string | number} mode + * @returns {void} + */ +function chmodSync(path, mode) { + path = getValidatedPath(path); + mode = parseFileMode(mode, "mode"); + + binding.chmod(path, mode); +} + +/** + * Sets the owner of the symbolic link. + * @param {string | Buffer | URL} path + * @param {number} uid + * @param {number} gid + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function lchown(path, uid, gid, callback) { + callback = makeCallback(callback); + path = getValidatedPath(path); + validateInteger(uid, "uid", -1, kMaxUserId); + validateInteger(gid, "gid", -1, kMaxUserId); + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.lchown(path, uid, gid, req); +} + +/** + * Synchronously sets the owner of the symbolic link. + * @param {string | Buffer | URL} path + * @param {number} uid + * @param {number} gid + * @returns {void} + */ +function lchownSync(path, uid, gid) { + path = getValidatedPath(path); + validateInteger(uid, "uid", -1, kMaxUserId); + validateInteger(gid, "gid", -1, kMaxUserId); + binding.lchown(path, uid, gid); +} + +/** + * Sets the owner of the file. + * @param {number} fd + * @param {number} uid + * @param {number} gid + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function fchown(fd, uid, gid, callback) { + validateInteger(uid, "uid", -1, kMaxUserId); + validateInteger(gid, "gid", -1, kMaxUserId); + callback = makeCallback(callback); + if (permission.isEnabled()) { + callback( + new ERR_ACCESS_DENIED( + "fchown API is disabled when Permission Model is enabled." + ) + ); + return; + } + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.fchown(fd, uid, gid, req); +} + +/** + * Synchronously sets the owner of the file. + * @param {number} fd + * @param {number} uid + * @param {number} gid + * @returns {void} + */ +function fchownSync(fd, uid, gid) { + validateInteger(uid, "uid", -1, kMaxUserId); + validateInteger(gid, "gid", -1, kMaxUserId); + if (permission.isEnabled()) { + throw new ERR_ACCESS_DENIED( + "fchown API is disabled when Permission Model is enabled." + ); + } + + binding.fchown(fd, uid, gid); +} + +/** + * Asynchronously changes the owner and group + * of a file. + * @param {string | Buffer | URL} path + * @param {number} uid + * @param {number} gid + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function chown(path, uid, gid, callback) { + callback = makeCallback(callback); + path = getValidatedPath(path); + validateInteger(uid, "uid", -1, kMaxUserId); + validateInteger(gid, "gid", -1, kMaxUserId); + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.chown(path, uid, gid, req); +} + +/** + * Synchronously changes the owner and group + * of a file. + * @param {string | Buffer | URL} path + * @param {number} uid + * @param {number} gid + * @returns {void} + */ +function chownSync(path, uid, gid) { + path = getValidatedPath(path); + validateInteger(uid, "uid", -1, kMaxUserId); + validateInteger(gid, "gid", -1, kMaxUserId); + binding.chown(path, uid, gid); +} + +/** + * Changes the file system timestamps of the object + * referenced by `path`. 
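+ *
+ * For example, setting both timestamps to the current time (the path is
+ * illustrative):
+ *
+ *   const fs = require('node:fs');
+ *   const now = new Date();
+ *   fs.utimes('/tmp/example.txt', now, now, (err) => {
+ *     if (err) throw err;
+ *   });
+ *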
+ * @param {string | Buffer | URL} path + * @param {number | string | Date} atime + * @param {number | string | Date} mtime + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function utimes(path, atime, mtime, callback) { + callback = makeCallback(callback); + path = getValidatedPath(path); + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.utimes(path, toUnixTimestamp(atime), toUnixTimestamp(mtime), req); +} + +/** + * Synchronously changes the file system timestamps + * of the object referenced by `path`. + * @param {string | Buffer | URL} path + * @param {number | string | Date} atime + * @param {number | string | Date} mtime + * @returns {void} + */ +function utimesSync(path, atime, mtime) { + binding.utimes( + getValidatedPath(path), + toUnixTimestamp(atime), + toUnixTimestamp(mtime) + ); +} + +/** + * Changes the file system timestamps of the object + * referenced by the supplied `fd` (file descriptor). + * @param {number} fd + * @param {number | string | Date} atime + * @param {number | string | Date} mtime + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function futimes(fd, atime, mtime, callback) { + atime = toUnixTimestamp(atime, "atime"); + mtime = toUnixTimestamp(mtime, "mtime"); + callback = makeCallback(callback); + + if (permission.isEnabled()) { + callback( + new ERR_ACCESS_DENIED( + "futimes API is disabled when Permission Model is enabled." + ) + ); + return; + } + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.futimes(fd, atime, mtime, req); +} + +/** + * Synchronously changes the file system timestamps + * of the object referenced by the + * supplied `fd` (file descriptor). + * @param {number} fd + * @param {number | string | Date} atime + * @param {number | string | Date} mtime + * @returns {void} + */ +function futimesSync(fd, atime, mtime) { + if (permission.isEnabled()) { + throw new ERR_ACCESS_DENIED( + "futimes API is disabled when Permission Model is enabled." + ); + } + + binding.futimes( + fd, + toUnixTimestamp(atime, "atime"), + toUnixTimestamp(mtime, "mtime") + ); +} + +/** + * Changes the access and modification times of + * a file in the same way as `fs.utimes()`. + * @param {string | Buffer | URL} path + * @param {number | string | Date} atime + * @param {number | string | Date} mtime + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function lutimes(path, atime, mtime, callback) { + callback = makeCallback(callback); + path = getValidatedPath(path); + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.lutimes(path, toUnixTimestamp(atime), toUnixTimestamp(mtime), req); +} + +/** + * Synchronously changes the access and modification + * times of a file in the same way as `fs.utimesSync()`. 
+ * @param {string | Buffer | URL} path + * @param {number | string | Date} atime + * @param {number | string | Date} mtime + * @returns {void} + */ +function lutimesSync(path, atime, mtime) { + binding.lutimes( + getValidatedPath(path), + toUnixTimestamp(atime), + toUnixTimestamp(mtime) + ); +} + +function writeAll( + fd, + isUserFd, + buffer, + offset, + length, + signal, + flush, + callback +) { + if (signal?.aborted) { + const abortError = new AbortError(undefined, { cause: signal.reason }); + if (isUserFd) { + callback(abortError); + } else { + fs.close(fd, (err) => { + callback(aggregateTwoErrors(err, abortError)); + }); + } + return; + } + // write(fd, buffer, offset, length, position, callback) + fs.write(fd, buffer, offset, length, null, (writeErr, written) => { + if (writeErr) { + if (isUserFd) { + callback(writeErr); + } else { + fs.close(fd, (err) => { + callback(aggregateTwoErrors(err, writeErr)); + }); + } + } else if (written === length) { + if (!flush) { + if (isUserFd) { + callback(null); + } else { + fs.close(fd, callback); + } + } else { + fs.fsync(fd, (syncErr) => { + if (syncErr) { + if (isUserFd) { + callback(syncErr); + } else { + fs.close(fd, (err) => { + callback(aggregateTwoErrors(err, syncErr)); + }); + } + } else if (isUserFd) { + callback(null); + } else { + fs.close(fd, callback); + } + }); + } + } else { + offset += written; + length -= written; + writeAll(fd, isUserFd, buffer, offset, length, signal, flush, callback); + } + }); +} + +/** + * Asynchronously writes data to the file. + * @param {string | Buffer | URL | number} path + * @param {string | Buffer | TypedArray | DataView} data + * @param {{ + * encoding?: string | null; + * mode?: number; + * flag?: string; + * signal?: AbortSignal; + * flush?: boolean; + * } | string} [options] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function writeFile(path, data, options, callback) { + callback ||= options; + validateFunction(callback, "cb"); + options = getOptions(options, { + encoding: "utf8", + mode: 0o666, + flag: "w", + flush: false, + }); + const flag = options.flag || "w"; + const flush = options.flush ?? false; + + validateBoolean(flush, "options.flush"); + + if (!isArrayBufferView(data)) { + validateStringAfterArrayBufferView(data, "data"); + data = Buffer.from(data, options.encoding || "utf8"); + } + + if (isFd(path)) { + const isUserFd = true; + const signal = options.signal; + writeAll(path, isUserFd, data, 0, data.byteLength, signal, flush, callback); + return; + } + + if (checkAborted(options.signal, callback)) return; + + fs.open(path, flag, options.mode, (openErr, fd) => { + if (openErr) { + callback(openErr); + } else { + const isUserFd = false; + const signal = options.signal; + writeAll(fd, isUserFd, data, 0, data.byteLength, signal, flush, callback); + } + }); +} + +/** + * Synchronously writes data to the file. + * @param {string | Buffer | URL | number} path + * @param {string | Buffer | TypedArray | DataView} data + * @param {{ + * encoding?: string | null; + * mode?: number; + * flag?: string; + * flush?: boolean; + * } | string} [options] + * @returns {void} + */ +function writeFileSync(path, data, options) { + options = getOptions(options, { + encoding: "utf8", + mode: 0o666, + flag: "w", + flush: false, + }); + + const flush = options.flush ?? 
false; + + validateBoolean(flush, "options.flush"); + + const flag = options.flag || "w"; + + // C++ fast path for string data and UTF8 encoding + if ( + typeof data === "string" && + (options.encoding === "utf8" || options.encoding === "utf-8") + ) { + if (!isInt32(path)) { + path = getValidatedPath(path); + } + + return binding.writeFileUtf8( + path, + data, + stringToFlags(flag), + parseFileMode(options.mode, "mode", 0o666) + ); + } + + if (!isArrayBufferView(data)) { + validateStringAfterArrayBufferView(data, "data"); + data = Buffer.from(data, options.encoding || "utf8"); + } + + const isUserFd = isFd(path); // File descriptor ownership + const fd = isUserFd ? path : fs.openSync(path, flag, options.mode); + + let offset = 0; + let length = data.byteLength; + try { + while (length > 0) { + const written = fs.writeSync(fd, data, offset, length); + offset += written; + length -= written; + } + + if (flush) { + fs.fsyncSync(fd); + } + } finally { + if (!isUserFd) fs.closeSync(fd); + } +} + +/** + * Asynchronously appends data to a file. + * @param {string | Buffer | URL | number} path + * @param {string | Buffer} data + * @param {{ + * encoding?: string | null; + * mode?: number; + * flag?: string; + * flush?: boolean; + * } | string} [options] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function appendFile(path, data, options, callback) { + callback ||= options; + validateFunction(callback, "cb"); + options = getOptions(options, { encoding: "utf8", mode: 0o666, flag: "a" }); + + // Don't make changes directly on options object + options = copyObject(options); + + // Force append behavior when using a supplied file descriptor + if (!options.flag || isFd(path)) options.flag = "a"; + + fs.writeFile(path, data, options, callback); +} + +/** + * Synchronously appends data to a file. + * @param {string | Buffer | URL | number} path + * @param {string | Buffer} data + * @param {{ + * encoding?: string | null; + * mode?: number; + * flag?: string; + * } | string} [options] + * @returns {void} + */ +function appendFileSync(path, data, options) { + options = getOptions(options, { encoding: "utf8", mode: 0o666, flag: "a" }); + + // Don't make changes directly on options object + options = copyObject(options); + + // Force append behavior when using a supplied file descriptor + if (!options.flag || isFd(path)) options.flag = "a"; + + fs.writeFileSync(path, data, options); +} + +/** + * Watches for the changes on `filename`. + * @param {string | Buffer | URL} filename + * @param {string | { + * persistent?: boolean; + * recursive?: boolean; + * encoding?: string; + * signal?: AbortSignal; + * }} [options] + * @param {( + * eventType?: string, + * filename?: string | Buffer + * ) => any} [listener] + * @returns {watchers.FSWatcher} + */ +function watch(filename, options, listener) { + if (typeof options === "function") { + listener = options; + } + options = getOptions(options); + + // Don't make changes directly on options object + options = copyObject(options); + + if (options.persistent === undefined) options.persistent = true; + if (options.recursive === undefined) options.recursive = false; + + let watcher; + const watchers = require("internal/fs/watchers"); + const path = getValidatedPath(filename); + // TODO(anonrig): Remove non-native watcher when/if libuv supports recursive. + // As of November 2022, libuv does not support recursive file watch on all platforms, + // e.g. Linux due to the limitations of inotify. 
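+ // Illustrative caller-level usage (the watched path is hypothetical):
+ //
+ //   fs.watch('/tmp/project', { recursive: true }, (eventType, filename) => {
+ //     console.log(eventType, filename);
+ //   });
+ //
+ // The branch below routes such a recursive request to the non-native watcher
+ // on platforms other than macOS and Windows (e.g. Linux); otherwise the
+ // native FSWatcher handles it directly.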
+ if (options.recursive && !isMacOS && !isWindows) { + const nonNativeWatcher = require("internal/fs/recursive_watch"); + watcher = new nonNativeWatcher.FSWatcher(options); + watcher[watchers.kFSWatchStart](path); + } else { + watcher = new watchers.FSWatcher(); + watcher[watchers.kFSWatchStart]( + path, + options.persistent, + options.recursive, + options.encoding + ); + } + + if (listener) { + watcher.addListener("change", listener); + } + if (options.signal) { + if (options.signal.aborted) { + process.nextTick(() => watcher.close()); + } else { + const listener = () => watcher.close(); + kResistStopPropagation ??= + require("internal/event_target").kResistStopPropagation; + options.signal.addEventListener("abort", listener, { + __proto__: null, + [kResistStopPropagation]: true, + }); + watcher.once("close", () => { + options.signal.removeEventListener("abort", listener); + }); + } + } + + return watcher; +} + +const statWatchers = new SafeMap(); + +/** + * Watches for changes on `filename`. + * @param {string | Buffer | URL} filename + * @param {{ + * bigint?: boolean; + * persistent?: boolean; + * interval?: number; + * }} [options] + * @param {( + * current?: Stats, + * previous?: Stats + * ) => any} listener + * @returns {watchers.StatWatcher} + */ +function watchFile(filename, options, listener) { + filename = getValidatedPath(filename); + filename = pathModule.resolve(filename); + let stat; + + if (options === null || typeof options !== "object") { + listener = options; + options = null; + } + + options = { + // Poll interval in milliseconds. 5007 is what libev used to use. It's + // a little on the slow side but let's stick with it for now to keep + // behavioral changes to a minimum. + interval: 5007, + persistent: true, + ...options, + }; + + validateFunction(listener, "listener"); + + stat = statWatchers.get(filename); + const watchers = require("internal/fs/watchers"); + if (stat === undefined) { + stat = new watchers.StatWatcher(options.bigint); + stat[watchers.kFSStatWatcherStart]( + filename, + options.persistent, + options.interval + ); + statWatchers.set(filename, stat); + } else { + stat[watchers.kFSStatWatcherAddOrCleanRef]("add"); + } + + stat.addListener("change", listener); + return stat; +} + +/** + * Stops watching for changes on `filename`. + * @param {string | Buffer | URL} filename + * @param {() => any} [listener] + * @returns {void} + */ +function unwatchFile(filename, listener) { + filename = getValidatedPath(filename); + filename = pathModule.resolve(filename); + const stat = statWatchers.get(filename); + + if (stat === undefined) return; + const watchers = require("internal/fs/watchers"); + if (typeof listener === "function") { + const beforeListenerCount = stat.listenerCount("change"); + stat.removeListener("change", listener); + if (stat.listenerCount("change") < beforeListenerCount) + stat[watchers.kFSStatWatcherAddOrCleanRef]("clean"); + } else { + stat.removeAllListeners("change"); + stat[watchers.kFSStatWatcherAddOrCleanRef]("cleanAll"); + } + + if (stat.listenerCount("change") === 0) { + stat.stop(); + statWatchers.delete(filename); + } +} + +let splitRoot; +if (isWindows) { + // Regex to find the device root on Windows (e.g. 'c:\\'), including trailing + // slash. 
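+ // For illustration (inputs are hypothetical), only the root portion is
+ // returned, e.g.:
+ //   splitRoot('C:\\Users\\foo')       -> 'C:\\'
+ //   splitRoot('\\\\srv\\share\\dir')  -> '\\\\srv\\share\\'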
+ const splitRootRe = /^(?:[a-zA-Z]:|[\\/]{2}[^\\/]+[\\/][^\\/]+)?[\\/]*/; + splitRoot = function splitRoot(str) { + return SideEffectFreeRegExpPrototypeExec(splitRootRe, str)[0]; + }; +} else { + splitRoot = function splitRoot(str) { + for (let i = 0; i < str.length; ++i) { + if (StringPrototypeCharCodeAt(str, i) !== CHAR_FORWARD_SLASH) + return StringPrototypeSlice(str, 0, i); + } + return str; + }; +} + +function encodeRealpathResult(result, options) { + if (!options || !options.encoding || options.encoding === "utf8") + return result; + const asBuffer = Buffer.from(result); + if (options.encoding === "buffer") { + return asBuffer; + } + return asBuffer.toString(options.encoding); +} + +// Finds the next portion of a (partial) path, up to the next path delimiter +let nextPart; +if (isWindows) { + nextPart = function nextPart(p, i) { + for (; i < p.length; ++i) { + const ch = StringPrototypeCharCodeAt(p, i); + + // Check for a separator character + if (ch === CHAR_BACKWARD_SLASH || ch === CHAR_FORWARD_SLASH) return i; + } + return -1; + }; +} else { + nextPart = function nextPart(p, i) { + return StringPrototypeIndexOf(p, "/", i); + }; +} + +/** + * Returns the resolved pathname. + * @param {string | Buffer | URL} p + * @param {string | { encoding?: string | null; }} [options] + * @returns {string | Buffer} + */ +function realpathSync(p, options) { + options = getOptions(options); + p = toPathIfFileURL(p); + if (typeof p !== "string") { + p += ""; + } + validatePath(p); + p = pathModule.resolve(p); + + const cache = options[realpathCacheKey]; + const maybeCachedResult = cache?.get(p); + if (maybeCachedResult) { + return maybeCachedResult; + } + + const seenLinks = new SafeMap(); + const knownHard = new SafeSet(); + const original = p; + + // Current character position in p + let pos; + // The partial path so far, including a trailing slash if any + let current; + // The partial path without a trailing slash (except when pointing at a root) + let base; + // The partial path scanned in the previous round, with slash + let previous; + + // Skip over roots + current = base = splitRoot(p); + pos = current.length; + + // On windows, check that the root exists. On unix there is no need. + if (isWindows) { + const out = binding.lstat( + base, + false, + undefined, + true /* throwIfNoEntry */ + ); + if (out === undefined) { + return; + } + knownHard.add(base); + } + + // Walk down the path, swapping out linked path parts for their real + // values + // NB: p.length changes. + while (pos < p.length) { + // find the next part + const result = nextPart(p, pos); + previous = current; + if (result === -1) { + const last = StringPrototypeSlice(p, pos); + current += last; + base = previous + last; + pos = p.length; + } else { + current += StringPrototypeSlice(p, pos, result + 1); + base = previous + StringPrototypeSlice(p, pos, result); + pos = result + 1; + } + + // Continue if not a symlink, break if a pipe/socket + if (knownHard.has(base) || cache?.get(base) === base) { + if (isFileType(statValues, S_IFIFO) || isFileType(statValues, S_IFSOCK)) { + break; + } + continue; + } + + let resolvedLink; + const maybeCachedResolved = cache?.get(base); + if (maybeCachedResolved) { + resolvedLink = maybeCachedResolved; + } else { + // Use stats array directly to avoid creating an fs.Stats instance just + // for our internal use. 
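+ // (The call below yields a raw stat array rather than an fs.Stats object;
+ // the slots at indices 0 and 7 are read further down, where they are labelled
+ // dev and ino, to build the seen-links key.)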
+ + const stats = binding.lstat( + base, + true, + undefined, + true /* throwIfNoEntry */ + ); + if (stats === undefined) { + return; + } + + if (!isFileType(stats, S_IFLNK)) { + knownHard.add(base); + cache?.set(base, base); + continue; + } + + // Read the link if it wasn't read before + // dev/ino always return 0 on windows, so skip the check. + let linkTarget = null; + let id; + if (!isWindows) { + const dev = BigIntPrototypeToString(stats[0], 32); + const ino = BigIntPrototypeToString(stats[7], 32); + id = `${dev}:${ino}`; + if (seenLinks.has(id)) { + linkTarget = seenLinks.get(id); + } + } + if (linkTarget === null) { + binding.stat(base, false, undefined, true); + linkTarget = binding.readlink(base, undefined); + } + resolvedLink = pathModule.resolve(previous, linkTarget); + + cache?.set(base, resolvedLink); + if (!isWindows) seenLinks.set(id, linkTarget); + } + + // Resolve the link, then start over + p = pathModule.resolve(resolvedLink, StringPrototypeSlice(p, pos)); + + // Skip over roots + current = base = splitRoot(p); + pos = current.length; + + // On windows, check that the root exists. On unix there is no need. + if (isWindows && !knownHard.has(base)) { + const out = binding.lstat( + base, + false, + undefined, + true /* throwIfNoEntry */ + ); + if (out === undefined) { + return; + } + knownHard.add(base); + } + } + + cache?.set(original, p); + return encodeRealpathResult(p, options); +} + +/** + * Returns the resolved pathname. + * @param {string | Buffer | URL} path + * @param {string | { encoding?: string; }} [options] + * @returns {string | Buffer} + */ +realpathSync.native = (path, options) => { + options = getOptions(options); + return binding.realpath(getValidatedPath(path), options.encoding); +}; + +/** + * Asynchronously computes the canonical pathname by + * resolving `.`, `..` and symbolic links. + * @param {string | Buffer | URL} p + * @param {string | { encoding?: string; }} [options] + * @param {( + * err?: Error, + * resolvedPath?: string | Buffer + * ) => any} callback + * @returns {void} + */ +function realpath(p, options, callback) { + if (typeof options === "function") { + callback = options; + } else { + validateFunction(callback, "cb"); + } + options = getOptions(options); + p = toPathIfFileURL(p); + + if (typeof p !== "string") { + p += ""; + } + validatePath(p); + p = pathModule.resolve(p); + + const seenLinks = new SafeMap(); + const knownHard = new SafeSet(); + + // Current character position in p + let pos; + // The partial path so far, including a trailing slash if any + let current; + // The partial path without a trailing slash (except when pointing at a root) + let base; + // The partial path scanned in the previous round, with slash + let previous; + + current = base = splitRoot(p); + pos = current.length; + + // On windows, check that the root exists. On unix there is no need. 
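+ // (Whichever branch runs, control then passes to LOOP() below, which walks
+ // one path component per turn and resolves symlinks via gotStat()/gotTarget()
+ // before re-entering the loop.)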
+ if (isWindows && !knownHard.has(base)) { + fs.lstat(base, (err) => { + if (err) return callback(err); + knownHard.add(base); + LOOP(); + }); + } else { + process.nextTick(LOOP); + } + + // Walk down the path, swapping out linked path parts for their real + // values + function LOOP() { + // Stop if scanned past end of path + if (pos >= p.length) { + return callback(null, encodeRealpathResult(p, options)); + } + + // find the next part + const result = nextPart(p, pos); + previous = current; + if (result === -1) { + const last = StringPrototypeSlice(p, pos); + current += last; + base = previous + last; + pos = p.length; + } else { + current += StringPrototypeSlice(p, pos, result + 1); + base = previous + StringPrototypeSlice(p, pos, result); + pos = result + 1; + } + + // Continue if not a symlink, break if a pipe/socket + if (knownHard.has(base)) { + if (isFileType(statValues, S_IFIFO) || isFileType(statValues, S_IFSOCK)) { + return callback(null, encodeRealpathResult(p, options)); + } + return process.nextTick(LOOP); + } + + return fs.lstat(base, { bigint: true }, gotStat); + } + + function gotStat(err, stats) { + if (err) return callback(err); + + // If not a symlink, skip to the next path part + if (!stats.isSymbolicLink()) { + knownHard.add(base); + return process.nextTick(LOOP); + } + + // Stat & read the link if not read before. + // Call `gotTarget()` as soon as the link target is known. + // `dev`/`ino` always return 0 on windows, so skip the check. + let id; + if (!isWindows) { + const dev = BigIntPrototypeToString(stats.dev, 32); + const ino = BigIntPrototypeToString(stats.ino, 32); + id = `${dev}:${ino}`; + if (seenLinks.has(id)) { + return gotTarget(null, seenLinks.get(id)); + } + } + fs.stat(base, (err) => { + if (err) return callback(err); + + fs.readlink(base, (err, target) => { + if (!isWindows) seenLinks.set(id, target); + gotTarget(err, target); + }); + }); + } + + function gotTarget(err, target) { + if (err) return callback(err); + + gotResolvedLink(pathModule.resolve(previous, target)); + } + + function gotResolvedLink(resolvedLink) { + // Resolve the link, then start over + p = pathModule.resolve(resolvedLink, StringPrototypeSlice(p, pos)); + current = base = splitRoot(p); + pos = current.length; + + // On windows, check that the root exists. On unix there is no need. + if (isWindows && !knownHard.has(base)) { + fs.lstat(base, (err) => { + if (err) return callback(err); + knownHard.add(base); + LOOP(); + }); + } else { + process.nextTick(LOOP); + } + } +} + +/** + * Asynchronously computes the canonical pathname by + * resolving `.`, `..` and symbolic links. + * @param {string | Buffer | URL} path + * @param {string | { encoding?: string; }} [options] + * @param {( + * err?: Error, + * resolvedPath?: string | Buffer + * ) => any} callback + * @returns {void} + */ +realpath.native = (path, options, callback) => { + callback = makeCallback(callback || options); + options = getOptions(options); + path = getValidatedPath(path); + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.realpath(path, options.encoding, req); +}; + +/** + * Creates a unique temporary directory. + * @param {string | Buffer | URL} prefix + * @param {string | { encoding?: string; }} [options] + * @param {( + * err?: Error, + * directory?: string + * ) => any} callback + * @returns {void} + */ +function mkdtemp(prefix, options, callback) { + callback = makeCallback(typeof options === "function" ? 
options : callback); + options = getOptions(options); + + prefix = getValidatedPath(prefix, "prefix"); + warnOnNonPortableTemplate(prefix); + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.mkdtemp(prefix, options.encoding, req); +} + +/** + * Synchronously creates a unique temporary directory. + * @param {string | Buffer | URL} prefix + * @param {string | { encoding?: string; }} [options] + * @returns {string} + */ +function mkdtempSync(prefix, options) { + options = getOptions(options); + + prefix = getValidatedPath(prefix, "prefix"); + warnOnNonPortableTemplate(prefix); + return binding.mkdtemp(prefix, options.encoding); +} + +/** + * Synchronously creates a unique temporary directory. + * The returned value is a disposable object which removes the + * directory and its contents when disposed. + * @param {string | Buffer | URL} prefix + * @param {string | { encoding?: string; }} [options] + * @returns {object} A disposable object with a "path" property. + */ +function mkdtempDisposableSync(prefix, options) { + options = getOptions(options); + + prefix = getValidatedPath(prefix, "prefix"); + warnOnNonPortableTemplate(prefix); + + const path = binding.mkdtemp(prefix, options.encoding); + // Stash the full path in case of process.chdir() + const fullPath = pathModule.resolve(process.cwd(), path); + + const remove = () => { + binding.rmSync( + fullPath, + 0 /* maxRetries */, + true /* recursive */, + 100 /* retryDelay */ + ); + }; + return { + path, + remove, + [SymbolDispose]() { + remove(); + }, + }; +} + +/** + * Asynchronously copies `src` to `dest`. By + * default, `dest` is overwritten if it already exists. + * @param {string | Buffer | URL} src + * @param {string | Buffer | URL} dest + * @param {number} [mode] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function copyFile(src, dest, mode, callback) { + if (typeof mode === "function") { + callback = mode; + mode = 0; + } + + src = getValidatedPath(src, "src"); + dest = getValidatedPath(dest, "dest"); + callback = makeCallback(callback); + + const req = new FSReqCallback(); + req.oncomplete = callback; + binding.copyFile(src, dest, mode, req); +} + +/** + * Synchronously copies `src` to `dest`. By + * default, `dest` is overwritten if it already exists. + * @param {string | Buffer | URL} src + * @param {string | Buffer | URL} dest + * @param {number} [mode] + * @returns {void} + */ +function copyFileSync(src, dest, mode) { + binding.copyFile( + getValidatedPath(src, "src"), + getValidatedPath(dest, "dest"), + mode + ); +} + +/** + * Asynchronously copies `src` to `dest`. `src` can be a file, directory, or + * symlink. The contents of directories will be copied recursively. + * @param {string | URL} src + * @param {string | URL} dest + * @param {object} [options] + * @param {(err?: Error) => any} callback + * @returns {void} + */ +function cp(src, dest, options, callback) { + if (typeof options === "function") { + callback = options; + options = undefined; + } + callback = makeCallback(callback); + options = validateCpOptions(options); + src = getValidatedPath(src, "src"); + dest = getValidatedPath(dest, "dest"); + lazyLoadCp(); + cpFn(src, dest, options, callback); +} + +/** + * Synchronously copies `src` to `dest`. `src` can be a file, directory, or + * symlink. The contents of directories will be copied recursively. 
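+ *
+ * Illustrative usage from calling code (paths and options are hypothetical):
+ *
+ *   fs.cpSync('./fixtures', './fixtures-copy', { recursive: true });
+ *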
+ * @param {string | URL} src + * @param {string | URL} dest + * @param {object} [options] + * @returns {void} + */ +function cpSync(src, dest, options) { + options = validateCpOptions(options); + src = getValidatedPath(src, "src"); + dest = getValidatedPath(dest, "dest"); + lazyLoadCp(); + cpSyncFn(src, dest, options); +} + +function lazyLoadStreams() { + if (!ReadStream) { + ({ ReadStream, WriteStream } = require("internal/fs/streams")); + FileReadStream = ReadStream; + FileWriteStream = WriteStream; + } +} + +/** + * Creates a readable stream with a default `highWaterMark` + * of 64 KiB. + * @param {string | Buffer | URL} path + * @param {string | { + * flags?: string; + * encoding?: string; + * fd?: number | FileHandle; + * mode?: number; + * autoClose?: boolean; + * emitClose?: boolean; + * start: number; + * end?: number; + * highWaterMark?: number; + * fs?: object | null; + * signal?: AbortSignal | null; + * }} [options] + * @returns {ReadStream} + */ +function createReadStream(path, options) { + lazyLoadStreams(); + return new ReadStream(path, options); +} + +/** + * Creates a write stream. + * @param {string | Buffer | URL} path + * @param {string | { + * flags?: string; + * encoding?: string; + * fd?: number | FileHandle; + * mode?: number; + * autoClose?: boolean; + * emitClose?: boolean; + * start: number; + * fs?: object | null; + * signal?: AbortSignal | null; + * highWaterMark?: number; + * flush?: boolean; + * }} [options] + * @returns {WriteStream} + */ +function createWriteStream(path, options) { + lazyLoadStreams(); + return new WriteStream(path, options); +} + +const lazyGlob = getLazy(() => require("internal/fs/glob").Glob); + +function glob(pattern, options, callback) { + if (typeof options === "function") { + callback = options; + options = undefined; + } + callback = makeCallback(callback); + + const Glob = lazyGlob(); + PromisePrototypeThen( + ArrayFromAsync(new Glob(pattern, options).glob()), + (res) => callback(null, res), + callback + ); +} + +function globSync(pattern, options) { + const Glob = lazyGlob(); + return new Glob(pattern, options).globSync(); +} + +module.exports = fs = { + appendFile, + appendFileSync, + access, + accessSync, + chown, + chownSync, + chmod, + chmodSync, + close, + closeSync, + copyFile, + copyFileSync, + cp, + cpSync, + createReadStream, + createWriteStream, + exists, + existsSync, + fchown, + fchownSync, + fchmod, + fchmodSync, + fdatasync, + fdatasyncSync, + fstat, + fstatSync, + fsync, + fsyncSync, + ftruncate, + ftruncateSync, + futimes, + futimesSync, + glob, + globSync, + lchown, + lchownSync, + lchmod: constants.O_SYMLINK !== undefined ? lchmod : undefined, + lchmodSync: constants.O_SYMLINK !== undefined ? 
lchmodSync : undefined, + link, + linkSync, + lstat, + lstatSync, + lutimes, + lutimesSync, + mkdir, + mkdirSync, + mkdtemp, + mkdtempSync, + mkdtempDisposableSync, + open, + openSync, + openAsBlob, + readdir, + readdirSync, + read, + readSync, + readv, + readvSync, + readFile, + readFileSync, + readlink, + readlinkSync, + realpath, + realpathSync, + rename, + renameSync, + rm, + rmSync, + rmdir, + rmdirSync, + stat, + statfs, + statSync, + statfsSync, + symlink, + symlinkSync, + truncate, + truncateSync, + unwatchFile, + unlink, + unlinkSync, + utimes, + utimesSync, + watch, + watchFile, + writeFile, + writeFileSync, + write, + writeSync, + writev, + writevSync, + Dirent, + Stats, + + get ReadStream() { + lazyLoadStreams(); + return ReadStream; + }, + + set ReadStream(val) { + ReadStream = val; + }, + + get WriteStream() { + lazyLoadStreams(); + return WriteStream; + }, + + set WriteStream(val) { + WriteStream = val; + }, + + // Legacy names... these have to be separate because of how graceful-fs + // (and possibly other) modules monkey patch the values. + get FileReadStream() { + lazyLoadStreams(); + return FileReadStream; + }, + + set FileReadStream(val) { + FileReadStream = val; + }, + + get FileWriteStream() { + lazyLoadStreams(); + return FileWriteStream; + }, + + set FileWriteStream(val) { + FileWriteStream = val; + }, + + get Utf8Stream() { + lazyLoadUtf8Stream(); + return Utf8Stream; + }, + + // For tests + _toUnixTimestamp: toUnixTimestamp, +}; + +defineLazyProperties(fs, "internal/fs/dir", ["Dir", "opendir", "opendirSync"]); + +ObjectDefineProperties(fs, { + constants: { + __proto__: null, + configurable: false, + enumerable: true, + value: constants, + }, + promises: { + __proto__: null, + configurable: true, + enumerable: true, + get() { + promises ??= require("internal/fs/promises").exports; + return promises; + }, + }, +}); diff --git a/.codesandbox/node/hello-world.js b/.codesandbox/node/hello-world.js new file mode 100644 index 00000000..85722910 --- /dev/null +++ b/.codesandbox/node/hello-world.js @@ -0,0 +1,14 @@ +const http = require('node:http'); + +const hostname = '127.0.0.1'; +const port = 3000; + +const server = http.createServer((req, res) => { + res.statusCode = 200; + res.setHeader('Content-Type', 'text/plain'); + res.end('Hello, World!\n'); +}); + +server.listen(port, hostname, () => { + console.log(`Server running at http://${hostname}:${port}/`); +}); \ No newline at end of file diff --git a/.codesandbox/node/https.js b/.codesandbox/node/https.js new file mode 100644 index 00000000..05fb02fe --- /dev/null +++ b/.codesandbox/node/https.js @@ -0,0 +1,660 @@ +'use strict'; + +const { + ArrayPrototypeIndexOf, + ArrayPrototypePush, + ArrayPrototypeShift, + ArrayPrototypeSplice, + ArrayPrototypeUnshift, + FunctionPrototypeCall, + JSONStringify, + NumberParseInt, + ObjectAssign, + ObjectSetPrototypeOf, + ReflectApply, + ReflectConstruct, + SymbolAsyncDispose, +} = primordials; + +const { + assertCrypto, + kEmptyObject, + promisify, + once, +} = require('internal/util'); +const { ERR_PROXY_TUNNEL } = require('internal/errors').codes; +assertCrypto(); + +const tls = require('tls'); +const { + kProxyConfig, + checkShouldUseProxy, + filterEnvForProxies, + kWaitForProxyTunnel, +} = require('internal/http'); +const { Agent: HttpAgent } = require('_http_agent'); +const { + httpServerPreClose, + Server: HttpServer, + setupConnectionsTracking, + storeHTTPOptions, + _connectionListener, +} = require('_http_server'); +const { ClientRequest } = require('_http_client'); +let 
debug = require('internal/util/debuglog').debuglog('https', (fn) => { + debug = fn; +}); +const net = require('net'); +const { URL, urlToHttpOptions, isURL } = require('internal/url'); +const { validateObject } = require('internal/validators'); +const { isIP } = require('internal/net'); +const assert = require('internal/assert'); +const { getOptionValue } = require('internal/options'); + +function Server(opts, requestListener) { + if (!(this instanceof Server)) return new Server(opts, requestListener); + + let ALPNProtocols = ['http/1.1']; + if (typeof opts === 'function') { + requestListener = opts; + opts = kEmptyObject; + } else if (opts == null) { + opts = kEmptyObject; + } else { + validateObject(opts, 'options'); + // Only one of ALPNProtocols and ALPNCallback can be set, so make sure we + // only set a default ALPNProtocols if the caller has not set either of them + if (opts.ALPNProtocols || opts.ALPNCallback) + ALPNProtocols = undefined; + } + + FunctionPrototypeCall(storeHTTPOptions, this, opts); + FunctionPrototypeCall(tls.Server, this, + { + noDelay: true, + ALPNProtocols, + ...opts, + }, + _connectionListener); + + this.httpAllowHalfOpen = false; + + if (requestListener) { + this.addListener('request', requestListener); + } + + this.addListener('tlsClientError', function addListener(err, conn) { + if (!this.emit('clientError', err, conn)) + conn.destroy(err); + }); + + this.timeout = 0; + this.maxHeadersCount = null; + this.on('listening', setupConnectionsTracking); +} + +ObjectSetPrototypeOf(Server.prototype, tls.Server.prototype); +ObjectSetPrototypeOf(Server, tls.Server); + +Server.prototype.closeAllConnections = HttpServer.prototype.closeAllConnections; + +Server.prototype.closeIdleConnections = HttpServer.prototype.closeIdleConnections; + +Server.prototype.setTimeout = HttpServer.prototype.setTimeout; + +Server.prototype.close = function close() { + httpServerPreClose(this); + ReflectApply(tls.Server.prototype.close, this, arguments); + return this; +}; + +Server.prototype[SymbolAsyncDispose] = async function() { + await FunctionPrototypeCall(promisify(this.close), this); +}; + +/** + * Creates a new `https.Server` instance. + * @param {{ + * IncomingMessage?: IncomingMessage; + * ServerResponse?: ServerResponse; + * insecureHTTPParser?: boolean; + * maxHeaderSize?: number; + * }} [opts] + * @param {Function} [requestListener] + * @returns {Server} + */ +function createServer(opts, requestListener) { + return new Server(opts, requestListener); +} + +// When proxying a HTTPS request, the following needs to be done: +// https://datatracker.ietf.org/doc/html/rfc9110#CONNECT +// 1. Send a CONNECT request to the proxy server. +// 2. Wait for 200 connection established response to establish the tunnel. +// 3. Perform TLS handshake with the endpoint over the socket. +// 4. Tunnel the request using the established connection. +// +// This function computes the tunnel configuration for HTTPS requests. +// The handling of the tunnel connection is done in createConnection. 
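+//
+// For illustration (example.com is a placeholder endpoint), the payload this
+// function assembles looks roughly like:
+//
+//   CONNECT example.com:443 HTTP/1.1
+//   proxy-authorization: <credentials>   (only when the proxy config has auth)
+//   proxy-connection: keep-alive         (only for keep-alive or socket-capped agents)
+//   host: example.com:443
+//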
+function getTunnelConfigForProxiedHttps(agent, reqOptions) { + if (!agent[kProxyConfig]) { + return null; + } + if ((reqOptions.protocol || agent.protocol) !== 'https:') { + return null; + } + const shouldUseProxy = checkShouldUseProxy(agent[kProxyConfig], reqOptions); + debug(`getTunnelConfigForProxiedHttps should use proxy for ${reqOptions.host}:${reqOptions.port}:`, shouldUseProxy); + if (!shouldUseProxy) { + return null; + } + const { auth, href } = agent[kProxyConfig]; + // The request is a HTTPS request, assemble the payload for establishing the tunnel. + const ipType = isIP(reqOptions.host); + // The request target must put IPv6 address in square brackets. + // Here reqOptions is already processed by urlToHttpOptions so we'll add them back if necessary. + // See https://www.rfc-editor.org/rfc/rfc3986#section-3.2.2 + const requestHost = ipType === 6 ? `[${reqOptions.host}]` : reqOptions.host; + const requestPort = reqOptions.port || agent.defaultPort; + const endpoint = `${requestHost}:${requestPort}`; + // The ClientRequest constructor should already have validated the host and the port. + // When the request options come from a string invalid characters would be stripped away, + // when it's an object ERR_INVALID_CHAR would be thrown. Here we just assert in case + // agent.createConnection() is called with invalid options. + assert(!endpoint.includes('\r')); + assert(!endpoint.includes('\n')); + + let payload = `CONNECT ${endpoint} HTTP/1.1\r\n`; + // The parseProxyConfigFromEnv() method should have already validated the authorization header + // value. + if (auth) { + payload += `proxy-authorization: ${auth}\r\n`; + } + if (agent.keepAlive || agent.maxSockets !== Infinity) { + payload += 'proxy-connection: keep-alive\r\n'; + } + payload += `host: ${endpoint}`; + payload += '\r\n\r\n'; + + const result = { + __proto__: null, + proxyTunnelPayload: payload, + requestOptions: { // Options used for the request sent after the tunnel is established. + __proto__: null, + servername: reqOptions.servername || ipType ? undefined : reqOptions.host, + ...reqOptions, + }, + }; + debug(`updated request for HTTPS proxy ${href} with`, result); + return result; +}; + +function establishTunnel(agent, socket, options, tunnelConfig, afterSocket) { + const { proxyTunnelPayload } = tunnelConfig; + // By default, the socket is in paused mode. Read to look for the 200 + // connection established response. + function read() { + let chunk; + while ((chunk = socket.read()) !== null) { + if (onProxyData(chunk) !== -1) { + break; + } + } + socket.on('readable', read); + } + + function cleanup() { + socket.removeListener('end', onProxyEnd); + socket.removeListener('error', onProxyError); + socket.removeListener('readable', read); + socket.setTimeout(0); // Clear the timeout for the tunnel establishment. + } + + function onProxyError(err) { + debug('onProxyError', err); + cleanup(); + afterSocket(err, socket); + } + + // Read the headers from the chunks and check for the status code. If it fails we + // clean up the socket and return an error. Otherwise we establish the tunnel. 
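+ // For illustration, a successful reply from the proxy looks roughly like
+ // "HTTP/1.1 200 Connection Established\r\n...\r\n\r\n"; only the status code
+ // is inspected, and anything other than 200 tears the tunnel down below.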
+ let buffer = ''; + function onProxyData(chunk) { + const str = chunk.toString(); + debug('onProxyData', str); + buffer += str; + const headerEndIndex = buffer.indexOf('\r\n\r\n'); + if (headerEndIndex === -1) return headerEndIndex; + const statusLine = buffer.substring(0, buffer.indexOf('\r\n')); + const statusCode = statusLine.split(' ')[1]; + if (statusCode !== '200') { + debug(`onProxyData receives ${statusCode}, cleaning up`); + cleanup(); + const targetHost = proxyTunnelPayload.split('\r')[0].split(' ')[1]; + const message = `Failed to establish tunnel to ${targetHost} via ${agent[kProxyConfig].href}: ${statusLine}`; + const err = new ERR_PROXY_TUNNEL(message); + err.statusCode = NumberParseInt(statusCode); + afterSocket(err, socket); + } else { + // https://datatracker.ietf.org/doc/html/rfc9110#CONNECT + // RFC 9110 says that it can be 2xx but in the real world, proxy clients generally only + // accepts 200. + // Proxy servers are not supposed to send anything after the headers - the payload must be + // be empty. So after this point we will proceed with the tunnel e.g. starting TLS handshake. + debug('onProxyData receives 200, establishing tunnel'); + cleanup(); + + // Reuse the tunneled socket to perform the TLS handshake with the endpoint, + // then send the request. + const { requestOptions } = tunnelConfig; + tunnelConfig.requestOptions = null; + requestOptions.socket = socket; + let tunneldSocket; + const onTLSHandshakeError = (err) => { + debug('Propagate error event from tunneled socket to tunnel socket'); + afterSocket(err, tunneldSocket); + }; + tunneldSocket = tls.connect(requestOptions, () => { + debug('TLS handshake over tunnel succeeded'); + tunneldSocket.removeListener('error', onTLSHandshakeError); + afterSocket(null, tunneldSocket); + }); + tunneldSocket.on('free', () => { + debug('Propagate free event from tunneled socket to tunnel socket'); + socket.emit('free'); + }); + tunneldSocket.on('error', onTLSHandshakeError); + } + return headerEndIndex; + } + + function onProxyEnd() { + cleanup(); + const err = new ERR_PROXY_TUNNEL('Connection to establish proxy tunnel ended unexpectedly'); + afterSocket(err, socket); + } + + const proxyTunnelTimeout = tunnelConfig.requestOptions.timeout; + debug('proxyTunnelTimeout', proxyTunnelTimeout, options.timeout); + // It may be worth a separate timeout error/event. + // But it also makes sense to treat the tunnel establishment timeout as + // a normal timeout for the request. + function onProxyTimeout() { + debug('onProxyTimeout', proxyTunnelTimeout); + cleanup(); + const err = new ERR_PROXY_TUNNEL(`Connection to establish proxy tunnel timed out after ${proxyTunnelTimeout}ms`); + err.proxyTunnelTimeout = proxyTunnelTimeout; + afterSocket(err, socket); + } + + if (proxyTunnelTimeout && proxyTunnelTimeout > 0) { + debug('proxy tunnel setTimeout', proxyTunnelTimeout); + socket.setTimeout(proxyTunnelTimeout, onProxyTimeout); + } + + socket.on('error', onProxyError); + socket.on('end', onProxyEnd); + socket.write(proxyTunnelPayload); + + read(); +} + +// HTTPS agents. +// See ProxyConfig in internal/http.js for how the connection should be handled +// when the agent is configured to use a proxy server. +function createConnection(...args) { + // XXX: This signature (port, host, options) is different from all the other + // createConnection() methods. 
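+ // The parsing below effectively accepts (values are illustrative):
+ //   createConnection({ host: 'example.com', port: 443 }[, cb])
+ //   createConnection(443[, 'example.com'][, options][, cb])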
+ let options, cb; + if (args[0] !== null && typeof args[0] === 'object') { + options = args[0]; + } else if (args[1] !== null && typeof args[1] === 'object') { + options = { ...args[1] }; + } else if (args[2] === null || typeof args[2] !== 'object') { + options = {}; + } else { + options = { ...args[2] }; + } + if (typeof args[0] === 'number') { + options.port = args[0]; + } + if (typeof args[1] === 'string') { + options.host = args[1]; + } + if (typeof args[args.length - 1] === 'function') { + cb = args[args.length - 1]; + } + + debug('createConnection', options); + + if (options._agentKey) { + const session = this._getSession(options._agentKey); + if (session) { + debug('reuse session for %j', options._agentKey); + options = { + session, + ...options, + }; + } + } + + let socket; + const tunnelConfig = getTunnelConfigForProxiedHttps(this, options); + debug(`https createConnection should use proxy for ${options.host}:${options.port}:`, tunnelConfig); + + if (!tunnelConfig) { + socket = tls.connect(options); + } else { + const connectOptions = { + ...this[kProxyConfig].proxyConnectionOptions, + }; + debug('Create proxy socket', connectOptions); + const onError = (err) => { + cleanupAndPropagate(err, socket); + }; + const proxyTunnelTimeout = tunnelConfig.requestOptions.timeout; + const onTimeout = () => { + const err = new ERR_PROXY_TUNNEL(`Connection to establish proxy tunnel timed out after ${proxyTunnelTimeout}ms`); + err.proxyTunnelTimeout = proxyTunnelTimeout; + cleanupAndPropagate(err, socket); + }; + const cleanupAndPropagate = once((err, currentSocket) => { + debug('cleanupAndPropagate', err); + socket.removeListener('error', onError); + socket.removeListener('timeout', onTimeout); + // An error occurred during tunnel establishment, in that case just destroy the socket. + // and propagate the error to the callback. + + // When the error comes from unexpected status code, the stream is still in good shape, + // in that case let req.onSocket handle the destruction instead. + if (err && err.code === 'ERR_PROXY_TUNNEL' && !err.statusCode) { + socket.destroy(); + } + // This error should go to: + // -> oncreate in Agent.prototype.createSocket + // -> closure in Agent.prototype.addRequest or Agent.prototype.removeSocket + if (cb) { + cb(err, currentSocket); + } + }); + const onProxyConnection = () => { + socket.removeListener('error', onError); + establishTunnel(this, socket, options, tunnelConfig, cleanupAndPropagate); + }; + if (this[kProxyConfig].protocol === 'http:') { + socket = net.connect(connectOptions, onProxyConnection); + } else { + socket = tls.connect(connectOptions, onProxyConnection); + } + + socket.on('error', onError); + if (proxyTunnelTimeout) { + socket.setTimeout(proxyTunnelTimeout, onTimeout); + } + socket[kWaitForProxyTunnel] = true; + } + + if (options._agentKey) { + // Cache new session for reuse + socket.on('session', (session) => { + this._cacheSession(options._agentKey, session); + }); + + // Evict session on error + socket.once('close', (err) => { + if (err) + this._evictSession(options._agentKey); + }); + } + + return socket; +} + +/** + * Creates a new `HttpAgent` instance. 
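+ *
+ * Illustrative usage from calling code (the URL and option values are
+ * hypothetical):
+ *
+ *   const agent = new Agent({ keepAlive: true, maxCachedSessions: 100 });
+ *   https.get('https://example.com/', { agent }, (res) => { res.resume(); });
+ *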
+ * @param {{ + * keepAlive?: boolean; + * keepAliveMsecs?: number; + * maxSockets?: number; + * maxTotalSockets?: number; + * maxFreeSockets?: number; + * scheduling?: string; + * timeout?: number; + * maxCachedSessions?: number; + * servername?: string; + * defaultPort?: number; + * protocol?: string; + * proxyEnv?: object; + * }} [options] + * @class + */ +function Agent(options) { + if (!(this instanceof Agent)) + return new Agent(options); + + options = { __proto__: null, ...options }; + options.defaultPort ??= 443; + options.protocol ??= 'https:'; + FunctionPrototypeCall(HttpAgent, this, options); + + this.maxCachedSessions = this.options.maxCachedSessions; + if (this.maxCachedSessions === undefined) + this.maxCachedSessions = 100; + + this._sessionCache = { + map: {}, + list: [], + }; +} +ObjectSetPrototypeOf(Agent.prototype, HttpAgent.prototype); +ObjectSetPrototypeOf(Agent, HttpAgent); +Agent.prototype.createConnection = createConnection; + +/** + * Gets a unique name for a set of options. + * @param {{ + * host: string; + * port: number; + * localAddress: string; + * family: number; + * }} [options] + * @returns {string} + */ +Agent.prototype.getName = function getName(options = kEmptyObject) { + let name = FunctionPrototypeCall(HttpAgent.prototype.getName, this, options); + + name += ':'; + if (options.ca) + name += options.ca; + + name += ':'; + if (options.cert) + name += options.cert; + + name += ':'; + if (options.clientCertEngine) + name += options.clientCertEngine; + + name += ':'; + if (options.ciphers) + name += options.ciphers; + + name += ':'; + if (options.key) + name += options.key; + + name += ':'; + if (options.pfx) + name += options.pfx; + + name += ':'; + if (options.rejectUnauthorized !== undefined) + name += options.rejectUnauthorized; + + name += ':'; + if (options.servername && options.servername !== options.host) + name += options.servername; + + name += ':'; + if (options.minVersion) + name += options.minVersion; + + name += ':'; + if (options.maxVersion) + name += options.maxVersion; + + name += ':'; + if (options.secureProtocol) + name += options.secureProtocol; + + name += ':'; + if (options.crl) + name += options.crl; + + name += ':'; + if (options.honorCipherOrder !== undefined) + name += options.honorCipherOrder; + + name += ':'; + if (options.ecdhCurve) + name += options.ecdhCurve; + + name += ':'; + if (options.dhparam) + name += options.dhparam; + + name += ':'; + if (options.secureOptions !== undefined) + name += options.secureOptions; + + name += ':'; + if (options.sessionIdContext) + name += options.sessionIdContext; + + name += ':'; + if (options.sigalgs) + name += JSONStringify(options.sigalgs); + + name += ':'; + if (options.privateKeyIdentifier) + name += options.privateKeyIdentifier; + + name += ':'; + if (options.privateKeyEngine) + name += options.privateKeyEngine; + + return name; +}; + +Agent.prototype._getSession = function _getSession(key) { + return this._sessionCache.map[key]; +}; + +Agent.prototype._cacheSession = function _cacheSession(key, session) { + // Cache is disabled + if (this.maxCachedSessions === 0) + return; + + // Fast case - update existing entry + if (this._sessionCache.map[key]) { + this._sessionCache.map[key] = session; + return; + } + + // Put new entry + if (this._sessionCache.list.length >= this.maxCachedSessions) { + const oldKey = ArrayPrototypeShift(this._sessionCache.list); + debug('evicting %j', oldKey); + delete this._sessionCache.map[oldKey]; + } + + ArrayPrototypePush(this._sessionCache.list, key); + 
this._sessionCache.map[key] = session; +}; + +Agent.prototype._evictSession = function _evictSession(key) { + const index = ArrayPrototypeIndexOf(this._sessionCache.list, key); + if (index === -1) + return; + + ArrayPrototypeSplice(this._sessionCache.list, index, 1); + delete this._sessionCache.map[key]; +}; + +const globalAgent = new Agent({ + keepAlive: true, scheduling: 'lifo', timeout: 5000, + // This normalized from both --use-env-proxy and NODE_USE_ENV_PROXY settings. + proxyEnv: getOptionValue('--use-env-proxy') ? filterEnvForProxies(process.env) : undefined, +}); + +/** + * Makes a request to a secure web server. + * @param {...any} args + * @returns {ClientRequest} + */ +function request(...args) { + let options = {}; + + if (typeof args[0] === 'string') { + const urlStr = ArrayPrototypeShift(args); + options = urlToHttpOptions(new URL(urlStr)); + } else if (isURL(args[0])) { + options = urlToHttpOptions(ArrayPrototypeShift(args)); + } + + if (args[0] && typeof args[0] !== 'function') { + ObjectAssign(options, ArrayPrototypeShift(args)); + } + + options._defaultAgent = module.exports.globalAgent; + ArrayPrototypeUnshift(args, options); + + return ReflectConstruct(ClientRequest, args); +} + +/** + * Makes a GET request to a secure web server. + * @param {string | URL} input + * @param {{ + * agent?: Agent | boolean; + * auth?: string; + * createConnection?: Function; + * defaultPort?: number; + * family?: number; + * headers?: object; + * hints?: number; + * host?: string; + * hostname?: string; + * insecureHTTPParser?: boolean; + * joinDuplicateHeaders?: boolean; + * localAddress?: string; + * localPort?: number; + * lookup?: Function; + * maxHeaderSize?: number; + * method?: string; + * path?: string; + * port?: number; + * protocol?: string; + * setHost?: boolean; + * socketPath?: string; + * timeout?: number; + * signal?: AbortSignal; + * uniqueHeaders?: Array; + * } | string | URL} [options] + * @param {Function} [cb] + * @returns {ClientRequest} + */ +function get(input, options, cb) { + const req = request(input, options, cb); + req.end(); + return req; +} + +module.exports = { + Agent, + globalAgent, + Server, + createServer, + get, + request, +}; \ No newline at end of file diff --git a/.codesandbox/node/net.js b/.codesandbox/node/net.js new file mode 100644 index 00000000..d43f7d8a --- /dev/null +++ b/.codesandbox/node/net.js @@ -0,0 +1,2650 @@ +"use strict"; + +const { + ArrayIsArray, + ArrayPrototypeIncludes, + ArrayPrototypeIndexOf, + ArrayPrototypePush, + Boolean, + FunctionPrototypeBind, + FunctionPrototypeCall, + MathMax, + Number, + NumberIsNaN, + NumberParseInt, + ObjectDefineProperty, + ObjectSetPrototypeOf, + Symbol, + SymbolAsyncDispose, + SymbolDispose, +} = primordials; + +const EventEmitter = require("events"); +const { addAbortListener } = require("internal/events/abort_listener"); +const stream = require("stream"); +let debug = require("internal/util/debuglog").debuglog("net", (fn) => { + debug = fn; +}); +const { + kReinitializeHandle, + isIP, + isIPv4, + isIPv6, + normalizedArgsSymbol, + makeSyncWrite, +} = require("internal/net"); +const assert = require("internal/assert"); +const { UV_EADDRINUSE, UV_EINVAL, UV_ENOTCONN, UV_ECANCELED, UV_ETIMEDOUT } = + internalBinding("uv"); +const { convertIpv6StringToBuffer } = internalBinding("cares_wrap"); + +const { Buffer } = require("buffer"); +const { ShutdownWrap } = internalBinding("stream_wrap"); +const { + TCP, + TCPConnectWrap, + constants: TCPConstants, +} = internalBinding("tcp_wrap"); +const { + Pipe, + 
PipeConnectWrap, + constants: PipeConstants, +} = internalBinding("pipe_wrap"); +const { + newAsyncId, + defaultTriggerAsyncIdScope, + symbols: { async_id_symbol, owner_symbol }, +} = require("internal/async_hooks"); +const { + writevGeneric, + writeGeneric, + onStreamRead, + kAfterAsyncWrite, + kHandle, + kUpdateTimer, + setStreamTimeout, + kBuffer, + kBufferCb, + kBufferGen, +} = require("internal/stream_base_commons"); +const { + ErrnoException, + ExceptionWithHostPort, + NodeAggregateError, + UVExceptionWithHostPort, + codes: { + ERR_INVALID_ADDRESS_FAMILY, + ERR_INVALID_ARG_TYPE, + ERR_INVALID_ARG_VALUE, + ERR_INVALID_FD_TYPE, + ERR_INVALID_HANDLE_TYPE, + ERR_INVALID_IP_ADDRESS, + ERR_IP_BLOCKED, + ERR_MISSING_ARGS, + ERR_SERVER_ALREADY_LISTEN, + ERR_SERVER_NOT_RUNNING, + ERR_SOCKET_CLOSED, + ERR_SOCKET_CLOSED_BEFORE_CONNECTION, + ERR_SOCKET_CONNECTION_TIMEOUT, + }, + genericNodeError, +} = require("internal/errors"); +const { isUint8Array } = require("internal/util/types"); +const { queueMicrotask } = require("internal/process/task_queues"); +const { + guessHandleType, + isWindows, + kEmptyObject, + promisify, +} = require("internal/util"); +const { + validateAbortSignal, + validateBoolean, + validateFunction, + validateInt32, + validateNumber, + validatePort, + validateString, +} = require("internal/validators"); +const kLastWriteQueueSize = Symbol("lastWriteQueueSize"); +const { getOptionValue } = require("internal/options"); + +// Lazy loaded to improve startup performance. +let cluster; +let dns; +let BlockList; +let SocketAddress; +let autoSelectFamilyDefault = getOptionValue("--network-family-autoselection"); +let autoSelectFamilyAttemptTimeoutDefault = getOptionValue( + "--network-family-autoselection-attempt-timeout" +); + +const { clearTimeout, setTimeout } = require("timers"); +const { kTimeout } = require("internal/timers"); + +const DEFAULT_IPV4_ADDR = "0.0.0.0"; +const DEFAULT_IPV6_ADDR = "::"; + +const noop = () => {}; + +const kPerfHooksNetConnectContext = Symbol("kPerfHooksNetConnectContext"); + +const dc = require("diagnostics_channel"); +const netClientSocketChannel = dc.channel("net.client.socket"); +const netServerSocketChannel = dc.channel("net.server.socket"); +const netServerListen = dc.tracingChannel("net.server.listen"); + +const { hasObserver, startPerf, stopPerf } = require("internal/perf/observe"); +const { getDefaultHighWaterMark } = require("internal/streams/state"); + +function getFlags(options) { + let flags = 0; + if (options.ipv6Only === true) { + flags |= TCPConstants.UV_TCP_IPV6ONLY; + } + if (options.reusePort === true) { + flags |= TCPConstants.UV_TCP_REUSEPORT; + } + return flags; +} + +function createHandle(fd, is_server) { + validateInt32(fd, "fd", 0); + const type = guessHandleType(fd); + if (type === "PIPE") { + return new Pipe(is_server ? PipeConstants.SERVER : PipeConstants.SOCKET); + } + + if (type === "TCP") { + return new TCP(is_server ? TCPConstants.SERVER : TCPConstants.SOCKET); + } + + throw new ERR_INVALID_FD_TYPE(type); +} + +function getNewAsyncId(handle) { + return !handle || typeof handle.getAsyncId !== "function" + ? 
newAsyncId() + : handle.getAsyncId(); +} + +function isPipeName(s) { + return typeof s === "string" && toNumber(s) === false; +} + +/** + * Creates a new TCP or IPC server + * @param {{ + * allowHalfOpen?: boolean; + * pauseOnConnect?: boolean; + * }} [options] + * @param {Function} [connectionListener] + * @returns {Server} + */ + +function createServer(options, connectionListener) { + return new Server(options, connectionListener); +} + +// Target API: +// +// let s = net.connect({port: 80, host: 'google.com'}, function() { +// ... +// }); +// +// There are various forms: +// +// connect(options, [cb]) +// connect(port, [host], [cb]) +// connect(path, [cb]); +// +function connect(...args) { + const normalized = normalizeArgs(args); + const options = normalized[0]; + debug("createConnection", normalized); + const socket = new Socket(options); + + if (options.timeout) { + socket.setTimeout(options.timeout); + } + + return socket.connect(normalized); +} + +function getDefaultAutoSelectFamily() { + return autoSelectFamilyDefault; +} + +function setDefaultAutoSelectFamily(value) { + validateBoolean(value, "value"); + autoSelectFamilyDefault = value; +} + +function getDefaultAutoSelectFamilyAttemptTimeout() { + return autoSelectFamilyAttemptTimeoutDefault; +} + +function setDefaultAutoSelectFamilyAttemptTimeout(value) { + validateInt32(value, "value", 1); + + if (value < 10) { + value = 10; + } + + autoSelectFamilyAttemptTimeoutDefault = value; +} + +// Returns an array [options, cb], where options is an object, +// cb is either a function or null. +// Used to normalize arguments of Socket.prototype.connect() and +// Server.prototype.listen(). Possible combinations of parameters: +// (options[...][, cb]) +// (path[...][, cb]) +// ([port][, host][...][, cb]) +// For Socket.prototype.connect(), the [...] part is ignored +// For Server.prototype.listen(), the [...] part is [, backlog] +// but will not be handled here (handled in listen()) +function normalizeArgs(args) { + let arr; + + if (args.length === 0) { + arr = [{}, null]; + arr[normalizedArgsSymbol] = true; + return arr; + } + + const arg0 = args[0]; + let options = {}; + if (typeof arg0 === "object" && arg0 !== null) { + // (options[...][, cb]) + options = arg0; + } else if (isPipeName(arg0)) { + // (path[...][, cb]) + options.path = arg0; + } else { + // ([port][, host][...][, cb]) + options.port = arg0; + if (args.length > 1 && typeof args[1] === "string") { + options.host = args[1]; + } + } + + const cb = args[args.length - 1]; + if (typeof cb !== "function") arr = [options, null]; + else arr = [options, cb]; + + arr[normalizedArgsSymbol] = true; + return arr; +} + +// Called when creating new Socket, or when re-using a closed Socket +function initSocketHandle(self) { + self._undestroy(); + self._sockname = null; + + // Handle creation may be deferred to bind() or connect() time. 
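+ // When a handle already exists at this point (supplied via options.handle or
+ // created from options.fd in the Socket constructor), wire it up to this
+ // Socket straight away.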
+ if (self._handle) { + self._handle[owner_symbol] = self; + self._handle.onread = onStreamRead; + self[async_id_symbol] = getNewAsyncId(self._handle); + + let userBuf = self[kBuffer]; + if (userBuf) { + const bufGen = self[kBufferGen]; + if (bufGen !== null) { + userBuf = bufGen(); + if (!isUint8Array(userBuf)) return; + self[kBuffer] = userBuf; + } + self._handle.useUserBuffer(userBuf); + } + } +} + +function closeSocketHandle(self, isException, isCleanupPending = false) { + if (self._handle) { + self._handle.close(() => { + debug("emit close"); + self.emit("close", isException); + if (isCleanupPending) { + self._handle.onread = noop; + self._handle = null; + self._sockname = null; + } + }); + } +} + +const kBytesRead = Symbol("kBytesRead"); +const kBytesWritten = Symbol("kBytesWritten"); +const kSetNoDelay = Symbol("kSetNoDelay"); +const kSetKeepAlive = Symbol("kSetKeepAlive"); +const kSetKeepAliveInitialDelay = Symbol("kSetKeepAliveInitialDelay"); + +function Socket(options) { + if (!(this instanceof Socket)) return new Socket(options); + if (options?.objectMode) { + throw new ERR_INVALID_ARG_VALUE( + "options.objectMode", + options.objectMode, + "is not supported" + ); + } else if (options?.readableObjectMode || options?.writableObjectMode) { + throw new ERR_INVALID_ARG_VALUE( + `options.${ + options.readableObjectMode ? "readableObjectMode" : "writableObjectMode" + }`, + options.readableObjectMode || options.writableObjectMode, + "is not supported" + ); + } + if (options?.keepAliveInitialDelay !== undefined) { + validateNumber( + options?.keepAliveInitialDelay, + "options.keepAliveInitialDelay" + ); + + if (options.keepAliveInitialDelay < 0) { + options.keepAliveInitialDelay = 0; + } + } + + this.connecting = false; + // Problem with this is that users can supply their own handle, that may not + // have _handle.getAsyncId(). In this case an[async_id_symbol] should + // probably be supplied by async_hooks. + this[async_id_symbol] = -1; + this._hadError = false; + this[kHandle] = null; + this._parent = null; + this._host = null; + this[kLastWriteQueueSize] = 0; + this[kTimeout] = null; + this[kBuffer] = null; + this[kBufferCb] = null; + this[kBufferGen] = null; + this._closeAfterHandlingError = false; + + if (typeof options === "number") + options = { fd: options }; // Legacy interface. + else options = { ...options }; + + // Default to *not* allowing half open sockets. + options.allowHalfOpen = Boolean(options.allowHalfOpen); + // For backwards compat do not emit close on destroy. + options.emitClose = false; + options.autoDestroy = true; + // Handle strings directly. + options.decodeStrings = false; + stream.Duplex.call(this, options); + + if (options.handle) { + this._handle = options.handle; // private + this[async_id_symbol] = getNewAsyncId(this._handle); + } else if (options.fd !== undefined) { + const { fd } = options; + let err; + + // createHandle will throw ERR_INVALID_FD_TYPE if `fd` is not + // a valid `PIPE` or `TCP` descriptor + this._handle = createHandle(fd, false); + + err = this._handle.open(fd); + + // While difficult to fabricate, in some architectures + // `open` may return an error code for valid file descriptors + // which cannot be opened. 
This is difficult to test as most + // un-openable fds will throw on `createHandle` + if (err) throw new ErrnoException(err, "open"); + + this[async_id_symbol] = this._handle.getAsyncId(); + + if ((fd === 1 || fd === 2) && this._handle instanceof Pipe && isWindows) { + // Make stdout and stderr blocking on Windows + err = this._handle.setBlocking(true); + if (err) throw new ErrnoException(err, "setBlocking"); + + this._writev = null; + this._write = makeSyncWrite(fd); + // makeSyncWrite adjusts this value like the original handle would, so + // we need to let it do that by turning it into a writable, own + // property. + ObjectDefineProperty(this._handle, "bytesWritten", { + __proto__: null, + value: 0, + writable: true, + }); + } + } + + const onread = options.onread; + if ( + onread !== null && + typeof onread === "object" && + (isUint8Array(onread.buffer) || typeof onread.buffer === "function") && + typeof onread.callback === "function" + ) { + if (typeof onread.buffer === "function") { + this[kBuffer] = true; + this[kBufferGen] = onread.buffer; + } else { + this[kBuffer] = onread.buffer; + } + this[kBufferCb] = onread.callback; + } + + this[kSetNoDelay] = Boolean(options.noDelay); + this[kSetKeepAlive] = Boolean(options.keepAlive); + this[kSetKeepAliveInitialDelay] = ~~(options.keepAliveInitialDelay / 1000); + + // Shut down the socket when we're finished with it. + this.on("end", onReadableStreamEnd); + + initSocketHandle(this); + + this._pendingData = null; + this._pendingEncoding = ""; + + // If we have a handle, then start the flow of data into the + // buffer. if not, then this will happen when we connect + if (this._handle && options.readable !== false) { + if (options.pauseOnCreate) { + // Stop the handle from reading and pause the stream + this._handle.reading = false; + this._handle.readStop(); + this.readableFlowing = false; + } else if (!options.manualStart) { + this.read(0); + } + } + + if (options.signal) { + addClientAbortSignalOption(this, options); + } + + // Reserve properties + this.server = null; + this._server = null; + + // Used after `.destroy()` + this[kBytesRead] = 0; + this[kBytesWritten] = 0; + if (options.blockList) { + if (!module.exports.BlockList.isBlockList(options.blockList)) { + throw new ERR_INVALID_ARG_TYPE( + "options.blockList", + "net.BlockList", + options.blockList + ); + } + this.blockList = options.blockList; + } +} +ObjectSetPrototypeOf(Socket.prototype, stream.Duplex.prototype); +ObjectSetPrototypeOf(Socket, stream.Duplex); + +// Refresh existing timeouts. +Socket.prototype._unrefTimer = function _unrefTimer() { + for (let s = this; s !== null; s = s._parent) { + if (s[kTimeout]) s[kTimeout].refresh(); + } +}; + +// The user has called .end(), and all the bytes have been +// sent out to the other side. 
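+// Illustrative sketch of the `onread` option handled in the Socket
+// constructor above. It lets a caller reuse one buffer instead of allocating
+// a chunk per 'data' event; the port and `consume()` below are hypothetical:
+//
+//   const socket = connect({
+//     port: 80,
+//     onread: {
+//       buffer: Buffer.alloc(16 * 1024),
+//       callback(nread, buf) {
+//         // Only the first `nread` bytes of the shared buffer are valid.
+//         consume(buf.subarray(0, nread));
+//       },
+//     },
+//   });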
+Socket.prototype._final = function (cb) { + // If still connecting - defer handling `_final` until 'connect' will happen + if (this.connecting) { + debug("_final: not yet connected"); + return this.once("connect", () => this._final(cb)); + } + + if (!this._handle) return cb(); + + debug("_final: not ended, call shutdown()"); + + const req = new ShutdownWrap(); + req.oncomplete = afterShutdown; + req.handle = this._handle; + req.callback = cb; + const err = this._handle.shutdown(req); + + if (err === 1 || err === UV_ENOTCONN) + // synchronous finish + return cb(); + else if (err !== 0) return cb(new ErrnoException(err, "shutdown")); +}; + +function afterShutdown() { + const self = this.handle[owner_symbol]; + + debug("afterShutdown destroyed=%j", self.destroyed); + + this.callback(); +} + +// Provide a better error message when we call end() as a result +// of the other side sending a FIN. The standard 'write after end' +// is overly vague, and makes it seem like the user's code is to blame. +function writeAfterFIN(chunk, encoding, cb) { + if (!this.writableEnded) { + return stream.Duplex.prototype.write.call(this, chunk, encoding, cb); + } + + if (typeof encoding === "function") { + cb = encoding; + encoding = null; + } + + const er = genericNodeError("This socket has been ended by the other party", { + code: "EPIPE", + }); + if (typeof cb === "function") { + defaultTriggerAsyncIdScope(this[async_id_symbol], process.nextTick, cb, er); + } + this.destroy(er); + + return false; +} + +Socket.prototype.setTimeout = setStreamTimeout; + +Socket.prototype._onTimeout = function () { + const handle = this._handle; + const lastWriteQueueSize = this[kLastWriteQueueSize]; + if (lastWriteQueueSize > 0 && handle) { + // `lastWriteQueueSize !== writeQueueSize` means there is + // an active write in progress, so we suppress the timeout. + const { writeQueueSize } = handle; + if (lastWriteQueueSize !== writeQueueSize) { + this[kLastWriteQueueSize] = writeQueueSize; + this._unrefTimer(); + return; + } + } + debug("_onTimeout"); + this.emit("timeout"); +}; + +Socket.prototype.setNoDelay = function (enable) { + // Backwards compatibility: assume true when `enable` is omitted + enable = Boolean(enable === undefined ? 
true : enable); + + if (!this._handle) { + this[kSetNoDelay] = enable; + return this; + } + + if (this._handle.setNoDelay && enable !== this[kSetNoDelay]) { + this[kSetNoDelay] = enable; + this._handle.setNoDelay(enable); + } + + return this; +}; + +Socket.prototype.setKeepAlive = function (enable, initialDelayMsecs) { + enable = Boolean(enable); + const initialDelay = ~~(initialDelayMsecs / 1000); + + if (!this._handle) { + this[kSetKeepAlive] = enable; + this[kSetKeepAliveInitialDelay] = initialDelay; + return this; + } + + if (!this._handle.setKeepAlive) { + return this; + } + + if ( + enable !== this[kSetKeepAlive] || + (enable && this[kSetKeepAliveInitialDelay] !== initialDelay) + ) { + this[kSetKeepAlive] = enable; + this[kSetKeepAliveInitialDelay] = initialDelay; + this._handle.setKeepAlive(enable, initialDelay); + } + + return this; +}; + +Socket.prototype.address = function () { + return this._getsockname(); +}; + +ObjectDefineProperty(Socket.prototype, "_connecting", { + __proto__: null, + get: function () { + return this.connecting; + }, +}); + +ObjectDefineProperty(Socket.prototype, "pending", { + __proto__: null, + get() { + return !this._handle || this.connecting; + }, + configurable: true, +}); + +ObjectDefineProperty(Socket.prototype, "readyState", { + __proto__: null, + get: function () { + if (this.connecting) { + return "opening"; + } else if (this.readable && this.writable) { + return "open"; + } else if (this.readable && !this.writable) { + return "readOnly"; + } else if (!this.readable && this.writable) { + return "writeOnly"; + } + return "closed"; + }, +}); + +ObjectDefineProperty(Socket.prototype, "bufferSize", { + __proto__: null, + get: function () { + if (this._handle) { + return this.writableLength; + } + }, +}); + +ObjectDefineProperty(Socket.prototype, kUpdateTimer, { + __proto__: null, + get: function () { + return this._unrefTimer; + }, +}); + +function tryReadStart(socket) { + // Not already reading, start the flow + debug("Socket._handle.readStart"); + socket._handle.reading = true; + const err = socket._handle.readStart(); + if (err) socket.destroy(new ErrnoException(err, "read")); +} + +// Just call handle.readStart until we have enough in the buffer +Socket.prototype._read = function (n) { + debug( + "_read - n", + n, + "isConnecting?", + !!this.connecting, + "hasHandle?", + !!this._handle + ); + + if (this.connecting || !this._handle) { + debug("_read wait for connection"); + this.once("connect", () => this._read(n)); + } else if (!this._handle.reading) { + tryReadStart(this); + } +}; + +Socket.prototype.end = function (data, encoding, callback) { + stream.Duplex.prototype.end.call(this, data, encoding, callback); + return this; +}; + +Socket.prototype.resetAndDestroy = function () { + if (this._handle) { + if (!(this._handle instanceof TCP)) throw new ERR_INVALID_HANDLE_TYPE(); + if (this.connecting) { + debug("reset wait for connection"); + this.once("connect", () => this._reset()); + } else { + this._reset(); + } + } else { + this.destroy(new ERR_SOCKET_CLOSED()); + } + return this; +}; + +Socket.prototype.pause = function () { + if (this[kBuffer] && !this.connecting && this._handle?.reading) { + this._handle.reading = false; + if (!this.destroyed) { + const err = this._handle.readStop(); + if (err) this.destroy(new ErrnoException(err, "read")); + } + } + return stream.Duplex.prototype.pause.call(this); +}; + +Socket.prototype.resume = function () { + if ( + this[kBuffer] && + !this.connecting && + this._handle && + !this._handle.reading + ) { + 
tryReadStart(this); + } + return stream.Duplex.prototype.resume.call(this); +}; + +Socket.prototype.read = function (n) { + if ( + this[kBuffer] && + !this.connecting && + this._handle && + !this._handle.reading + ) { + tryReadStart(this); + } + return stream.Duplex.prototype.read.call(this, n); +}; + +// Called when the 'end' event is emitted. +function onReadableStreamEnd() { + if (!this.allowHalfOpen) { + this.write = writeAfterFIN; + } +} + +Socket.prototype.destroySoon = function () { + if (this.writable) this.end(); + + if (this.writableFinished) this.destroy(); + else this.once("finish", this.destroy); +}; + +Socket.prototype._destroy = function (exception, cb) { + debug("destroy"); + + this.connecting = false; + + for (let s = this; s !== null; s = s._parent) { + clearTimeout(s[kTimeout]); + } + + debug("close"); + if (this._handle) { + if (this !== process.stderr) debug("close handle"); + const isException = exception ? true : false; + // `bytesRead` and `kBytesWritten` should be accessible after `.destroy()` + this[kBytesRead] = this._handle.bytesRead; + this[kBytesWritten] = this._handle.bytesWritten; + + if (this.resetAndClosing) { + this.resetAndClosing = false; + const err = this._handle.reset(() => { + debug("emit close"); + this.emit("close", isException); + }); + if (err) this.emit("error", new ErrnoException(err, "reset")); + } else if (this._closeAfterHandlingError) { + // Enqueue closing the socket as a microtask, so that the socket can be + // accessible when an `error` event is handled in the `next tick queue`. + queueMicrotask(() => closeSocketHandle(this, isException, true)); + } else { + closeSocketHandle(this, isException); + } + + if (!this._closeAfterHandlingError) { + this._handle.onread = noop; + this._handle = null; + this._sockname = null; + } + cb(exception); + } else { + cb(exception); + process.nextTick(emitCloseNT, this); + } + + if (this._server) { + debug("has server"); + this._server._connections--; + if (this._server._emitCloseIfDrained) { + this._server._emitCloseIfDrained(); + } + } +}; + +Socket.prototype._reset = function () { + debug("reset connection"); + this.resetAndClosing = true; + return this.destroy(); +}; + +Socket.prototype._getpeername = function () { + if (!this._handle || !this._handle.getpeername || this.connecting) { + return this._peername || {}; + } else if (!this._peername) { + const out = {}; + const err = this._handle.getpeername(out); + if (err) return out; + this._peername = out; + } + return this._peername; +}; + +function protoGetter(name, callback) { + ObjectDefineProperty(Socket.prototype, name, { + __proto__: null, + configurable: false, + enumerable: true, + get: callback, + }); +} + +protoGetter("bytesRead", function bytesRead() { + return this._handle ? this._handle.bytesRead : this[kBytesRead]; +}); + +protoGetter("remoteAddress", function remoteAddress() { + return this._getpeername().address; +}); + +protoGetter("remoteFamily", function remoteFamily() { + return this._getpeername().family; +}); + +protoGetter("remotePort", function remotePort() { + return this._getpeername().port; +}); + +Socket.prototype._getsockname = function () { + if (!this._handle || !this._handle.getsockname) { + return {}; + } else if (!this._sockname) { + this._sockname = {}; + // FIXME(bnoordhuis) Throw when the return value is not 0? 
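+  // Illustrative sketch (hypothetical address values): the cached peer and
+  // sock names back the public getters defined above and below, e.g.
+  //
+  //   socket.remoteAddress  // '93.184.216.34'
+  //   socket.remotePort     // 443
+  //   socket.localAddress   // '10.0.0.5'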
+ this._handle.getsockname(this._sockname); + } + return this._sockname; +}; + +protoGetter("localAddress", function localAddress() { + return this._getsockname().address; +}); + +protoGetter("localPort", function localPort() { + return this._getsockname().port; +}); + +protoGetter("localFamily", function localFamily() { + return this._getsockname().family; +}); + +Socket.prototype[kAfterAsyncWrite] = function () { + this[kLastWriteQueueSize] = 0; +}; + +Socket.prototype._writeGeneric = function (writev, data, encoding, cb) { + // If we are still connecting, then buffer this for later. + // The Writable logic will buffer up any more writes while + // waiting for this one to be done. + if (this.connecting) { + this._pendingData = data; + this._pendingEncoding = encoding; + this.once("connect", function connect() { + this.off("close", onClose); + this._writeGeneric(writev, data, encoding, cb); + }); + function onClose() { + cb(new ERR_SOCKET_CLOSED_BEFORE_CONNECTION()); + } + this.once("close", onClose); + return; + } + this._pendingData = null; + this._pendingEncoding = ""; + + if (!this._handle) { + cb(new ERR_SOCKET_CLOSED()); + return false; + } + + this._unrefTimer(); + + let req; + if (writev) req = writevGeneric(this, data, cb); + else req = writeGeneric(this, data, encoding, cb); + if (req.async) this[kLastWriteQueueSize] = req.bytes; +}; + +Socket.prototype._writev = function (chunks, cb) { + this._writeGeneric(true, chunks, "", cb); +}; + +Socket.prototype._write = function (data, encoding, cb) { + this._writeGeneric(false, data, encoding, cb); +}; + +// Legacy alias. Having this is probably being overly cautious, but it doesn't +// really hurt anyone either. This can probably be removed safely if desired. +protoGetter("_bytesDispatched", function _bytesDispatched() { + return this._handle ? this._handle.bytesWritten : this[kBytesWritten]; +}); + +protoGetter("bytesWritten", function bytesWritten() { + let bytes = this._bytesDispatched; + const data = this._pendingData; + const encoding = this._pendingEncoding; + const writableBuffer = this.writableBuffer; + + if (!writableBuffer) return undefined; + + for (const el of writableBuffer) { + bytes += + el.chunk instanceof Buffer + ? el.chunk.length + : Buffer.byteLength(el.chunk, el.encoding); + } + + if (ArrayIsArray(data)) { + // Was a writev, iterate over chunks to get total length + for (let i = 0; i < data.length; i++) { + const chunk = data[i]; + + if (data.allBuffers || chunk instanceof Buffer) bytes += chunk.length; + else bytes += Buffer.byteLength(chunk.chunk, chunk.encoding); + } + } else if (data) { + // Writes are either a string or a Buffer. + if (typeof data !== "string") bytes += data.length; + else bytes += Buffer.byteLength(data, encoding); + } + + return bytes; +}); + +function checkBindError(err, port, handle) { + // EADDRINUSE may not be reported until we call listen() or connect(). + // To complicate matters, a failed bind() followed by listen() or connect() + // will implicitly bind to a random port. Ergo, check that the socket is + // bound to the expected port before calling listen() or connect(). + // + // FIXME(bnoordhuis) Doesn't work for pipe handles, they don't have a + // getsockname() method. Non-issue for now, the cluster module doesn't + // really support pipes anyway. 
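+  // Illustrative sketch of the failure mode described above, with
+  // hypothetical ports: if a bind() to 8080 failed silently and a later
+  // implicit bind picked an ephemeral port (say 49152), then
+  //
+  //   checkBindError(0, 8080, handle)  // -> UV_EADDRINUSE
+  //
+  // even though the bind() call itself reported success.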
+ if (err === 0 && port > 0 && handle.getsockname) { + const out = {}; + err = handle.getsockname(out); + if (err === 0 && port !== out.port) { + debug(`checkBindError, bound to ${out.port} instead of ${port}`); + err = UV_EADDRINUSE; + } + } + return err; +} + +function internalConnect( + self, + address, + port, + addressType, + localAddress, + localPort, + flags +) { + // TODO return promise from Socket.prototype.connect which + // wraps _connectReq. + + assert(self.connecting); + + let err; + + if (localAddress || localPort) { + if (addressType === 4) { + localAddress ||= DEFAULT_IPV4_ADDR; + err = self._handle.bind(localAddress, localPort); + } else { + // addressType === 6 + localAddress ||= DEFAULT_IPV6_ADDR; + err = self._handle.bind6(localAddress, localPort, flags); + } + debug( + "connect: binding to localAddress: %s and localPort: %d (addressType: %d)", + localAddress, + localPort, + addressType + ); + + err = checkBindError(err, localPort, self._handle); + if (err) { + const ex = new ExceptionWithHostPort( + err, + "bind", + localAddress, + localPort + ); + self.destroy(ex); + return; + } + } + + debug( + "connect: attempting to connect to %s:%d (addressType: %d)", + address, + port, + addressType + ); + self.emit("connectionAttempt", address, port, addressType); + + if (addressType === 6 || addressType === 4) { + if (self.blockList?.check(address, `ipv${addressType}`)) { + self.destroy(new ERR_IP_BLOCKED(address)); + return; + } + const req = new TCPConnectWrap(); + req.oncomplete = afterConnect; + req.address = address; + req.port = port; + req.localAddress = localAddress; + req.localPort = localPort; + req.addressType = addressType; + + if (addressType === 4) err = self._handle.connect(req, address, port); + else err = self._handle.connect6(req, address, port); + } else { + const req = new PipeConnectWrap(); + req.address = address; + req.oncomplete = afterConnect; + + err = self._handle.connect(req, address); + } + + if (err) { + const sockname = self._getsockname(); + let details; + + if (sockname) { + details = sockname.address + ":" + sockname.port; + } + + const ex = new ExceptionWithHostPort( + err, + "connect", + address, + port, + details + ); + self.destroy(ex); + } else if ((addressType === 6 || addressType === 4) && hasObserver("net")) { + startPerf(self, kPerfHooksNetConnectContext, { + type: "net", + name: "connect", + detail: { host: address, port }, + }); + } +} + +function internalConnectMultiple(context, canceled) { + clearTimeout(context[kTimeout]); + const self = context.socket; + + // We were requested to abort. 
Stop all operations + if (self._aborted) { + return; + } + + // All connections have been tried without success, destroy with error + if (canceled || context.current === context.addresses.length) { + if (context.errors.length === 0) { + self.destroy(new ERR_SOCKET_CONNECTION_TIMEOUT()); + return; + } + + self.destroy(new NodeAggregateError(context.errors)); + return; + } + + assert(self.connecting); + + const current = context.current++; + + if (current > 0) { + self[kReinitializeHandle](new TCP(TCPConstants.SOCKET)); + } + + const { localPort, port, flags } = context; + const { address, family: addressType } = context.addresses[current]; + let localAddress; + let err; + + if (localPort) { + if (addressType === 4) { + localAddress = DEFAULT_IPV4_ADDR; + err = self._handle.bind(localAddress, localPort); + } else { + // addressType === 6 + localAddress = DEFAULT_IPV6_ADDR; + err = self._handle.bind6(localAddress, localPort, flags); + } + + debug( + "connect/multiple: binding to localAddress: %s and localPort: %d (addressType: %d)", + localAddress, + localPort, + addressType + ); + + err = checkBindError(err, localPort, self._handle); + if (err) { + ArrayPrototypePush( + context.errors, + new ExceptionWithHostPort(err, "bind", localAddress, localPort) + ); + internalConnectMultiple(context); + return; + } + } + + if (self.blockList?.check(address, `ipv${addressType}`)) { + const ex = new ERR_IP_BLOCKED(address); + ArrayPrototypePush(context.errors, ex); + self.emit("connectionAttemptFailed", address, port, addressType, ex); + internalConnectMultiple(context); + return; + } + + debug( + "connect/multiple: attempting to connect to %s:%d (addressType: %d)", + address, + port, + addressType + ); + self.emit("connectionAttempt", address, port, addressType); + + const req = new TCPConnectWrap(); + req.oncomplete = FunctionPrototypeBind( + afterConnectMultiple, + undefined, + context, + current + ); + req.address = address; + req.port = port; + req.localAddress = localAddress; + req.localPort = localPort; + req.addressType = addressType; + + ArrayPrototypePush( + self.autoSelectFamilyAttemptedAddresses, + `${address}:${port}` + ); + + if (addressType === 4) { + err = self._handle.connect(req, address, port); + } else { + err = self._handle.connect6(req, address, port); + } + + if (err) { + const sockname = self._getsockname(); + let details; + + if (sockname) { + details = sockname.address + ":" + sockname.port; + } + + const ex = new ExceptionWithHostPort( + err, + "connect", + address, + port, + details + ); + ArrayPrototypePush(context.errors, ex); + + self.emit("connectionAttemptFailed", address, port, addressType, ex); + internalConnectMultiple(context); + return; + } + + if (current < context.addresses.length - 1) { + debug( + "connect/multiple: setting the attempt timeout to %d ms", + context.timeout + ); + + // If the attempt has not returned an error, start the connection timer + context[kTimeout] = setTimeout( + internalConnectMultipleTimeout, + context.timeout, + context, + req, + self._handle + ); + } +} + +Socket.prototype.connect = function (...args) { + let normalized; + // If passed an array, it's treated as an array of arguments that have + // already been normalized (so we don't normalize more than once). This has + // been solved before in https://github.com/nodejs/node/pull/12342, but was + // reverted as it had unintended side effects. 
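+  // Illustrative sketch of the family-autoselection path implemented by
+  // internalConnectMultiple() above (host, port, timeout and `onConnect`
+  // are hypothetical):
+  //
+  //   connect({
+  //     host: 'example.org',
+  //     port: 443,
+  //     autoSelectFamily: true,
+  //     autoSelectFamilyAttemptTimeout: 250,
+  //   }, onConnect);
+  //
+  // Each resolved address is tried in turn; failed attempts are collected
+  // and surfaced as a NodeAggregateError if every attempt fails.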
+ if (ArrayIsArray(args[0]) && args[0][normalizedArgsSymbol]) { + normalized = args[0]; + } else { + normalized = normalizeArgs(args); + } + const options = normalized[0]; + const cb = normalized[1]; + + if (netClientSocketChannel.hasSubscribers) { + netClientSocketChannel.publish({ + socket: this, + }); + } + + if (cb !== null) { + this.once("connect", cb); + } + + // If the parent is already connecting, do not attempt to connect again + if (this._parent?.connecting) { + return this; + } + + // options.port === null will be checked later. + if (options.port === undefined && options.path == null) + throw new ERR_MISSING_ARGS(["options", "port", "path"]); + + if (this.write !== Socket.prototype.write) + this.write = Socket.prototype.write; + + if (this.destroyed) { + this._handle = null; + this._peername = null; + this._sockname = null; + } + + const { path } = options; + const pipe = !!path; + debug("pipe", pipe, path); + + if (!this._handle) { + this._handle = pipe + ? new Pipe(PipeConstants.SOCKET) + : new TCP(TCPConstants.SOCKET); + initSocketHandle(this); + } + + this._unrefTimer(); + + this.connecting = true; + + if (pipe) { + validateString(path, "options.path"); + defaultTriggerAsyncIdScope( + this[async_id_symbol], + internalConnect, + this, + path + ); + } else { + lookupAndConnect(this, options); + } + return this; +}; + +Socket.prototype[kReinitializeHandle] = function reinitializeHandle(handle) { + this._handle?.close(); + + this._handle = handle; + this._handle[owner_symbol] = this; + + initSocketHandle(this); +}; + +function socketToDnsFamily(family) { + switch (family) { + case "IPv4": + return 4; + case "IPv6": + return 6; + } + + return family; +} + +function lookupAndConnect(self, options) { + const { localAddress, localPort } = options; + const host = options.host || "localhost"; + let { port, autoSelectFamilyAttemptTimeout, autoSelectFamily } = options; + + validateString(host, "options.host"); + + if (localAddress && !isIP(localAddress)) { + throw new ERR_INVALID_IP_ADDRESS(localAddress); + } + + if (localPort) { + validateNumber(localPort, "options.localPort"); + } + + if (port !== undefined) { + if (typeof port !== "number" && typeof port !== "string") { + throw new ERR_INVALID_ARG_TYPE( + "options.port", + ["number", "string"], + port + ); + } + validatePort(port); + } + port |= 0; + + if (autoSelectFamily != null) { + validateBoolean(autoSelectFamily, "options.autoSelectFamily"); + } else { + autoSelectFamily = autoSelectFamilyDefault; + } + + if (autoSelectFamilyAttemptTimeout != null) { + validateInt32( + autoSelectFamilyAttemptTimeout, + "options.autoSelectFamilyAttemptTimeout", + 1 + ); + + if (autoSelectFamilyAttemptTimeout < 10) { + autoSelectFamilyAttemptTimeout = 10; + } + } else { + autoSelectFamilyAttemptTimeout = autoSelectFamilyAttemptTimeoutDefault; + } + + // If host is an IP, skip performing a lookup + const addressType = isIP(host); + if (addressType) { + defaultTriggerAsyncIdScope(self[async_id_symbol], process.nextTick, () => { + if (self.connecting) + defaultTriggerAsyncIdScope( + self[async_id_symbol], + internalConnect, + self, + host, + port, + addressType, + localAddress, + localPort + ); + }); + return; + } + + if (options.lookup != null) + validateFunction(options.lookup, "options.lookup"); + + if (dns === undefined) dns = require("dns"); + const dnsopts = { + family: socketToDnsFamily(options.family), + hints: options.hints || 0, + }; + + if ( + !isWindows && + dnsopts.family !== 4 && + dnsopts.family !== 6 && + dnsopts.hints === 0 + ) { + 
dnsopts.hints = dns.ADDRCONFIG; + } + + debug("connect: find host", host); + debug("connect: dns options", dnsopts); + self._host = host; + const lookup = options.lookup || dns.lookup; + + if ( + dnsopts.family !== 4 && + dnsopts.family !== 6 && + !localAddress && + autoSelectFamily + ) { + debug("connect: autodetecting"); + + dnsopts.all = true; + defaultTriggerAsyncIdScope(self[async_id_symbol], function () { + lookupAndConnectMultiple( + self, + async_id_symbol, + lookup, + host, + options, + dnsopts, + port, + localAddress, + localPort, + autoSelectFamilyAttemptTimeout + ); + }); + + return; + } + + defaultTriggerAsyncIdScope(self[async_id_symbol], function () { + lookup(host, dnsopts, function emitLookup(err, ip, addressType) { + self.emit("lookup", err, ip, addressType, host); + + // It's possible we were destroyed while looking this up. + // XXX it would be great if we could cancel the promise returned by + // the look up. + if (!self.connecting) return; + + if (err) { + // net.createConnection() creates a net.Socket object and immediately + // calls net.Socket.connect() on it (that's us). There are no event + // listeners registered yet so defer the error event to the next tick. + process.nextTick(connectErrorNT, self, err); + } else if (typeof ip !== "string" || !isIP(ip)) { + err = new ERR_INVALID_IP_ADDRESS(ip); + process.nextTick(connectErrorNT, self, err); + } else if (addressType !== 4 && addressType !== 6) { + err = new ERR_INVALID_ADDRESS_FAMILY( + addressType, + options.host, + options.port + ); + process.nextTick(connectErrorNT, self, err); + } else { + self._unrefTimer(); + defaultTriggerAsyncIdScope( + self[async_id_symbol], + internalConnect, + self, + ip, + port, + addressType, + localAddress, + localPort + ); + } + }); + }); +} + +function lookupAndConnectMultiple( + self, + async_id_symbol, + lookup, + host, + options, + dnsopts, + port, + localAddress, + localPort, + timeout +) { + defaultTriggerAsyncIdScope(self[async_id_symbol], function emitLookup() { + lookup(host, dnsopts, function emitLookup(err, addresses) { + // It's possible we were destroyed while looking this up. + // XXX it would be great if we could cancel the promise returned by + // the look up. + if (!self.connecting) { + return; + } else if (err) { + self.emit("lookup", err, undefined, undefined, host); + + // net.createConnection() creates a net.Socket object and immediately + // calls net.Socket.connect() on it (that's us). There are no event + // listeners registered yet so defer the error event to the next tick. + process.nextTick(connectErrorNT, self, err); + return; + } + + // Filter addresses by only keeping the one which are either IPv4 or IPV6. + // The first valid address determines which group has preference on the + // alternate family sorting which happens later. + const validAddresses = [[], []]; + const validIps = [[], []]; + let destinations; + for (let i = 0, l = addresses.length; i < l; i++) { + const address = addresses[i]; + const { address: ip, family: addressType } = address; + self.emit("lookup", err, ip, addressType, host); + // It's possible we were destroyed while looking this up. + if (!self.connecting) { + return; + } + if (isIP(ip) && (addressType === 4 || addressType === 6)) { + destinations ||= addressType === 6 ? 
{ 6: 0, 4: 1 } : { 4: 0, 6: 1 }; + + const destination = destinations[addressType]; + + // Only try an address once + if (!ArrayPrototypeIncludes(validIps[destination], ip)) { + ArrayPrototypePush(validAddresses[destination], address); + ArrayPrototypePush(validIps[destination], ip); + } + } + } + + // When no AAAA or A records are available, fail on the first one + if (!validAddresses[0].length && !validAddresses[1].length) { + const { address: firstIp, family: firstAddressType } = addresses[0]; + + if (!isIP(firstIp)) { + err = new ERR_INVALID_IP_ADDRESS(firstIp); + process.nextTick(connectErrorNT, self, err); + } else if (firstAddressType !== 4 && firstAddressType !== 6) { + err = new ERR_INVALID_ADDRESS_FAMILY( + firstAddressType, + options.host, + options.port + ); + process.nextTick(connectErrorNT, self, err); + } + + return; + } + + // Sort addresses alternating families + const toAttempt = []; + for ( + let i = 0, + l = MathMax(validAddresses[0].length, validAddresses[1].length); + i < l; + i++ + ) { + if (i in validAddresses[0]) { + ArrayPrototypePush(toAttempt, validAddresses[0][i]); + } + if (i in validAddresses[1]) { + ArrayPrototypePush(toAttempt, validAddresses[1][i]); + } + } + + if (toAttempt.length === 1) { + debug( + "connect/multiple: only one address found, switching back to single connection" + ); + const { address: ip, family: addressType } = toAttempt[0]; + + self._unrefTimer(); + defaultTriggerAsyncIdScope( + self[async_id_symbol], + internalConnect, + self, + ip, + port, + addressType, + localAddress, + localPort + ); + + return; + } + + self.autoSelectFamilyAttemptedAddresses = []; + debug("connect/multiple: will try the following addresses", toAttempt); + + const context = { + socket: self, + addresses: toAttempt, + current: 0, + port, + localPort, + timeout, + [kTimeout]: null, + errors: [], + }; + + self._unrefTimer(); + defaultTriggerAsyncIdScope( + self[async_id_symbol], + internalConnectMultiple, + context + ); + }); + }); +} + +function connectErrorNT(self, err) { + self.destroy(err); +} + +Socket.prototype.ref = function () { + if (!this._handle) { + this.once("connect", this.ref); + return this; + } + + if (typeof this._handle.ref === "function") { + this._handle.ref(); + } + + return this; +}; + +Socket.prototype.unref = function () { + if (!this._handle) { + this.once("connect", this.unref); + return this; + } + + if (typeof this._handle.unref === "function") { + this._handle.unref(); + } + + return this; +}; + +function afterConnect(status, handle, req, readable, writable) { + const self = handle[owner_symbol]; + + // Callback may come after call to destroy + if (self.destroyed) { + return; + } + + debug("afterConnect"); + + assert(self.connecting); + self.connecting = false; + self._sockname = null; + + if (status === 0) { + if (self.readable && !readable) { + self.push(null); + self.read(); + } + if (self.writable && !writable) { + self.end(); + } + self._unrefTimer(); + + if (self[kSetNoDelay] && self._handle.setNoDelay) { + self._handle.setNoDelay(true); + } + + if (self[kSetKeepAlive] && self._handle.setKeepAlive) { + self._handle.setKeepAlive(true, self[kSetKeepAliveInitialDelay]); + } + + self.emit("connect"); + self.emit("ready"); + + // Start the first read, or get an immediate EOF. + // this doesn't actually consume any bytes, because len=0. 
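+  // Worked example for the alternating-family sort in
+  // lookupAndConnectMultiple() above, with hypothetical lookup results:
+  // given [::1 (IPv6), 127.0.0.1 (IPv4), ::2 (IPv6)] in that order, the
+  // first valid family (IPv6) gets priority and the attempt order becomes
+  // ::1, 127.0.0.1, ::2.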
+ if (readable && !self.isPaused()) self.read(0); + if (self[kPerfHooksNetConnectContext] && hasObserver("net")) { + stopPerf(self, kPerfHooksNetConnectContext); + } + } else { + let details; + if (req.localAddress && req.localPort) { + details = req.localAddress + ":" + req.localPort; + } + const ex = new ExceptionWithHostPort( + status, + "connect", + req.address, + req.port, + details + ); + if (details) { + ex.localAddress = req.localAddress; + ex.localPort = req.localPort; + } + + self.emit( + "connectionAttemptFailed", + req.address, + req.port, + req.addressType, + ex + ); + self.destroy(ex); + } +} + +function addClientAbortSignalOption(self, options) { + validateAbortSignal(options.signal, "options.signal"); + const { signal } = options; + let disposable; + + function onAbort() { + disposable?.[SymbolDispose](); + self._aborted = true; + } + + if (signal.aborted) { + process.nextTick(onAbort); + } else { + process.nextTick(() => { + disposable = addAbortListener(signal, onAbort); + }); + } +} + +function createConnectionError(req, status) { + let details; + + if (req.localAddress && req.localPort) { + details = req.localAddress + ":" + req.localPort; + } + + const ex = new ExceptionWithHostPort( + status, + "connect", + req.address, + req.port, + details + ); + if (details) { + ex.localAddress = req.localAddress; + ex.localPort = req.localPort; + } + + return ex; +} + +function afterConnectMultiple( + context, + current, + status, + handle, + req, + readable, + writable +) { + debug( + "connect/multiple: connection attempt to %s:%s completed with status %s", + req.address, + req.port, + status + ); + + // Make sure another connection is not spawned + clearTimeout(context[kTimeout]); + + // One of the connection has completed and correctly dispatched but after timeout, ignore this one + if (status === 0 && current !== context.current - 1) { + debug( + "connect/multiple: ignoring successful but timedout connection to %s:%s", + req.address, + req.port + ); + handle.close(); + return; + } + + const self = context.socket; + + // Some error occurred, add to the list of exceptions + if (status !== 0) { + const ex = createConnectionError(req, status); + ArrayPrototypePush(context.errors, ex); + + self.emit( + "connectionAttemptFailed", + req.address, + req.port, + req.addressType, + ex + ); + + // Try the next address, unless we were aborted + if (context.socket.connecting) { + internalConnectMultiple(context, status === UV_ECANCELED); + } + + return; + } + + if (hasObserver("net")) { + startPerf(self, kPerfHooksNetConnectContext, { + type: "net", + name: "connect", + detail: { host: req.address, port: req.port }, + }); + } + + afterConnect(status, self._handle, req, readable, writable); +} + +function internalConnectMultipleTimeout(context, req, handle) { + debug( + "connect/multiple: connection to %s:%s timed out", + req.address, + req.port + ); + context.socket.emit( + "connectionAttemptTimeout", + req.address, + req.port, + req.addressType + ); + + req.oncomplete = undefined; + ArrayPrototypePush(context.errors, createConnectionError(req, UV_ETIMEDOUT)); + handle.close(); + + // Try the next address, unless we were aborted + if (context.socket.connecting) { + internalConnectMultiple(context); + } +} + +function addServerAbortSignalOption(self, options) { + if (options?.signal === undefined) { + return; + } + validateAbortSignal(options.signal, "options.signal"); + const { signal } = options; + const onAborted = () => { + self.close(); + }; + if (signal.aborted) { + 
process.nextTick(onAborted); + } else { + const disposable = addAbortListener(signal, onAborted); + self.once("close", disposable[SymbolDispose]); + } +} + +function Server(options, connectionListener) { + if (!(this instanceof Server)) return new Server(options, connectionListener); + + EventEmitter.call(this); + + if (typeof options === "function") { + connectionListener = options; + options = kEmptyObject; + this.on("connection", connectionListener); + } else if (options == null || typeof options === "object") { + options = { ...options }; + + if (typeof connectionListener === "function") { + this.on("connection", connectionListener); + } + } else { + throw new ERR_INVALID_ARG_TYPE("options", "Object", options); + } + if (options.keepAliveInitialDelay !== undefined) { + validateNumber( + options.keepAliveInitialDelay, + "options.keepAliveInitialDelay" + ); + + if (options.keepAliveInitialDelay < 0) { + options.keepAliveInitialDelay = 0; + } + } + if (options.highWaterMark !== undefined) { + validateNumber(options.highWaterMark, "options.highWaterMark"); + + if (options.highWaterMark < 0) { + options.highWaterMark = getDefaultHighWaterMark(); + } + } + + this._connections = 0; + + this[async_id_symbol] = -1; + this._handle = null; + this._usingWorkers = false; + this._workers = []; + this._unref = false; + this._listeningId = 1; + + this.allowHalfOpen = options.allowHalfOpen || false; + this.pauseOnConnect = !!options.pauseOnConnect; + this.noDelay = Boolean(options.noDelay); + this.keepAlive = Boolean(options.keepAlive); + this.keepAliveInitialDelay = ~~(options.keepAliveInitialDelay / 1000); + this.highWaterMark = options.highWaterMark ?? getDefaultHighWaterMark(); + if (options.blockList) { + if (!module.exports.BlockList.isBlockList(options.blockList)) { + throw new ERR_INVALID_ARG_TYPE( + "options.blockList", + "net.BlockList", + options.blockList + ); + } + this.blockList = options.blockList; + } +} +ObjectSetPrototypeOf(Server.prototype, EventEmitter.prototype); +ObjectSetPrototypeOf(Server, EventEmitter); + +function toNumber(x) { + return (x = Number(x)) >= 0 ? x : false; +} + +// Returns handle if it can be created, or error code if it can't +function createServerHandle(address, port, addressType, fd, flags) { + let err = 0; + // Assign handle in listen, and clean up if bind or listen fails + let handle; + + let isTCP = false; + if (typeof fd === "number" && fd >= 0) { + try { + handle = createHandle(fd, true); + } catch (e) { + // Not a fd we can listen on. This will trigger an error. 
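+      // Illustrative sketch of the Server options and the listen() abort
+      // signal handled by addServerAbortSignalOption() above (`onConn` and
+      // the controller name are hypothetical):
+      //
+      //   const server = createServer({ noDelay: true, keepAlive: true }, onConn);
+      //   const ac = new AbortController();
+      //   server.listen({ port: 0, signal: ac.signal });
+      //   // later: ac.abort() closes the server.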
+ debug("listen invalid fd=%d:", fd, e.message); + return UV_EINVAL; + } + + err = handle.open(fd); + if (err) return err; + + assert(!address && !port); + } else if (port === -1 && addressType === -1) { + handle = new Pipe(PipeConstants.SERVER); + if (isWindows) { + const instances = NumberParseInt(process.env.NODE_PENDING_PIPE_INSTANCES); + if (!NumberIsNaN(instances)) { + handle.setPendingInstances(instances); + } + } + } else { + handle = new TCP(TCPConstants.SERVER); + isTCP = true; + } + + if (address || port || isTCP) { + debug("bind to", address || "any"); + if (!address) { + // Try binding to ipv6 first + err = handle.bind6(DEFAULT_IPV6_ADDR, port, flags); + if (err) { + handle.close(); + // Fallback to ipv4 + return createServerHandle( + DEFAULT_IPV4_ADDR, + port, + undefined, + undefined, + flags + ); + } + } else if (addressType === 6) { + err = handle.bind6(address, port, flags); + } else { + err = handle.bind(address, port, flags); + } + } + + if (err) { + handle.close(); + return err; + } + + return handle; +} + +function setupListenHandle(address, port, addressType, backlog, fd, flags) { + debug("setupListenHandle", address, port, addressType, backlog, fd); + + // If there is not yet a handle, we need to create one and bind. + // In the case of a server sent via IPC, we don't need to do this. + if (this._handle) { + debug("setupListenHandle: have a handle already"); + } else { + debug("setupListenHandle: create a handle"); + + let rval = null; + + // Try to bind to the unspecified IPv6 address, see if IPv6 is available + if (!address && typeof fd !== "number") { + rval = createServerHandle(DEFAULT_IPV6_ADDR, port, 6, fd, flags); + + if (typeof rval === "number") { + rval = null; + address = DEFAULT_IPV4_ADDR; + addressType = 4; + } else { + address = DEFAULT_IPV6_ADDR; + addressType = 6; + } + } + + if (rval === null) + rval = createServerHandle(address, port, addressType, fd, flags); + + if (typeof rval === "number") { + const error = new UVExceptionWithHostPort(rval, "listen", address, port); + + if (netServerListen.hasSubscribers) { + netServerListen.error.publish({ server: this, error }); + } + + process.nextTick(emitErrorNT, this, error); + return; + } + this._handle = rval; + } + + this[async_id_symbol] = getNewAsyncId(this._handle); + this._handle.onconnection = onconnection; + this._handle[owner_symbol] = this; + + // Use a backlog of 512 entries. We pass 511 to the listen() call because + // the kernel does: backlogsize = roundup_pow_of_two(backlogsize + 1); + // which will thus give us a backlog of 512 entries. 
+ const err = this._handle.listen(backlog || 511); + + if (err) { + const ex = new UVExceptionWithHostPort(err, "listen", address, port); + this._handle.close(); + this._handle = null; + + if (netServerListen.hasSubscribers) { + netServerListen.error.publish({ server: this, error: ex }); + } + + defaultTriggerAsyncIdScope( + this[async_id_symbol], + process.nextTick, + emitErrorNT, + this, + ex + ); + return; + } + + if (netServerListen.hasSubscribers) { + netServerListen.asyncEnd.publish({ server: this }); + } + + // Generate connection key, this should be unique to the connection + this._connectionKey = addressType + ":" + address + ":" + port; + + // Unref the handle if the server was unref'ed prior to listening + if (this._unref) this.unref(); + + defaultTriggerAsyncIdScope( + this[async_id_symbol], + process.nextTick, + emitListeningNT, + this + ); +} + +Server.prototype._listen2 = setupListenHandle; // legacy alias + +function emitErrorNT(self, err) { + self.emit("error", err); +} + +function emitListeningNT(self) { + // Ensure handle hasn't closed + if (self._handle) self.emit("listening"); +} + +function listenInCluster( + server, + address, + port, + addressType, + backlog, + fd, + exclusive, + flags, + options +) { + exclusive = !!exclusive; + + if (cluster === undefined) cluster = require("cluster"); + + if (cluster.isPrimary || exclusive) { + // Will create a new handle + // _listen2 sets up the listened handle, it is still named like this + // to avoid breaking code that wraps this method + server._listen2(address, port, addressType, backlog, fd, flags); + return; + } + + const serverQuery = { + address: address, + port: port, + addressType: addressType, + fd: fd, + flags, + backlog, + ...options, + }; + const listeningId = server._listeningId; + // Get the primary's server handle, and listen on it + cluster._getServer(server, serverQuery, listenOnPrimaryHandle); + function listenOnPrimaryHandle(err, handle) { + if (listeningId !== server._listeningId) { + handle.close(); + return; + } + err = checkBindError(err, port, handle); + + if (err) { + const ex = new ExceptionWithHostPort(err, "bind", address, port); + return server.emit("error", ex); + } + // If there was a handle, just close it to avoid fd leak + // but it doesn't look like that's going to happen right now + if (server._handle) { + server._handle.close(); + } + // Reuse primary's server handle + server._handle = handle; + // _listen2 sets up the listened handle, it is still named like this + // to avoid breaking code that wraps this method + server._listen2(address, port, addressType, backlog, fd, flags); + } +} + +Server.prototype.listen = function (...args) { + const normalized = normalizeArgs(args); + let options = normalized[0]; + const cb = normalized[1]; + + if (this._handle) { + throw new ERR_SERVER_ALREADY_LISTEN(); + } + + if (netServerListen.hasSubscribers) { + netServerListen.asyncStart.publish({ server: this, options }); + } + + if (cb !== null) { + this.once("listening", cb); + } + const backlogFromArgs = + // (handle, backlog) or (path, backlog) or (port, backlog) + toNumber(args.length > 1 && args[1]) || + toNumber(args.length > 2 && args[2]); // (port, host, backlog) + + options = options._handle || options.handle || options; + const flags = getFlags(options); + // Refresh the id to make the previous call invalid + this._listeningId++; + // (handle[, backlog][, cb]) where handle is an object with a handle + if (options instanceof TCP) { + this._handle = options; + this[async_id_symbol] = 
this._handle.getAsyncId(); + listenInCluster(this, null, -1, -1, backlogFromArgs, undefined, true); + return this; + } + addServerAbortSignalOption(this, options); + // (handle[, backlog][, cb]) where handle is an object with a fd + if (typeof options.fd === "number" && options.fd >= 0) { + listenInCluster(this, null, null, null, backlogFromArgs, options.fd); + return this; + } + + // ([port][, host][, backlog][, cb]) where port is omitted, + // that is, listen(), listen(null), listen(cb), or listen(null, cb) + // or (options[, cb]) where options.port is explicitly set as undefined or + // null, bind to an arbitrary unused port + if ( + args.length === 0 || + typeof args[0] === "function" || + (options.port === undefined && "port" in options) || + options.port === null + ) { + options.port = 0; + } + // ([port][, host][, backlog][, cb]) where port is specified + // or (options[, cb]) where options.port is specified + // or if options.port is normalized as 0 before + let backlog; + if (typeof options.port === "number" || typeof options.port === "string") { + validatePort(options.port, "options.port"); + backlog = options.backlog || backlogFromArgs; + if (options.reusePort === true) { + options.exclusive = true; + } + // start TCP server listening on host:port + if (options.host) { + lookupAndListen( + this, + options.port | 0, + options.host, + backlog, + options.exclusive, + flags + ); + } else { + // Undefined host, listens on unspecified address + // Default addressType 4 will be used to search for primary server + listenInCluster( + this, + null, + options.port | 0, + 4, + backlog, + undefined, + options.exclusive, + flags + ); + } + return this; + } + + // (path[, backlog][, cb]) or (options[, cb]) + // where path or options.path is a UNIX domain socket or Windows pipe + if (options.path && isPipeName(options.path)) { + // We can not call fchmod on abstract unix socket + if ( + options.path[0] === "\0" && + (options.readableAll || options.writableAll) + ) { + const msg = + "can not set readableAll or writableAllt to true when path is abstract unix socket"; + throw new ERR_INVALID_ARG_VALUE("options", options, msg); + } + const pipeName = (this._pipeName = options.path); + backlog = options.backlog || backlogFromArgs; + listenInCluster( + this, + pipeName, + -1, + -1, + backlog, + undefined, + options.exclusive, + undefined, + { + readableAll: options.readableAll, + writableAll: options.writableAll, + } + ); + + if (!this._handle) { + // Failed and an error shall be emitted in the next tick. + // Therefore, we directly return. 
+ return this; + } + + let mode = 0; + if (options.readableAll === true) mode |= PipeConstants.UV_READABLE; + if (options.writableAll === true) mode |= PipeConstants.UV_WRITABLE; + if (mode !== 0) { + const err = this._handle.fchmod(mode); + if (err) { + this._handle.close(); + this._handle = null; + throw new ErrnoException(err, "uv_pipe_chmod"); + } + } + return this; + } + + if (!("port" in options || "path" in options)) { + throw new ERR_INVALID_ARG_VALUE( + "options", + options, + 'must have the property "port" or "path"' + ); + } + + throw new ERR_INVALID_ARG_VALUE("options", options); +}; + +function isIpv6LinkLocal(ip) { + if (!isIPv6(ip)) { + return false; + } + + const ipv6Buffer = convertIpv6StringToBuffer(ip); + const firstByte = ipv6Buffer[0]; // The first 8 bits + const secondByte = ipv6Buffer[1]; // The next 8 bits + + // The link-local prefix is `1111111010`, which in hexadecimal is `fe80` + // First 8 bits (firstByte) should be `11111110` (0xfe) + // The next 2 bits of the second byte should be `10` (0x80) + + const isFirstByteCorrect = firstByte === 0xfe; // 0b11111110 == 0xfe + const isSecondByteCorrect = (secondByte & 0xc0) === 0x80; // 0b10xxxxxx == 0x80 + + return isFirstByteCorrect && isSecondByteCorrect; +} + +function filterOnlyValidAddress(addresses) { + // Return the first non IPV6 link-local address if present + for (const address of addresses) { + if (!isIpv6LinkLocal(address.address)) { + return address; + } + } + + // Otherwise return the first address + return addresses[0]; +} + +function lookupAndListen(self, port, address, backlog, exclusive, flags) { + if (dns === undefined) dns = require("dns"); + const listeningId = self._listeningId; + + dns.lookup(address, { all: true }, (err, addresses) => { + if (listeningId !== self._listeningId) { + return; + } + if (err) { + self.emit("error", err); + } else { + const validAddress = filterOnlyValidAddress(addresses); + const family = validAddress?.family || 4; + + listenInCluster( + self, + validAddress.address, + port, + family, + backlog, + undefined, + exclusive, + flags + ); + } + }); +} + +ObjectDefineProperty(Server.prototype, "listening", { + __proto__: null, + get: function () { + return !!this._handle; + }, + configurable: true, + enumerable: true, +}); + +Server.prototype.address = function () { + if (this._handle?.getsockname) { + const out = {}; + const err = this._handle.getsockname(out); + if (err) { + throw new ErrnoException(err, "address"); + } + return out; + } else if (this._pipeName) { + return this._pipeName; + } + return null; +}; + +function onconnection(err, clientHandle) { + const handle = this; + const self = handle[owner_symbol]; + + debug("onconnection"); + + if (err) { + self.emit("error", new ErrnoException(err, "accept")); + return; + } + + if (self.maxConnections != null && self._connections >= self.maxConnections) { + if (clientHandle.getsockname || clientHandle.getpeername) { + const data = { __proto__: null }; + if (clientHandle.getsockname) { + const localInfo = { __proto__: null }; + clientHandle.getsockname(localInfo); + data.localAddress = localInfo.address; + data.localPort = localInfo.port; + data.localFamily = localInfo.family; + } + if (clientHandle.getpeername) { + const remoteInfo = { __proto__: null }; + clientHandle.getpeername(remoteInfo); + data.remoteAddress = remoteInfo.address; + data.remotePort = remoteInfo.port; + data.remoteFamily = remoteInfo.family; + } + self.emit("drop", data); + } else { + self.emit("drop"); + } + clientHandle.close(); + return; + } + if 
(self.blockList && typeof clientHandle.getpeername === "function") { + const remoteInfo = { __proto__: null }; + clientHandle.getpeername(remoteInfo); + const addressType = isIP(remoteInfo.address); + if ( + addressType && + self.blockList.check(remoteInfo.address, `ipv${addressType}`) + ) { + clientHandle.close(); + return; + } + } + const socket = new Socket({ + handle: clientHandle, + allowHalfOpen: self.allowHalfOpen, + pauseOnCreate: self.pauseOnConnect, + readable: true, + writable: true, + readableHighWaterMark: self.highWaterMark, + writableHighWaterMark: self.highWaterMark, + }); + + if (self.noDelay && clientHandle.setNoDelay) { + socket[kSetNoDelay] = true; + clientHandle.setNoDelay(true); + } + if (self.keepAlive && clientHandle.setKeepAlive) { + socket[kSetKeepAlive] = true; + socket[kSetKeepAliveInitialDelay] = self.keepAliveInitialDelay; + clientHandle.setKeepAlive(true, self.keepAliveInitialDelay); + } + + self._connections++; + socket.server = self; + socket._server = self; + self.emit("connection", socket); + if (netServerSocketChannel.hasSubscribers) { + netServerSocketChannel.publish({ + socket, + }); + } +} + +/** + * Gets the number of concurrent connections on the server + * @param {Function} cb + * @returns {Server} + */ + +Server.prototype.getConnections = function (cb) { + const self = this; + + function end(err, connections) { + defaultTriggerAsyncIdScope( + self[async_id_symbol], + process.nextTick, + cb, + err, + connections + ); + } + + if (!this._usingWorkers) { + end(null, this._connections); + return this; + } + + // Poll workers + let left = this._workers.length; + let total = this._connections; + + function oncount(err, count) { + if (err) { + left = -1; + return end(err); + } + + total += count; + if (--left === 0) return end(null, total); + } + + for (let n = 0; n < this._workers.length; n++) { + this._workers[n].getConnections(oncount); + } + + return this; +}; + +Server.prototype.close = function (cb) { + this._listeningId++; + if (typeof cb === "function") { + if (!this._handle) { + this.once("close", function close() { + cb(new ERR_SERVER_NOT_RUNNING()); + }); + } else { + this.once("close", cb); + } + } + + if (this._handle) { + this._handle.close(); + this._handle = null; + } + + if (this._usingWorkers) { + let left = this._workers.length; + const onWorkerClose = () => { + if (--left !== 0) return; + + this._connections = 0; + this._emitCloseIfDrained(); + }; + + // Increment connections to be sure that, even if all sockets will be closed + // during polling of workers, `close` event will be emitted only once. + this._connections++; + + // Poll workers + for (let n = 0; n < this._workers.length; n++) + this._workers[n].close(onWorkerClose); + } else { + this._emitCloseIfDrained(); + } + + return this; +}; + +Server.prototype[SymbolAsyncDispose] = async function () { + if (!this._handle) { + return; + } + await FunctionPrototypeCall(promisify(this.close), this); +}; + +Server.prototype._emitCloseIfDrained = function () { + debug("SERVER _emitCloseIfDrained"); + + if (this._handle || this._connections) { + debug( + "SERVER handle? %j connections? 
%d", + !!this._handle, + this._connections + ); + return; + } + + defaultTriggerAsyncIdScope( + this[async_id_symbol], + process.nextTick, + emitCloseNT, + this + ); +}; + +function emitCloseNT(self) { + debug("SERVER: emit close"); + self.emit("close"); +} + +Server.prototype[EventEmitter.captureRejectionSymbol] = function ( + err, + event, + sock +) { + switch (event) { + case "connection": + sock.destroy(err); + break; + default: + this.emit("error", err); + } +}; + +// Legacy alias on the C++ wrapper object. This is not public API, so we may +// want to runtime-deprecate it at some point. There's no hurry, though. +ObjectDefineProperty(TCP.prototype, "owner", { + __proto__: null, + get() { + return this[owner_symbol]; + }, + set(v) { + return (this[owner_symbol] = v); + }, +}); + +ObjectDefineProperty(Socket.prototype, "_handle", { + __proto__: null, + get() { + return this[kHandle]; + }, + set(v) { + return (this[kHandle] = v); + }, +}); + +Server.prototype._setupWorker = function (socketList) { + this._usingWorkers = true; + this._workers.push(socketList); + socketList.once("exit", (socketList) => { + const index = ArrayPrototypeIndexOf(this._workers, socketList); + this._workers.splice(index, 1); + }); +}; + +Server.prototype.ref = function () { + this._unref = false; + + if (this._handle) this._handle.ref(); + + return this; +}; + +Server.prototype.unref = function () { + this._unref = true; + + if (this._handle) this._handle.unref(); + + return this; +}; + +module.exports = { + _createServerHandle: createServerHandle, + _normalizeArgs: normalizeArgs, + get BlockList() { + BlockList ??= require("internal/blocklist").BlockList; + return BlockList; + }, + get SocketAddress() { + SocketAddress ??= require("internal/socketaddress").SocketAddress; + return SocketAddress; + }, + connect, + createConnection: connect, + createServer, + isIP: isIP, + isIPv4: isIPv4, + isIPv6: isIPv6, + Server, + Socket, + Stream: Socket, // Legacy naming + getDefaultAutoSelectFamily, + setDefaultAutoSelectFamily, + getDefaultAutoSelectFamilyAttemptTimeout, + setDefaultAutoSelectFamilyAttemptTimeout, +}; diff --git a/.codesandbox/node/node_sea.cc b/.codesandbox/node/node_sea.cc new file mode 100644 index 00000000..a1184d47 --- /dev/null +++ b/.codesandbox/node/node_sea.cc @@ -0,0 +1,895 @@ +#include "node_sea.h" + +#include "blob_serializer_deserializer-inl.h" +#include "debug_utils-inl.h" +#include "env-inl.h" +#include "node_contextify.h" +#include "node_errors.h" +#include "node_external_reference.h" +#include "node_internals.h" +#include "node_options.h" +#include "node_snapshot_builder.h" +#include "node_union_bytes.h" +#include "node_v8_platform-inl.h" +#include "simdjson.h" +#include "util-inl.h" + +// The POSTJECT_SENTINEL_FUSE macro is a string of random characters selected by +// the Node.js project that is present only once in the entire binary. It is +// used by the postject_has_resource() function to efficiently detect if a +// resource has been injected. See +// https://github.com/nodejs/postject/blob/35343439cac8c488f2596d7c4c1dddfec1fddcae/postject-api.h#L42-L45. 
+#define POSTJECT_SENTINEL_FUSE "NODE_SEA_FUSE_fce680ab2cc467b6e072b8b5df1996b2" +#include "postject-api.h" +#undef POSTJECT_SENTINEL_FUSE + +#include +#include +#include +#include + +using node::ExitCode; +using v8::Array; +using v8::ArrayBuffer; +using v8::BackingStore; +using v8::Context; +using v8::Function; +using v8::FunctionCallbackInfo; +using v8::HandleScope; +using v8::Isolate; +using v8::Local; +using v8::LocalVector; +using v8::MaybeLocal; +using v8::NewStringType; +using v8::Object; +using v8::ScriptCompiler; +using v8::ScriptOrigin; +using v8::String; +using v8::Value; + +namespace node { +namespace sea { + +namespace { + +SeaFlags operator|(SeaFlags x, SeaFlags y) { + return static_cast(static_cast(x) | + static_cast(y)); +} + +SeaFlags operator&(SeaFlags x, SeaFlags y) { + return static_cast(static_cast(x) & + static_cast(y)); +} + +SeaFlags operator|=(/* NOLINT (runtime/references) */ SeaFlags& x, SeaFlags y) { + return x = x | y; +} + +class SeaSerializer : public BlobSerializer { + public: + SeaSerializer() + : BlobSerializer( + per_process::enabled_debug_list.enabled(DebugCategory::SEA)) {} + + template ::value>* = nullptr, + std::enable_if_t::value>* = nullptr> + size_t Write(const T& data); +}; + +template <> +size_t SeaSerializer::Write(const SeaResource& sea) { + sink.reserve(SeaResource::kHeaderSize + sea.main_code_or_snapshot.size()); + + Debug("Write SEA magic %x\n", kMagic); + size_t written_total = WriteArithmetic(kMagic); + + uint32_t flags = static_cast(sea.flags); + Debug("Write SEA flags %x\n", flags); + written_total += WriteArithmetic(flags); + + Debug("Write SEA resource exec argv extension %u\n", + static_cast(sea.exec_argv_extension)); + written_total += + WriteArithmetic(static_cast(sea.exec_argv_extension)); + DCHECK_EQ(written_total, SeaResource::kHeaderSize); + + Debug("Write SEA code path %p, size=%zu\n", + sea.code_path.data(), + sea.code_path.size()); + written_total += + WriteStringView(sea.code_path, StringLogMode::kAddressAndContent); + + Debug("Write SEA resource %s %p, size=%zu\n", + sea.use_snapshot() ? "snapshot" : "code", + sea.main_code_or_snapshot.data(), + sea.main_code_or_snapshot.size()); + written_total += + WriteStringView(sea.main_code_or_snapshot, + sea.use_snapshot() ? 
StringLogMode::kAddressOnly + : StringLogMode::kAddressAndContent); + + if (sea.code_cache.has_value()) { + Debug("Write SEA resource code cache %p, size=%zu\n", + sea.code_cache->data(), + sea.code_cache->size()); + written_total += + WriteStringView(sea.code_cache.value(), StringLogMode::kAddressOnly); + } + + if (!sea.assets.empty()) { + Debug("Write SEA resource assets size %zu\n", sea.assets.size()); + written_total += WriteArithmetic(sea.assets.size()); + for (auto const& [key, content] : sea.assets) { + Debug("Write SEA resource asset %s at %p, size=%zu\n", + key, + content.data(), + content.size()); + written_total += WriteStringView(key, StringLogMode::kAddressAndContent); + written_total += WriteStringView(content, StringLogMode::kAddressOnly); + } + } + + if (static_cast(sea.flags & SeaFlags::kIncludeExecArgv)) { + Debug("Write SEA resource exec argv size %zu\n", sea.exec_argv.size()); + written_total += WriteArithmetic(sea.exec_argv.size()); + for (const auto& arg : sea.exec_argv) { + Debug("Write SEA resource exec arg %s at %p, size=%zu\n", + arg.data(), + arg.data(), + arg.size()); + written_total += WriteStringView(arg, StringLogMode::kAddressAndContent); + } + } + return written_total; +} + +class SeaDeserializer : public BlobDeserializer { + public: + explicit SeaDeserializer(std::string_view v) + : BlobDeserializer( + per_process::enabled_debug_list.enabled(DebugCategory::SEA), v) {} + + template ::value>* = nullptr, + std::enable_if_t::value>* = nullptr> + T Read(); +}; + +template <> +SeaResource SeaDeserializer::Read() { + uint32_t magic = ReadArithmetic(); + Debug("Read SEA magic %x\n", magic); + + CHECK_EQ(magic, kMagic); + SeaFlags flags(static_cast(ReadArithmetic())); + Debug("Read SEA flags %x\n", static_cast(flags)); + + uint8_t extension_value = ReadArithmetic(); + SeaExecArgvExtension exec_argv_extension = + static_cast(extension_value); + Debug("Read SEA resource exec argv extension %u\n", extension_value); + CHECK_EQ(read_total, SeaResource::kHeaderSize); + + std::string_view code_path = + ReadStringView(StringLogMode::kAddressAndContent); + Debug( + "Read SEA code path %p, size=%zu\n", code_path.data(), code_path.size()); + + bool use_snapshot = static_cast(flags & SeaFlags::kUseSnapshot); + std::string_view code = + ReadStringView(use_snapshot ? StringLogMode::kAddressOnly + : StringLogMode::kAddressAndContent); + + Debug("Read SEA resource %s %p, size=%zu\n", + use_snapshot ? 
"snapshot" : "code", + code.data(), + code.size()); + + std::string_view code_cache; + if (static_cast(flags & SeaFlags::kUseCodeCache)) { + code_cache = ReadStringView(StringLogMode::kAddressOnly); + Debug("Read SEA resource code cache %p, size=%zu\n", + code_cache.data(), + code_cache.size()); + } + + std::unordered_map assets; + if (static_cast(flags & SeaFlags::kIncludeAssets)) { + size_t assets_size = ReadArithmetic(); + Debug("Read SEA resource assets size %zu\n", assets_size); + for (size_t i = 0; i < assets_size; ++i) { + std::string_view key = ReadStringView(StringLogMode::kAddressAndContent); + std::string_view content = ReadStringView(StringLogMode::kAddressOnly); + Debug("Read SEA resource asset %s at %p, size=%zu\n", + key, + content.data(), + content.size()); + assets.emplace(key, content); + } + } + + std::vector exec_argv; + if (static_cast(flags & SeaFlags::kIncludeExecArgv)) { + size_t exec_argv_size = ReadArithmetic(); + Debug("Read SEA resource exec args size %zu\n", exec_argv_size); + exec_argv.reserve(exec_argv_size); + for (size_t i = 0; i < exec_argv_size; ++i) { + std::string_view arg = ReadStringView(StringLogMode::kAddressAndContent); + Debug("Read SEA resource exec arg %s at %p, size=%zu\n", + arg.data(), + arg.data(), + arg.size()); + exec_argv.emplace_back(arg); + } + } + return {flags, + exec_argv_extension, + code_path, + code, + code_cache, + assets, + exec_argv}; +} + +std::string_view FindSingleExecutableBlob() { +#if !defined(DISABLE_SINGLE_EXECUTABLE_APPLICATION) + CHECK(IsSingleExecutable()); + static const std::string_view result = []() -> std::string_view { + size_t size; +#ifdef __APPLE__ + postject_options options; + postject_options_init(&options); + options.macho_segment_name = "NODE_SEA"; + const char* blob = static_cast( + postject_find_resource("NODE_SEA_BLOB", &size, &options)); +#else + const char* blob = static_cast( + postject_find_resource("NODE_SEA_BLOB", &size, nullptr)); +#endif + return {blob, size}; + }(); + per_process::Debug(DebugCategory::SEA, + "Found SEA blob %p, size=%zu\n", + result.data(), + result.size()); + return result; +#else + UNREACHABLE(); +#endif // !defined(DISABLE_SINGLE_EXECUTABLE_APPLICATION) +} + +} // anonymous namespace + +bool SeaResource::use_snapshot() const { + return static_cast(flags & SeaFlags::kUseSnapshot); +} + +bool SeaResource::use_code_cache() const { + return static_cast(flags & SeaFlags::kUseCodeCache); +} + +SeaResource FindSingleExecutableResource() { + static const SeaResource sea_resource = []() -> SeaResource { + std::string_view blob = FindSingleExecutableBlob(); + per_process::Debug(DebugCategory::SEA, + "Found SEA resource %p, size=%zu\n", + blob.data(), + blob.size()); + SeaDeserializer deserializer(blob); + return deserializer.Read(); + }(); + return sea_resource; +} + +bool IsSingleExecutable() { + return postject_has_resource(); +} + +void IsSea(const FunctionCallbackInfo& args) { + args.GetReturnValue().Set(IsSingleExecutable()); +} + +void IsExperimentalSeaWarningNeeded(const FunctionCallbackInfo& args) { + bool is_building_sea = + !per_process::cli_options->experimental_sea_config.empty(); + if (is_building_sea) { + args.GetReturnValue().Set(true); + return; + } + + if (!IsSingleExecutable()) { + args.GetReturnValue().Set(false); + return; + } + + SeaResource sea_resource = FindSingleExecutableResource(); + args.GetReturnValue().Set(!static_cast( + sea_resource.flags & SeaFlags::kDisableExperimentalSeaWarning)); +} + +std::tuple FixupArgsForSEA(int argc, char** argv) { + // 
Repeats argv[0] at position 1 on argv as a replacement for the missing + // entry point file path. + if (IsSingleExecutable()) { + static std::vector new_argv; + static std::vector exec_argv_storage; + static std::vector cli_extension_args; + + SeaResource sea_resource = FindSingleExecutableResource(); + + new_argv.clear(); + exec_argv_storage.clear(); + cli_extension_args.clear(); + + // Handle CLI extension mode for --node-options + if (sea_resource.exec_argv_extension == SeaExecArgvExtension::kCli) { + // Extract --node-options and filter argv + for (int i = 1; i < argc; ++i) { + if (strncmp(argv[i], "--node-options=", 15) == 0) { + std::string node_options = argv[i] + 15; + std::vector errors; + cli_extension_args = ParseNodeOptionsEnvVar(node_options, &errors); + // Remove this argument by shifting the rest + for (int j = i; j < argc - 1; ++j) { + argv[j] = argv[j + 1]; + } + argc--; + i--; // Adjust index since we removed an element + } + } + } + + // Reserve space for argv[0], exec argv, cli extension args, original argv, + // and nullptr + new_argv.reserve(argc + sea_resource.exec_argv.size() + + cli_extension_args.size() + 2); + new_argv.emplace_back(argv[0]); + + // Insert exec argv from SEA config + if (!sea_resource.exec_argv.empty()) { + exec_argv_storage.reserve(sea_resource.exec_argv.size() + + cli_extension_args.size()); + for (const auto& arg : sea_resource.exec_argv) { + exec_argv_storage.emplace_back(arg); + new_argv.emplace_back(exec_argv_storage.back().data()); + } + } + + // Insert CLI extension args + for (const auto& arg : cli_extension_args) { + exec_argv_storage.emplace_back(arg); + new_argv.emplace_back(exec_argv_storage.back().data()); + } + + // Add actual run time arguments + new_argv.insert(new_argv.end(), argv, argv + argc); + new_argv.emplace_back(nullptr); + argc = new_argv.size() - 1; + argv = new_argv.data(); + } + + return {argc, argv}; +} + +namespace { + +struct SeaConfig { + std::string main_path; + std::string output_path; + SeaFlags flags = SeaFlags::kDefault; + SeaExecArgvExtension exec_argv_extension = SeaExecArgvExtension::kEnv; + std::unordered_map assets; + std::vector exec_argv; +}; + +std::optional ParseSingleExecutableConfig( + const std::string& config_path) { + std::string config; + int r = ReadFileSync(&config, config_path.c_str()); + if (r != 0) { + const char* err = uv_strerror(r); + FPrintF(stderr, + "Cannot read single executable configuration from %s: %s\n", + config_path, + err); + return std::nullopt; + } + + SeaConfig result; + + simdjson::ondemand::parser parser; + simdjson::ondemand::document document; + simdjson::ondemand::object main_object; + simdjson::error_code error = + parser.iterate(simdjson::pad(config)).get(document); + + if (!error) { + error = document.get_object().get(main_object); + } + if (error) { + FPrintF(stderr, + "Cannot parse JSON from %s: %s\n", + config_path, + simdjson::error_message(error)); + return std::nullopt; + } + + bool use_snapshot_value = false; + bool use_code_cache_value = false; + + for (auto field : main_object) { + std::string_view key; + if (field.unescaped_key().get(key)) { + FPrintF(stderr, "Cannot read key from %s\n", config_path); + return std::nullopt; + } + if (key == "main") { + if (field.value().get_string().get(result.main_path) || + result.main_path.empty()) { + FPrintF(stderr, + "\"main\" field of %s is not a non-empty string\n", + config_path); + return std::nullopt; + } + } else if (key == "output") { + if (field.value().get_string().get(result.output_path) || + 
result.output_path.empty()) { + FPrintF(stderr, + "\"output\" field of %s is not a non-empty string\n", + config_path); + return std::nullopt; + } + } else if (key == "disableExperimentalSEAWarning") { + bool disable_experimental_sea_warning; + if (field.value().get_bool().get(disable_experimental_sea_warning)) { + FPrintF( + stderr, + "\"disableExperimentalSEAWarning\" field of %s is not a Boolean\n", + config_path); + return std::nullopt; + } + if (disable_experimental_sea_warning) { + result.flags |= SeaFlags::kDisableExperimentalSeaWarning; + } + } else if (key == "useSnapshot") { + if (field.value().get_bool().get(use_snapshot_value)) { + FPrintF(stderr, + "\"useSnapshot\" field of %s is not a Boolean\n", + config_path); + return std::nullopt; + } + if (use_snapshot_value) { + result.flags |= SeaFlags::kUseSnapshot; + } + } else if (key == "useCodeCache") { + if (field.value().get_bool().get(use_code_cache_value)) { + FPrintF(stderr, + "\"useCodeCache\" field of %s is not a Boolean\n", + config_path); + return std::nullopt; + } + if (use_code_cache_value) { + result.flags |= SeaFlags::kUseCodeCache; + } + } else if (key == "assets") { + simdjson::ondemand::object assets_object; + if (field.value().get_object().get(assets_object)) { + FPrintF(stderr, + "\"assets\" field of %s is not a map of strings\n", + config_path); + return std::nullopt; + } + simdjson::ondemand::value asset_value; + for (auto asset_field : assets_object) { + std::string_view key_str; + std::string_view value_str; + if (asset_field.unescaped_key().get(key_str) || + asset_field.value().get(asset_value) || + asset_value.get_string().get(value_str)) { + FPrintF(stderr, + "\"assets\" field of %s is not a map of strings\n", + config_path); + return std::nullopt; + } + + result.assets.emplace(key_str, value_str); + } + + if (!result.assets.empty()) { + result.flags |= SeaFlags::kIncludeAssets; + } + } else if (key == "execArgv") { + simdjson::ondemand::array exec_argv_array; + if (field.value().get_array().get(exec_argv_array)) { + FPrintF(stderr, + "\"execArgv\" field of %s is not an array of strings\n", + config_path); + return std::nullopt; + } + std::vector exec_argv; + for (auto argv : exec_argv_array) { + std::string_view argv_str; + if (argv.get_string().get(argv_str)) { + FPrintF(stderr, + "\"execArgv\" field of %s is not an array of strings\n", + config_path); + return std::nullopt; + } + exec_argv.emplace_back(argv_str); + } + if (!exec_argv.empty()) { + result.flags |= SeaFlags::kIncludeExecArgv; + result.exec_argv = std::move(exec_argv); + } + } else if (key == "execArgvExtension") { + std::string_view extension_str; + if (field.value().get_string().get(extension_str)) { + FPrintF(stderr, + "\"execArgvExtension\" field of %s is not a string\n", + config_path); + return std::nullopt; + } + if (extension_str == "none") { + result.exec_argv_extension = SeaExecArgvExtension::kNone; + } else if (extension_str == "env") { + result.exec_argv_extension = SeaExecArgvExtension::kEnv; + } else if (extension_str == "cli") { + result.exec_argv_extension = SeaExecArgvExtension::kCli; + } else { + FPrintF(stderr, + "\"execArgvExtension\" field of %s must be one of " + "\"none\", \"env\", or \"cli\"\n", + config_path); + return std::nullopt; + } + } + } + + if (static_cast(result.flags & SeaFlags::kUseSnapshot) && + static_cast(result.flags & SeaFlags::kUseCodeCache)) { + // TODO(joyeecheung): code cache in snapshot should be configured by + // separate snapshot configurations. 
+ FPrintF(stderr, + "\"useCodeCache\" is redundant when \"useSnapshot\" is true\n"); + } + + if (result.main_path.empty()) { + FPrintF(stderr, + "\"main\" field of %s is not a non-empty string\n", + config_path); + return std::nullopt; + } + + if (result.output_path.empty()) { + FPrintF(stderr, + "\"output\" field of %s is not a non-empty string\n", + config_path); + return std::nullopt; + } + + return result; +} + +ExitCode GenerateSnapshotForSEA(const SeaConfig& config, + const std::vector& args, + const std::vector& exec_args, + const std::string& builder_script_content, + const SnapshotConfig& snapshot_config, + std::vector* snapshot_blob) { + SnapshotData snapshot; + // TODO(joyeecheung): make the arguments configurable through the JSON + // config or a programmatic API. + std::vector patched_args = {args[0], config.main_path}; + ExitCode exit_code = SnapshotBuilder::Generate(&snapshot, + patched_args, + exec_args, + builder_script_content, + snapshot_config); + if (exit_code != ExitCode::kNoFailure) { + return exit_code; + } + auto& persistents = snapshot.env_info.principal_realm.persistent_values; + auto it = std::ranges::find_if(persistents, [](const PropInfo& prop) { + return prop.name == "snapshot_deserialize_main"; + }); + if (it == persistents.end()) { + FPrintF( + stderr, + "%s does not invoke " + "v8.startupSnapshot.setDeserializeMainFunction(), which is required " + "for snapshot scripts used to build single executable applications." + "\n", + config.main_path); + return ExitCode::kGenericUserError; + } + // We need the temporary variable for copy elision. + std::vector temp = snapshot.ToBlob(); + *snapshot_blob = std::move(temp); + return ExitCode::kNoFailure; +} + +std::optional GenerateCodeCache(std::string_view main_path, + std::string_view main_script) { + RAIIIsolate raii_isolate(SnapshotBuilder::GetEmbeddedSnapshotData()); + Isolate* isolate = raii_isolate.get(); + + v8::Isolate::Scope isolate_scope(isolate); + HandleScope handle_scope(isolate); + + Local context = Context::New(isolate); + Context::Scope context_scope(context); + + errors::PrinterTryCatch bootstrapCatch( + isolate, errors::PrinterTryCatch::kPrintSourceLine); + + Local filename; + if (!String::NewFromUtf8(isolate, + main_path.data(), + NewStringType::kNormal, + main_path.length()) + .ToLocal(&filename)) { + return std::nullopt; + } + + Local content; + if (!String::NewFromUtf8(isolate, + main_script.data(), + NewStringType::kNormal, + main_script.length()) + .ToLocal(&content)) { + return std::nullopt; + } + + LocalVector parameters( + isolate, + { + FIXED_ONE_BYTE_STRING(isolate, "exports"), + FIXED_ONE_BYTE_STRING(isolate, "require"), + FIXED_ONE_BYTE_STRING(isolate, "module"), + FIXED_ONE_BYTE_STRING(isolate, "__filename"), + FIXED_ONE_BYTE_STRING(isolate, "__dirname"), + }); + ScriptOrigin script_origin(filename, 0, 0, true); + ScriptCompiler::Source script_source(content, script_origin); + MaybeLocal maybe_fn = + ScriptCompiler::CompileFunction(context, + &script_source, + parameters.size(), + parameters.data(), + 0, + nullptr); + Local fn; + if (!maybe_fn.ToLocal(&fn)) { + return std::nullopt; + } + + // TODO(RaisinTen): Using the V8 code cache prevents us from using `import()` + // in the SEA code. Support it. 
+ // Refs: https://github.com/nodejs/node/pull/48191#discussion_r1213271430 + std::unique_ptr cache{ + ScriptCompiler::CreateCodeCacheForFunction(fn)}; + std::string code_cache(cache->data, cache->data + cache->length); + return code_cache; +} + +int BuildAssets(const std::unordered_map& config, + std::unordered_map* assets) { + for (auto const& [key, path] : config) { + std::string blob; + int r = ReadFileSync(&blob, path.c_str()); + if (r != 0) { + const char* err = uv_strerror(r); + FPrintF(stderr, "Cannot read asset %s: %s\n", path.c_str(), err); + return r; + } + assets->emplace(key, std::move(blob)); + } + return 0; +} + +ExitCode GenerateSingleExecutableBlob( + const SeaConfig& config, + const std::vector& args, + const std::vector& exec_args) { + std::string main_script; + // TODO(joyeecheung): unify the file utils. + int r = ReadFileSync(&main_script, config.main_path.c_str()); + if (r != 0) { + const char* err = uv_strerror(r); + FPrintF(stderr, "Cannot read main script %s:%s\n", config.main_path, err); + return ExitCode::kGenericUserError; + } + + std::vector snapshot_blob; + bool builds_snapshot_from_main = + static_cast(config.flags & SeaFlags::kUseSnapshot); + if (builds_snapshot_from_main) { + // TODO(joyeecheung): allow passing snapshot configuration in SEA configs. + SnapshotConfig snapshot_config; + snapshot_config.builder_script_path = main_script; + ExitCode exit_code = GenerateSnapshotForSEA( + config, args, exec_args, main_script, snapshot_config, &snapshot_blob); + if (exit_code != ExitCode::kNoFailure) { + return exit_code; + } + } + + std::optional optional_sv_code_cache; + std::string code_cache; + if (static_cast(config.flags & SeaFlags::kUseCodeCache)) { + std::optional optional_code_cache = + GenerateCodeCache(config.main_path, main_script); + if (!optional_code_cache.has_value()) { + FPrintF(stderr, "Cannot generate V8 code cache\n"); + return ExitCode::kGenericUserError; + } + code_cache = optional_code_cache.value(); + optional_sv_code_cache = code_cache; + } + + std::unordered_map assets; + if (!config.assets.empty() && BuildAssets(config.assets, &assets) != 0) { + return ExitCode::kGenericUserError; + } + std::unordered_map assets_view; + for (auto const& [key, content] : assets) { + assets_view.emplace(key, content); + } + std::vector exec_argv_view; + for (const auto& arg : config.exec_argv) { + exec_argv_view.emplace_back(arg); + } + SeaResource sea{ + config.flags, + config.exec_argv_extension, + config.main_path, + builds_snapshot_from_main + ? 
std::string_view{snapshot_blob.data(), snapshot_blob.size()} + : std::string_view{main_script.data(), main_script.size()}, + optional_sv_code_cache, + assets_view, + exec_argv_view}; + + SeaSerializer serializer; + serializer.Write(sea); + + uv_buf_t buf = uv_buf_init(serializer.sink.data(), serializer.sink.size()); + r = WriteFileSync(config.output_path.c_str(), buf); + if (r != 0) { + const char* err = uv_strerror(r); + FPrintF(stderr, "Cannot write output to %s:%s\n", config.output_path, err); + return ExitCode::kGenericUserError; + } + + FPrintF(stderr, + "Wrote single executable preparation blob to %s\n", + config.output_path); + return ExitCode::kNoFailure; +} + +} // anonymous namespace + +ExitCode BuildSingleExecutableBlob(const std::string& config_path, + const std::vector& args, + const std::vector& exec_args) { + std::optional config_opt = + ParseSingleExecutableConfig(config_path); + if (config_opt.has_value()) { + ExitCode code = + GenerateSingleExecutableBlob(config_opt.value(), args, exec_args); + return code; + } + + return ExitCode::kGenericUserError; +} + +void GetAsset(const FunctionCallbackInfo& args) { + CHECK_EQ(args.Length(), 1); + CHECK(args[0]->IsString()); + Utf8Value key(args.GetIsolate(), args[0]); + SeaResource sea_resource = FindSingleExecutableResource(); + if (sea_resource.assets.empty()) { + return; + } + auto it = sea_resource.assets.find(*key); + if (it == sea_resource.assets.end()) { + return; + } + // We cast away the constness here, the JS land should ensure that + // the data is not mutated. + std::unique_ptr store = ArrayBuffer::NewBackingStore( + const_cast(it->second.data()), + it->second.size(), + [](void*, size_t, void*) {}, + nullptr); + Local ab = ArrayBuffer::New(args.GetIsolate(), std::move(store)); + args.GetReturnValue().Set(ab); +} + +void GetAssetKeys(const FunctionCallbackInfo& args) { + CHECK_EQ(args.Length(), 0); + Isolate* isolate = args.GetIsolate(); + SeaResource sea_resource = FindSingleExecutableResource(); + + Local context = isolate->GetCurrentContext(); + LocalVector keys(isolate); + keys.reserve(sea_resource.assets.size()); + for (const auto& [key, _] : sea_resource.assets) { + Local key_str; + if (!ToV8Value(context, key).ToLocal(&key_str)) { + return; + } + keys.push_back(key_str); + } + Local result = Array::New(isolate, keys.data(), keys.size()); + args.GetReturnValue().Set(result); +} + +MaybeLocal LoadSingleExecutableApplication( + const StartExecutionCallbackInfo& info) { + // Here we are currently relying on the fact that in NodeMainInstance::Run(), + // env->context() is entered. + Local context = Isolate::GetCurrent()->GetCurrentContext(); + Environment* env = Environment::GetCurrent(context); + SeaResource sea = FindSingleExecutableResource(); + + CHECK(!sea.use_snapshot()); + // TODO(joyeecheung): this should be an external string. Refactor UnionBytes + // and make it easy to create one based on static content on the fly. 
+ Local main_script = + ToV8Value(env->context(), sea.main_code_or_snapshot).ToLocalChecked(); + return info.run_cjs->Call( + env->context(), Null(env->isolate()), 1, &main_script); +} + +bool MaybeLoadSingleExecutableApplication(Environment* env) { +#ifndef DISABLE_SINGLE_EXECUTABLE_APPLICATION + if (!IsSingleExecutable()) { + return false; + } + + SeaResource sea = FindSingleExecutableResource(); + + if (sea.use_snapshot()) { + // The SEA preparation blob building process should already enforce this, + // this check is just here to guard against the unlikely case where + // the SEA preparation blob has been manually modified by someone. + CHECK(!env->snapshot_deserialize_main().IsEmpty()); + LoadEnvironment(env, StartExecutionCallback{}); + return true; + } + + LoadEnvironment(env, LoadSingleExecutableApplication); + return true; +#else + return false; +#endif +} + +void Initialize(Local target, + Local unused, + Local context, + void* priv) { + SetMethod(context, target, "isSea", IsSea); + SetMethod(context, + target, + "isExperimentalSeaWarningNeeded", + IsExperimentalSeaWarningNeeded); + SetMethod(context, target, "getAsset", GetAsset); + SetMethod(context, target, "getAssetKeys", GetAssetKeys); +} + +void RegisterExternalReferences(ExternalReferenceRegistry* registry) { + registry->Register(IsSea); + registry->Register(IsExperimentalSeaWarningNeeded); + registry->Register(GetAsset); + registry->Register(GetAssetKeys); +} + +} // namespace sea +} // namespace node + +NODE_BINDING_CONTEXT_AWARE_INTERNAL(sea, node::sea::Initialize) +NODE_BINDING_EXTERNAL_REFERENCE(sea, node::sea::RegisterExternalReferences) \ No newline at end of file diff --git a/.codesandbox/node/os.js b/.codesandbox/node/os.js new file mode 100644 index 00000000..cd406d49 --- /dev/null +++ b/.codesandbox/node/os.js @@ -0,0 +1,326 @@ +"use strict"; + +const { + ArrayPrototypePush, + Float64Array, + ObjectDefineProperties, + ObjectFreeze, + StringPrototypeSlice, + SymbolToPrimitive, +} = primordials; + +const { getTempDir } = internalBinding("credentials"); +const constants = internalBinding("constants").os; +const isWindows = process.platform === "win32"; + +const { + codes: { ERR_SYSTEM_ERROR }, + hideStackFrames, +} = require("internal/errors"); +const { getCIDR } = require("internal/util"); +const { validateInt32 } = require("internal/validators"); + +const { + getAvailableParallelism, + getCPUs, + getFreeMem, + getHomeDirectory: _getHomeDirectory, + getHostname: _getHostname, + getInterfaceAddresses: _getInterfaceAddresses, + getLoadAvg, + getPriority: _getPriority, + getOSInformation: _getOSInformation, + getTotalMem, + getUserInfo, + getUptime: _getUptime, + isBigEndian, + setPriority: _setPriority, +} = internalBinding("os"); + +function getCheckedFunction(fn) { + return hideStackFrames(function checkError() { + const ctx = {}; + const ret = fn(ctx); + if (ret === undefined) { + throw new ERR_SYSTEM_ERROR.HideStackFramesError(ctx); + } + return ret; + }); +} + +const { 0: type, 1: version, 2: release, 3: machine } = _getOSInformation(); + +const getHomeDirectory = getCheckedFunction(_getHomeDirectory); +const getHostname = getCheckedFunction(_getHostname); +const getInterfaceAddresses = getCheckedFunction(_getInterfaceAddresses); +const getUptime = getCheckedFunction(_getUptime); + +/** + * @returns {string} + */ +const getOSRelease = () => release; +/** + * @returns {string} + */ +const getOSType = () => type; +/** + * @returns {string} + */ +const getOSVersion = () => version; +/** + * @returns {string} + */ 
+const getMachine = () => machine; + +getAvailableParallelism[SymbolToPrimitive] = () => getAvailableParallelism(); +getFreeMem[SymbolToPrimitive] = () => getFreeMem(); +getHostname[SymbolToPrimitive] = () => getHostname(); +getOSVersion[SymbolToPrimitive] = () => getOSVersion(); +getOSType[SymbolToPrimitive] = () => getOSType(); +getOSRelease[SymbolToPrimitive] = () => getOSRelease(); +getMachine[SymbolToPrimitive] = () => getMachine(); +getHomeDirectory[SymbolToPrimitive] = () => getHomeDirectory(); +getTotalMem[SymbolToPrimitive] = () => getTotalMem(); +getUptime[SymbolToPrimitive] = () => getUptime(); + +const kEndianness = isBigEndian ? "BE" : "LE"; + +const avgValues = new Float64Array(3); + +/** + * @returns {[number, number, number]} + */ +function loadavg() { + getLoadAvg(avgValues); + return [avgValues[0], avgValues[1], avgValues[2]]; +} + +/** + * Returns an array of objects containing information about each + * logical CPU core. + * @returns {Array<{ + * model: string, + * speed: number, + * times: { + * user: number, + * nice: number, + * sys: number, + * idle: number, + * irq: number, + * }, + * }>} + */ +function cpus() { + // [] is a bugfix for a regression introduced in 51cea61 + const data = getCPUs() || []; + const result = []; + let i = 0; + while (i < data.length) { + ArrayPrototypePush(result, { + model: data[i++], + speed: data[i++], + times: { + user: data[i++], + nice: data[i++], + sys: data[i++], + idle: data[i++], + irq: data[i++], + }, + }); + } + return result; +} + +/** + * @returns {string} + */ +function arch() { + return process.arch; +} +arch[SymbolToPrimitive] = () => process.arch; + +/** + * @returns {string} + */ +function platform() { + return process.platform; +} +platform[SymbolToPrimitive] = () => process.platform; + +/** + * @returns {string} + */ +function tmpdir() { + if (isWindows) { + const path = + process.env.TEMP || + process.env.TMP || + (process.env.SystemRoot || process.env.windir) + "\\temp"; + + if ( + path.length > 1 && + path[path.length - 1] === "\\" && + path[path.length - 2] !== ":" + ) { + return StringPrototypeSlice(path, 0, -1); + } + + return path; + } + + return getTempDir() || "/tmp"; +} +tmpdir[SymbolToPrimitive] = () => tmpdir(); + +/** + * @returns {'BE' | 'LE'} + */ +function endianness() { + return kEndianness; +} +endianness[SymbolToPrimitive] = () => kEndianness; + +/** + * @returns {Record>} + */ +function networkInterfaces() { + const data = getInterfaceAddresses(); + const result = {}; + + if (data === undefined) return result; + for (let i = 0; i < data.length; i += 7) { + const name = data[i]; + const entry = { + address: data[i + 1], + netmask: data[i + 2], + family: data[i + 3], + mac: data[i + 4], + internal: data[i + 5], + cidr: getCIDR(data[i + 1], data[i + 2], data[i + 3]), + }; + const scopeid = data[i + 6]; + if (scopeid !== -1) entry.scopeid = scopeid; + + const existing = result[name]; + if (existing !== undefined) ArrayPrototypePush(existing, entry); + else result[name] = [entry]; + } + + return result; +} + +/** + * @param {number} [pid] + * @param {number} [priority] + * @returns {void} + */ +function setPriority(pid, priority) { + if (priority === undefined) { + priority = pid; + pid = 0; + } + + validateInt32(pid, "pid"); + validateInt32(priority, "priority", -20, 19); + + const ctx = {}; + + if (_setPriority(pid, priority, ctx) !== 0) throw new ERR_SYSTEM_ERROR(ctx); +} + +/** + * @param {number} [pid] + * @returns {number} + */ +function getPriority(pid) { + if (pid === undefined) pid = 0; + else 
validateInt32(pid, "pid"); + + const ctx = {}; + const priority = _getPriority(pid, ctx); + + if (priority === undefined) throw new ERR_SYSTEM_ERROR(ctx); + + return priority; +} + +/** + * @param {{ encoding?: string }} [options] If `encoding` is set to + * `'buffer'`, the `username`, `shell`, and `homedir` values will + * be `Buffer` instances. + * @returns {{ + * uid: number, + * gid: number, + * username: string, + * homedir: string, + * shell: string | null, + * }} + */ +function userInfo(options) { + if (typeof options !== "object") options = null; + + const ctx = {}; + const user = getUserInfo(options, ctx); + + if (user === undefined) throw new ERR_SYSTEM_ERROR(ctx); + + return user; +} + +module.exports = { + arch, + availableParallelism: getAvailableParallelism, + cpus, + endianness, + freemem: getFreeMem, + getPriority, + homedir: getHomeDirectory, + hostname: getHostname, + loadavg, + networkInterfaces, + platform, + release: getOSRelease, + setPriority, + tmpdir, + totalmem: getTotalMem, + type: getOSType, + userInfo, + uptime: getUptime, + version: getOSVersion, + machine: getMachine, +}; + +ObjectFreeze(constants.signals); + +ObjectDefineProperties(module.exports, { + constants: { + __proto__: null, + configurable: false, + enumerable: true, + value: constants, + }, + + EOL: { + __proto__: null, + configurable: true, + enumerable: true, + writable: false, + value: isWindows ? "\r\n" : "\n", + }, + + devNull: { + __proto__: null, + configurable: true, + enumerable: true, + writable: false, + value: isWindows ? "\\\\.\\nul" : "/dev/null", + }, +}); diff --git a/.codesandbox/node/path.js b/.codesandbox/node/path.js new file mode 100644 index 00000000..08ee62f3 --- /dev/null +++ b/.codesandbox/node/path.js @@ -0,0 +1,1790 @@ +"use strict"; + +const { + ArrayPrototypeIncludes, + ArrayPrototypeJoin, + ArrayPrototypePush, + ArrayPrototypeSlice, + FunctionPrototypeBind, + StringPrototypeCharCodeAt, + StringPrototypeIncludes, + StringPrototypeIndexOf, + StringPrototypeLastIndexOf, + StringPrototypeRepeat, + StringPrototypeReplace, + StringPrototypeSlice, + StringPrototypeSplit, + StringPrototypeToLowerCase, + StringPrototypeToUpperCase, +} = primordials; + +const { + CHAR_UPPERCASE_A, + CHAR_LOWERCASE_A, + CHAR_UPPERCASE_Z, + CHAR_LOWERCASE_Z, + CHAR_DOT, + CHAR_FORWARD_SLASH, + CHAR_BACKWARD_SLASH, + CHAR_COLON, + CHAR_QUESTION_MARK, +} = require("internal/constants"); +const { validateObject, validateString } = require("internal/validators"); + +const { isWindows, getLazy } = require("internal/util"); + +const lazyMatchGlobPattern = getLazy( + () => require("internal/fs/glob").matchGlobPattern +); + +function isPathSeparator(code) { + return code === CHAR_FORWARD_SLASH || code === CHAR_BACKWARD_SLASH; +} + +function isPosixPathSeparator(code) { + return code === CHAR_FORWARD_SLASH; +} + +const WINDOWS_RESERVED_NAMES = [ + "CON", + "PRN", + "AUX", + "NUL", + "COM1", + "COM2", + "COM3", + "COM4", + "COM5", + "COM6", + "COM7", + "COM8", + "COM9", + "LPT1", + "LPT2", + "LPT3", + "LPT4", + "LPT5", + "LPT6", + "LPT7", + "LPT8", + "LPT9", + "COM\xb9", + "COM\xb2", + "COM\xb3", + "LPT\xb9", + "LPT\xb2", + "LPT\xb3", +]; + +function isWindowsReservedName(path, colonIndex) { + const devicePart = StringPrototypeToUpperCase( + StringPrototypeSlice(path, 0, colonIndex) + ); + return ArrayPrototypeIncludes(WINDOWS_RESERVED_NAMES, devicePart); +} + +function isWindowsDeviceRoot(code) { + return ( + (code >= CHAR_UPPERCASE_A && code <= CHAR_UPPERCASE_Z) || + (code >= CHAR_LOWERCASE_A && code <= 
CHAR_LOWERCASE_Z) + ); +} + +// Resolves . and .. elements in a path with directory names +function normalizeString(path, allowAboveRoot, separator, isPathSeparator) { + let res = ""; + let lastSegmentLength = 0; + let lastSlash = -1; + let dots = 0; + let code = 0; + for (let i = 0; i <= path.length; ++i) { + if (i < path.length) code = StringPrototypeCharCodeAt(path, i); + else if (isPathSeparator(code)) break; + else code = CHAR_FORWARD_SLASH; + + if (isPathSeparator(code)) { + if (lastSlash === i - 1 || dots === 1) { + // NOOP + } else if (dots === 2) { + if ( + res.length < 2 || + lastSegmentLength !== 2 || + StringPrototypeCharCodeAt(res, res.length - 1) !== CHAR_DOT || + StringPrototypeCharCodeAt(res, res.length - 2) !== CHAR_DOT + ) { + if (res.length > 2) { + const lastSlashIndex = res.length - lastSegmentLength - 1; + if (lastSlashIndex === -1) { + res = ""; + lastSegmentLength = 0; + } else { + res = StringPrototypeSlice(res, 0, lastSlashIndex); + lastSegmentLength = + res.length - 1 - StringPrototypeLastIndexOf(res, separator); + } + lastSlash = i; + dots = 0; + continue; + } else if (res.length !== 0) { + res = ""; + lastSegmentLength = 0; + lastSlash = i; + dots = 0; + continue; + } + } + if (allowAboveRoot) { + res += res.length > 0 ? `${separator}..` : ".."; + lastSegmentLength = 2; + } + } else { + if (res.length > 0) + res += `${separator}${StringPrototypeSlice(path, lastSlash + 1, i)}`; + else res = StringPrototypeSlice(path, lastSlash + 1, i); + lastSegmentLength = i - lastSlash - 1; + } + lastSlash = i; + dots = 0; + } else if (code === CHAR_DOT && dots !== -1) { + ++dots; + } else { + dots = -1; + } + } + return res; +} + +function formatExt(ext) { + return ext ? `${ext[0] === "." ? "" : "."}${ext}` : ""; +} + +/** + * @param {string} sep + * @param {{ + * dir?: string; + * root?: string; + * base?: string; + * name?: string; + * ext?: string; + * }} pathObject + * @returns {string} + */ +function _format(sep, pathObject) { + validateObject(pathObject, "pathObject"); + const dir = pathObject.dir || pathObject.root; + const base = + pathObject.base || `${pathObject.name || ""}${formatExt(pathObject.ext)}`; + if (!dir) { + return base; + } + return dir === pathObject.root ? `${dir}${base}` : `${dir}${sep}${base}`; +} + +const forwardSlashRegExp = /\//g; + +const win32 = { + /** + * path.resolve([from ...], to) + * @param {...string} args + * @returns {string} + */ + resolve(...args) { + let resolvedDevice = ""; + let resolvedTail = ""; + let resolvedAbsolute = false; + + for (let i = args.length - 1; i >= -1; i--) { + let path; + if (i >= 0) { + path = args[i]; + validateString(path, `paths[${i}]`); + + // Skip empty entries + if (path.length === 0) { + continue; + } + } else if (resolvedDevice.length === 0) { + path = process.cwd(); + // Fast path for current directory + if ( + args.length === 0 || + (args.length === 1 && + (args[0] === "" || args[0] === ".") && + isPathSeparator(StringPrototypeCharCodeAt(path, 0))) + ) { + if (!isWindows) { + path = StringPrototypeReplace(path, forwardSlashRegExp, "\\"); + } + return path; + } + } else { + // Windows has the concept of drive-specific current working + // directories. If we've resolved a drive letter but not yet an + // absolute path, get cwd for that drive, or the process cwd if + // the drive cwd is not available. We're sure the device is not + // a UNC path at this points, because UNC paths are always absolute. 
+ path = process.env[`=${resolvedDevice}`] || process.cwd(); + + // Verify that a cwd was found and that it actually points + // to our drive. If not, default to the drive's root. + if ( + path === undefined || + (StringPrototypeToLowerCase(StringPrototypeSlice(path, 0, 2)) !== + StringPrototypeToLowerCase(resolvedDevice) && + StringPrototypeCharCodeAt(path, 2) === CHAR_BACKWARD_SLASH) + ) { + path = `${resolvedDevice}\\`; + } + } + + const len = path.length; + let rootEnd = 0; + let device = ""; + let isAbsolute = false; + const code = StringPrototypeCharCodeAt(path, 0); + + // Try to match a root + if (len === 1) { + if (isPathSeparator(code)) { + // `path` contains just a path separator + rootEnd = 1; + isAbsolute = true; + } + } else if (isPathSeparator(code)) { + // Possible UNC root + + // If we started with a separator, we know we at least have an + // absolute path of some kind (UNC or otherwise) + isAbsolute = true; + + if (isPathSeparator(StringPrototypeCharCodeAt(path, 1))) { + // Matched double path separator at beginning + let j = 2; + let last = j; + // Match 1 or more non-path separators + while ( + j < len && + !isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j < len && j !== last) { + const firstPart = StringPrototypeSlice(path, last, j); + // Matched! + last = j; + // Match 1 or more path separators + while ( + j < len && + isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j < len && j !== last) { + // Matched! + last = j; + // Match 1 or more non-path separators + while ( + j < len && + !isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j === len || j !== last) { + if (firstPart !== "." && firstPart !== "?") { + // We matched a UNC root + device = `\\\\${firstPart}\\${StringPrototypeSlice( + path, + last, + j + )}`; + rootEnd = j; + } else { + // We matched a device root (e.g. \\\\.\\PHYSICALDRIVE0) + device = `\\\\${firstPart}`; + rootEnd = 4; + } + } + } + } + } else { + rootEnd = 1; + } + } else if ( + isWindowsDeviceRoot(code) && + StringPrototypeCharCodeAt(path, 1) === CHAR_COLON + ) { + // Possible device root + device = StringPrototypeSlice(path, 0, 2); + rootEnd = 2; + if (len > 2 && isPathSeparator(StringPrototypeCharCodeAt(path, 2))) { + // Treat separator following drive name as an absolute path + // indicator + isAbsolute = true; + rootEnd = 3; + } + } + + if (device.length > 0) { + if (resolvedDevice.length > 0) { + if ( + StringPrototypeToLowerCase(device) !== + StringPrototypeToLowerCase(resolvedDevice) + ) + // This path points to another device so it is not applicable + continue; + } else { + resolvedDevice = device; + } + } + + if (resolvedAbsolute) { + if (resolvedDevice.length > 0) break; + } else { + resolvedTail = `${StringPrototypeSlice( + path, + rootEnd + )}\\${resolvedTail}`; + resolvedAbsolute = isAbsolute; + if (isAbsolute && resolvedDevice.length > 0) { + break; + } + } + } + + // At this point the path should be resolved to a full absolute path, + // but handle relative paths to be safe (might happen when process.cwd() + // fails) + + // Normalize the tail path + resolvedTail = normalizeString( + resolvedTail, + !resolvedAbsolute, + "\\", + isPathSeparator + ); + + return resolvedAbsolute + ? 
`${resolvedDevice}\\${resolvedTail}` + : `${resolvedDevice}${resolvedTail}` || "."; + }, + + /** + * @param {string} path + * @returns {string} + */ + normalize(path) { + validateString(path, "path"); + const len = path.length; + if (len === 0) return "."; + let rootEnd = 0; + let device; + let isAbsolute = false; + const code = StringPrototypeCharCodeAt(path, 0); + + // Try to match a root + if (len === 1) { + // `path` contains just a single char, exit early to avoid + // unnecessary work + return isPosixPathSeparator(code) ? "\\" : path; + } + if (isPathSeparator(code)) { + // Possible UNC root + + // If we started with a separator, we know we at least have an absolute + // path of some kind (UNC or otherwise) + isAbsolute = true; + + if (isPathSeparator(StringPrototypeCharCodeAt(path, 1))) { + // Matched double path separator at beginning + let j = 2; + let last = j; + // Match 1 or more non-path separators + while ( + j < len && + !isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j < len && j !== last) { + const firstPart = StringPrototypeSlice(path, last, j); + // Matched! + last = j; + // Match 1 or more path separators + while ( + j < len && + isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j < len && j !== last) { + // Matched! + last = j; + // Match 1 or more non-path separators + while ( + j < len && + !isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j === len || j !== last) { + if (firstPart === "." || firstPart === "?") { + // We matched a device root (e.g. \\\\.\\PHYSICALDRIVE0) + device = `\\\\${firstPart}`; + rootEnd = 4; + const colonIndex = StringPrototypeIndexOf(path, ":"); + // Special case: handle \\?\COM1: or similar reserved device paths + const possibleDevice = StringPrototypeSlice( + path, + 4, + colonIndex + 1 + ); + if ( + isWindowsReservedName( + possibleDevice, + possibleDevice.length - 1 + ) + ) { + device = `\\\\?\\${possibleDevice}`; + rootEnd = 4 + possibleDevice.length; + } + } else if (j === len) { + // We matched a UNC root only + // Return the normalized version of the UNC root since there + // is nothing left to process + return `\\\\${firstPart}\\${StringPrototypeSlice( + path, + last + )}\\`; + } else { + // We matched a UNC root with leftovers + device = `\\\\${firstPart}\\${StringPrototypeSlice( + path, + last, + j + )}`; + rootEnd = j; + } + } + } + } + } else { + rootEnd = 1; + } + } else { + const colonIndex = StringPrototypeIndexOf(path, ":"); + if (colonIndex > 0) { + if (isWindowsDeviceRoot(code) && colonIndex === 1) { + device = StringPrototypeSlice(path, 0, 2); + rootEnd = 2; + if (len > 2 && isPathSeparator(StringPrototypeCharCodeAt(path, 2))) { + isAbsolute = true; + rootEnd = 3; + } + } else if (isWindowsReservedName(path, colonIndex)) { + device = StringPrototypeSlice(path, 0, colonIndex + 1); + rootEnd = colonIndex + 1; + } + } + } + + let tail = + rootEnd < len + ? 
normalizeString( + StringPrototypeSlice(path, rootEnd), + !isAbsolute, + "\\", + isPathSeparator + ) + : ""; + if (tail.length === 0 && !isAbsolute) tail = "."; + if ( + tail.length > 0 && + isPathSeparator(StringPrototypeCharCodeAt(path, len - 1)) + ) + tail += "\\"; + if ( + !isAbsolute && + device === undefined && + StringPrototypeIncludes(path, ":") + ) { + // If the original path was not absolute and if we have not been able to + // resolve it relative to a particular device, we need to ensure that the + // `tail` has not become something that Windows might interpret as an + // absolute path. See CVE-2024-36139. + if ( + tail.length >= 2 && + isWindowsDeviceRoot(StringPrototypeCharCodeAt(tail, 0)) && + StringPrototypeCharCodeAt(tail, 1) === CHAR_COLON + ) { + return `.\\${tail}`; + } + let index = StringPrototypeIndexOf(path, ":"); + + do { + if ( + index === len - 1 || + isPathSeparator(StringPrototypeCharCodeAt(path, index + 1)) + ) { + return `.\\${tail}`; + } + } while ((index = StringPrototypeIndexOf(path, ":", index + 1)) !== -1); + } + const colonIndex = StringPrototypeIndexOf(path, ":"); + if (isWindowsReservedName(path, colonIndex)) { + return `.\\${device ?? ""}${tail}`; + } + if (device === undefined) { + return isAbsolute ? `\\${tail}` : tail; + } + return isAbsolute ? `${device}\\${tail}` : `${device}${tail}`; + }, + + /** + * @param {string} path + * @returns {boolean} + */ + isAbsolute(path) { + validateString(path, "path"); + const len = path.length; + if (len === 0) return false; + + const code = StringPrototypeCharCodeAt(path, 0); + return ( + isPathSeparator(code) || + // Possible device root + (len > 2 && + isWindowsDeviceRoot(code) && + StringPrototypeCharCodeAt(path, 1) === CHAR_COLON && + isPathSeparator(StringPrototypeCharCodeAt(path, 2))) + ); + }, + + /** + * @param {...string} args + * @returns {string} + */ + join(...args) { + if (args.length === 0) return "."; + + const path = []; + for (let i = 0; i < args.length; ++i) { + const arg = args[i]; + validateString(arg, "path"); + if (arg.length > 0) { + ArrayPrototypePush(path, arg); + } + } + + if (path.length === 0) return "."; + + const firstPart = path[0]; + let joined = ArrayPrototypeJoin(path, "\\"); + + // Make sure that the joined path doesn't start with two slashes, because + // normalize() will mistake it for a UNC path then. + // + // This step is skipped when it is very clear that the user actually + // intended to point at a UNC path. This is assumed when the first + // non-empty string arguments starts with exactly two slashes followed by + // at least one more non-slash character. + // + // Note that for normalize() to treat a path as a UNC path it needs to + // have at least 2 components, so we don't filter for that here. 
+ // This means that the user can use join to construct UNC paths from + // a server name and a share name; for example: + // path.join('//server', 'share') -> '\\\\server\\share\\') + let needsReplace = true; + let slashCount = 0; + if (isPathSeparator(StringPrototypeCharCodeAt(firstPart, 0))) { + ++slashCount; + const firstLen = firstPart.length; + if ( + firstLen > 1 && + isPathSeparator(StringPrototypeCharCodeAt(firstPart, 1)) + ) { + ++slashCount; + if (firstLen > 2) { + if (isPathSeparator(StringPrototypeCharCodeAt(firstPart, 2))) + ++slashCount; + else { + // We matched a UNC path in the first part + needsReplace = false; + } + } + } + } + if (needsReplace) { + // Find any more consecutive slashes we need to replace + while ( + slashCount < joined.length && + isPathSeparator(StringPrototypeCharCodeAt(joined, slashCount)) + ) { + slashCount++; + } + + // Replace the slashes if needed + if (slashCount >= 2) + joined = `\\${StringPrototypeSlice(joined, slashCount)}`; + } + + // Skip normalization when reserved device names are present + const parts = []; + let part = ""; + + for (let i = 0; i < joined.length; i++) { + if (joined[i] === "\\") { + if (part) parts.push(part); + part = ""; + // Skip consecutive backslashes + while (i + 1 < joined.length && joined[i + 1] === "\\") i++; + } else { + part += joined[i]; + } + } + // Add the final part if any + if (part) parts.push(part); + + // Check if any part has a Windows reserved name + if ( + parts.some((p) => { + const colonIndex = StringPrototypeIndexOf(p, ":"); + return colonIndex !== -1 && isWindowsReservedName(p, colonIndex); + }) + ) { + // Replace forward slashes with backslashes + let result = ""; + for (let i = 0; i < joined.length; i++) { + result += joined[i] === "/" ? "\\" : joined[i]; + } + return result; + } + + return win32.normalize(joined); + }, + + /** + * It will solve the relative path from `from` to `to`, for instance + * from = 'C:\\orandea\\test\\aaa' + * to = 'C:\\orandea\\impl\\bbb' + * The output of the function should be: '..\\..\\impl\\bbb' + * @param {string} from + * @param {string} to + * @returns {string} + */ + relative(from, to) { + validateString(from, "from"); + validateString(to, "to"); + + if (from === to) return ""; + + const fromOrig = win32.resolve(from); + const toOrig = win32.resolve(to); + + if (fromOrig === toOrig) return ""; + + from = StringPrototypeToLowerCase(fromOrig); + to = StringPrototypeToLowerCase(toOrig); + + if (from === to) return ""; + + if (fromOrig.length !== from.length || toOrig.length !== to.length) { + const fromSplit = StringPrototypeSplit(fromOrig, "\\"); + const toSplit = StringPrototypeSplit(toOrig, "\\"); + if (fromSplit[fromSplit.length - 1] === "") { + fromSplit.pop(); + } + if (toSplit[toSplit.length - 1] === "") { + toSplit.pop(); + } + + const fromLen = fromSplit.length; + const toLen = toSplit.length; + const length = fromLen < toLen ? 
fromLen : toLen; + + let i; + for (i = 0; i < length; i++) { + if ( + StringPrototypeToLowerCase(fromSplit[i]) !== + StringPrototypeToLowerCase(toSplit[i]) + ) { + break; + } + } + + if (i === 0) { + return toOrig; + } else if (i === length) { + if (toLen > length) { + return ArrayPrototypeJoin(ArrayPrototypeSlice(toSplit, i), "\\"); + } + if (fromLen > length) { + return StringPrototypeRepeat("..\\", fromLen - 1 - i) + ".."; + } + return ""; + } + + return ( + StringPrototypeRepeat("..\\", fromLen - i) + + ArrayPrototypeJoin(ArrayPrototypeSlice(toSplit, i), "\\") + ); + } + + // Trim any leading backslashes + let fromStart = 0; + while ( + fromStart < from.length && + StringPrototypeCharCodeAt(from, fromStart) === CHAR_BACKWARD_SLASH + ) { + fromStart++; + } + // Trim trailing backslashes (applicable to UNC paths only) + let fromEnd = from.length; + while ( + fromEnd - 1 > fromStart && + StringPrototypeCharCodeAt(from, fromEnd - 1) === CHAR_BACKWARD_SLASH + ) { + fromEnd--; + } + const fromLen = fromEnd - fromStart; + + // Trim any leading backslashes + let toStart = 0; + while ( + toStart < to.length && + StringPrototypeCharCodeAt(to, toStart) === CHAR_BACKWARD_SLASH + ) { + toStart++; + } + // Trim trailing backslashes (applicable to UNC paths only) + let toEnd = to.length; + while ( + toEnd - 1 > toStart && + StringPrototypeCharCodeAt(to, toEnd - 1) === CHAR_BACKWARD_SLASH + ) { + toEnd--; + } + const toLen = toEnd - toStart; + + // Compare paths to find the longest common path from root + const length = fromLen < toLen ? fromLen : toLen; + let lastCommonSep = -1; + let i = 0; + for (; i < length; i++) { + const fromCode = StringPrototypeCharCodeAt(from, fromStart + i); + if (fromCode !== StringPrototypeCharCodeAt(to, toStart + i)) break; + else if (fromCode === CHAR_BACKWARD_SLASH) lastCommonSep = i; + } + + // We found a mismatch before the first common path separator was seen, so + // return the original `to`. + if (i !== length) { + if (lastCommonSep === -1) return toOrig; + } else { + if (toLen > length) { + if ( + StringPrototypeCharCodeAt(to, toStart + i) === CHAR_BACKWARD_SLASH + ) { + // We get here if `from` is the exact base path for `to`. + // For example: from='C:\\foo\\bar'; to='C:\\foo\\bar\\baz' + return StringPrototypeSlice(toOrig, toStart + i + 1); + } + if (i === 2) { + // We get here if `from` is the device root. + // For example: from='C:\\'; to='C:\\foo' + return StringPrototypeSlice(toOrig, toStart + i); + } + } + if (fromLen > length) { + if ( + StringPrototypeCharCodeAt(from, fromStart + i) === CHAR_BACKWARD_SLASH + ) { + // We get here if `to` is the exact base path for `from`. + // For example: from='C:\\foo\\bar'; to='C:\\foo' + lastCommonSep = i; + } else if (i === 2) { + // We get here if `to` is the device root. + // For example: from='C:\\foo\\bar'; to='C:\\' + lastCommonSep = 3; + } + } + if (lastCommonSep === -1) lastCommonSep = 0; + } + + let out = ""; + // Generate the relative path based on the path difference between `to` and + // `from` + for (i = fromStart + lastCommonSep + 1; i <= fromEnd; ++i) { + if ( + i === fromEnd || + StringPrototypeCharCodeAt(from, i) === CHAR_BACKWARD_SLASH + ) { + out += out.length === 0 ? ".." 
: "\\.."; + } + } + + toStart += lastCommonSep; + + // Lastly, append the rest of the destination (`to`) path that comes after + // the common path parts + if (out.length > 0) + return `${out}${StringPrototypeSlice(toOrig, toStart, toEnd)}`; + + if (StringPrototypeCharCodeAt(toOrig, toStart) === CHAR_BACKWARD_SLASH) + ++toStart; + return StringPrototypeSlice(toOrig, toStart, toEnd); + }, + + /** + * @param {string} path + * @returns {string} + */ + toNamespacedPath(path) { + // Note: this will *probably* throw somewhere. + if (typeof path !== "string" || path.length === 0) return path; + + const resolvedPath = win32.resolve(path); + + if (resolvedPath.length <= 2) return path; + + if (StringPrototypeCharCodeAt(resolvedPath, 0) === CHAR_BACKWARD_SLASH) { + // Possible UNC root + if (StringPrototypeCharCodeAt(resolvedPath, 1) === CHAR_BACKWARD_SLASH) { + const code = StringPrototypeCharCodeAt(resolvedPath, 2); + if (code !== CHAR_QUESTION_MARK && code !== CHAR_DOT) { + // Matched non-long UNC root, convert the path to a long UNC path + return `\\\\?\\UNC\\${StringPrototypeSlice(resolvedPath, 2)}`; + } + } + } else if ( + isWindowsDeviceRoot(StringPrototypeCharCodeAt(resolvedPath, 0)) && + StringPrototypeCharCodeAt(resolvedPath, 1) === CHAR_COLON && + StringPrototypeCharCodeAt(resolvedPath, 2) === CHAR_BACKWARD_SLASH + ) { + // Matched device root, convert the path to a long UNC path + return `\\\\?\\${resolvedPath}`; + } + + return resolvedPath; + }, + + /** + * @param {string} path + * @returns {string} + */ + dirname(path) { + validateString(path, "path"); + const len = path.length; + if (len === 0) return "."; + let rootEnd = -1; + let offset = 0; + const code = StringPrototypeCharCodeAt(path, 0); + + if (len === 1) { + // `path` contains just a path separator, exit early to avoid + // unnecessary work or a dot. + return isPathSeparator(code) ? path : "."; + } + + // Try to match a root + if (isPathSeparator(code)) { + // Possible UNC root + + rootEnd = offset = 1; + + if (isPathSeparator(StringPrototypeCharCodeAt(path, 1))) { + // Matched double path separator at beginning + let j = 2; + let last = j; + // Match 1 or more non-path separators + while ( + j < len && + !isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j < len && j !== last) { + // Matched! + last = j; + // Match 1 or more path separators + while ( + j < len && + isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j < len && j !== last) { + // Matched! + last = j; + // Match 1 or more non-path separators + while ( + j < len && + !isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j === len) { + // We matched a UNC root only + return path; + } + if (j !== last) { + // We matched a UNC root with leftovers + + // Offset by 1 to include the separator after the UNC root to + // treat it as a "normal root" on top of a (UNC) root + rootEnd = offset = j + 1; + } + } + } + } + // Possible device root + } else if ( + isWindowsDeviceRoot(code) && + StringPrototypeCharCodeAt(path, 1) === CHAR_COLON + ) { + rootEnd = + len > 2 && isPathSeparator(StringPrototypeCharCodeAt(path, 2)) ? 
3 : 2; + offset = rootEnd; + } + + let end = -1; + let matchedSlash = true; + for (let i = len - 1; i >= offset; --i) { + if (isPathSeparator(StringPrototypeCharCodeAt(path, i))) { + if (!matchedSlash) { + end = i; + break; + } + } else { + // We saw the first non-path separator + matchedSlash = false; + } + } + + if (end === -1) { + if (rootEnd === -1) return "."; + + end = rootEnd; + } + return StringPrototypeSlice(path, 0, end); + }, + + /** + * @param {string} path + * @param {string} [suffix] + * @returns {string} + */ + basename(path, suffix) { + if (suffix !== undefined) validateString(suffix, "suffix"); + validateString(path, "path"); + let start = 0; + let end = -1; + let matchedSlash = true; + + // Check for a drive letter prefix so as not to mistake the following + // path separator as an extra separator at the end of the path that can be + // disregarded + if ( + path.length >= 2 && + isWindowsDeviceRoot(StringPrototypeCharCodeAt(path, 0)) && + StringPrototypeCharCodeAt(path, 1) === CHAR_COLON + ) { + start = 2; + } + + if ( + suffix !== undefined && + suffix.length > 0 && + suffix.length <= path.length + ) { + if (suffix === path) return ""; + let extIdx = suffix.length - 1; + let firstNonSlashEnd = -1; + for (let i = path.length - 1; i >= start; --i) { + const code = StringPrototypeCharCodeAt(path, i); + if (isPathSeparator(code)) { + // If we reached a path separator that was not part of a set of path + // separators at the end of the string, stop now + if (!matchedSlash) { + start = i + 1; + break; + } + } else { + if (firstNonSlashEnd === -1) { + // We saw the first non-path separator, remember this index in case + // we need it if the extension ends up not matching + matchedSlash = false; + firstNonSlashEnd = i + 1; + } + if (extIdx >= 0) { + // Try to match the explicit extension + if (code === StringPrototypeCharCodeAt(suffix, extIdx)) { + if (--extIdx === -1) { + // We matched the extension, so mark this as the end of our path + // component + end = i; + } + } else { + // Extension does not match, so our result is the entire path + // component + extIdx = -1; + end = firstNonSlashEnd; + } + } + } + } + + if (start === end) end = firstNonSlashEnd; + else if (end === -1) end = path.length; + return StringPrototypeSlice(path, start, end); + } + for (let i = path.length - 1; i >= start; --i) { + if (isPathSeparator(StringPrototypeCharCodeAt(path, i))) { + // If we reached a path separator that was not part of a set of path + // separators at the end of the string, stop now + if (!matchedSlash) { + start = i + 1; + break; + } + } else if (end === -1) { + // We saw the first non-path separator, mark this as the end of our + // path component + matchedSlash = false; + end = i + 1; + } + } + + if (end === -1) return ""; + return StringPrototypeSlice(path, start, end); + }, + + /** + * @param {string} path + * @returns {string} + */ + extname(path) { + validateString(path, "path"); + let start = 0; + let startDot = -1; + let startPart = 0; + let end = -1; + let matchedSlash = true; + // Track the state of characters (if any) we see before our first dot and + // after any path separator we find + let preDotState = 0; + + // Check for a drive letter prefix so as not to mistake the following + // path separator as an extra separator at the end of the path that can be + // disregarded + + if ( + path.length >= 2 && + StringPrototypeCharCodeAt(path, 1) === CHAR_COLON && + isWindowsDeviceRoot(StringPrototypeCharCodeAt(path, 0)) + ) { + start = startPart = 2; + } + + for (let i = 
path.length - 1; i >= start; --i) { + const code = StringPrototypeCharCodeAt(path, i); + if (isPathSeparator(code)) { + // If we reached a path separator that was not part of a set of path + // separators at the end of the string, stop now + if (!matchedSlash) { + startPart = i + 1; + break; + } + continue; + } + if (end === -1) { + // We saw the first non-path separator, mark this as the end of our + // extension + matchedSlash = false; + end = i + 1; + } + if (code === CHAR_DOT) { + // If this is our first dot, mark it as the start of our extension + if (startDot === -1) startDot = i; + else if (preDotState !== 1) preDotState = 1; + } else if (startDot !== -1) { + // We saw a non-dot and non-path separator before our dot, so we should + // have a good chance at having a non-empty extension + preDotState = -1; + } + } + + if ( + startDot === -1 || + end === -1 || + // We saw a non-dot character immediately before the dot + preDotState === 0 || + // The (right-most) trimmed path component is exactly '..' + (preDotState === 1 && startDot === end - 1 && startDot === startPart + 1) + ) { + return ""; + } + return StringPrototypeSlice(path, startDot, end); + }, + + format: FunctionPrototypeBind(_format, null, "\\"), + + /** + * @param {string} path + * @returns {{ + * dir: string; + * root: string; + * base: string; + * name: string; + * ext: string; + * }} + */ + parse(path) { + validateString(path, "path"); + + const ret = { root: "", dir: "", base: "", ext: "", name: "" }; + if (path.length === 0) return ret; + + const len = path.length; + let rootEnd = 0; + let code = StringPrototypeCharCodeAt(path, 0); + + if (len === 1) { + if (isPathSeparator(code)) { + // `path` contains just a path separator, exit early to avoid + // unnecessary work + ret.root = ret.dir = path; + return ret; + } + ret.base = ret.name = path; + return ret; + } + // Try to match a root + if (isPathSeparator(code)) { + // Possible UNC root + + rootEnd = 1; + if (isPathSeparator(StringPrototypeCharCodeAt(path, 1))) { + // Matched double path separator at beginning + let j = 2; + let last = j; + // Match 1 or more non-path separators + while ( + j < len && + !isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j < len && j !== last) { + // Matched! + last = j; + // Match 1 or more path separators + while ( + j < len && + isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j < len && j !== last) { + // Matched! 
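+          // Illustrative (added comment, not part of the upstream source): for a
+          // UNC path such as \\server\share\file.txt, execution reaches this point
+          // once the separator after 'server' has been skipped; the loop that
+          // follows scans 'share', completing the UNC root \\server\share.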
+ last = j; + // Match 1 or more non-path separators + while ( + j < len && + !isPathSeparator(StringPrototypeCharCodeAt(path, j)) + ) { + j++; + } + if (j === len) { + // We matched a UNC root only + rootEnd = j; + } else if (j !== last) { + // We matched a UNC root with leftovers + rootEnd = j + 1; + } + } + } + } + } else if ( + isWindowsDeviceRoot(code) && + StringPrototypeCharCodeAt(path, 1) === CHAR_COLON + ) { + // Possible device root + if (len <= 2) { + // `path` contains just a drive root, exit early to avoid + // unnecessary work + ret.root = ret.dir = path; + return ret; + } + rootEnd = 2; + if (isPathSeparator(StringPrototypeCharCodeAt(path, 2))) { + if (len === 3) { + // `path` contains just a drive root, exit early to avoid + // unnecessary work + ret.root = ret.dir = path; + return ret; + } + rootEnd = 3; + } + } + if (rootEnd > 0) ret.root = StringPrototypeSlice(path, 0, rootEnd); + + let startDot = -1; + let startPart = rootEnd; + let end = -1; + let matchedSlash = true; + let i = path.length - 1; + + // Track the state of characters (if any) we see before our first dot and + // after any path separator we find + let preDotState = 0; + + // Get non-dir info + for (; i >= rootEnd; --i) { + code = StringPrototypeCharCodeAt(path, i); + if (isPathSeparator(code)) { + // If we reached a path separator that was not part of a set of path + // separators at the end of the string, stop now + if (!matchedSlash) { + startPart = i + 1; + break; + } + continue; + } + if (end === -1) { + // We saw the first non-path separator, mark this as the end of our + // extension + matchedSlash = false; + end = i + 1; + } + if (code === CHAR_DOT) { + // If this is our first dot, mark it as the start of our extension + if (startDot === -1) startDot = i; + else if (preDotState !== 1) preDotState = 1; + } else if (startDot !== -1) { + // We saw a non-dot and non-path separator before our dot, so we should + // have a good chance at having a non-empty extension + preDotState = -1; + } + } + + if (end !== -1) { + if ( + startDot === -1 || + // We saw a non-dot character immediately before the dot + preDotState === 0 || + // The (right-most) trimmed path component is exactly '..' + (preDotState === 1 && + startDot === end - 1 && + startDot === startPart + 1) + ) { + ret.base = ret.name = StringPrototypeSlice(path, startPart, end); + } else { + ret.name = StringPrototypeSlice(path, startPart, startDot); + ret.base = StringPrototypeSlice(path, startPart, end); + ret.ext = StringPrototypeSlice(path, startDot, end); + } + } + + // If the directory is the root, use the entire root as the `dir` including + // the trailing slash if any (`C:\abc` -> `C:\`). Otherwise, strip out the + // trailing slash (`C:\abc\def` -> `C:\abc`). 
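+    // Illustrative example (added comment, not part of the upstream source):
+    // win32.parse('C:\\path\\dir\\file.txt') returns
+    // { root: 'C:\\', dir: 'C:\\path\\dir', base: 'file.txt', ext: '.txt', name: 'file' }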
+ if (startPart > 0 && startPart !== rootEnd) + ret.dir = StringPrototypeSlice(path, 0, startPart - 1); + else ret.dir = ret.root; + + return ret; + }, + + matchesGlob(path, pattern) { + return lazyMatchGlobPattern()(path, pattern, true); + }, + + sep: "\\", + delimiter: ";", + win32: null, + posix: null, +}; + +const posixCwd = (() => { + if (isWindows) { + // Converts Windows' backslash path separators to POSIX forward slashes + // and truncates any drive indicator + const regexp = /\\/g; + return () => { + const cwd = StringPrototypeReplace(process.cwd(), regexp, "/"); + return StringPrototypeSlice(cwd, StringPrototypeIndexOf(cwd, "/")); + }; + } + + // We're already on POSIX, no need for any transformations + return () => process.cwd(); +})(); + +const posix = { + /** + * path.resolve([from ...], to) + * @param {...string} args + * @returns {string} + */ + resolve(...args) { + if ( + args.length === 0 || + (args.length === 1 && (args[0] === "" || args[0] === ".")) + ) { + const cwd = posixCwd(); + if (StringPrototypeCharCodeAt(cwd, 0) === CHAR_FORWARD_SLASH) { + return cwd; + } + } + let resolvedPath = ""; + let resolvedAbsolute = false; + + for (let i = args.length - 1; i >= 0 && !resolvedAbsolute; i--) { + const path = args[i]; + validateString(path, `paths[${i}]`); + + // Skip empty entries + if (path.length === 0) { + continue; + } + + resolvedPath = `${path}/${resolvedPath}`; + resolvedAbsolute = + StringPrototypeCharCodeAt(path, 0) === CHAR_FORWARD_SLASH; + } + + if (!resolvedAbsolute) { + const cwd = posixCwd(); + resolvedPath = `${cwd}/${resolvedPath}`; + resolvedAbsolute = + StringPrototypeCharCodeAt(cwd, 0) === CHAR_FORWARD_SLASH; + } + + // At this point the path should be resolved to a full absolute path, but + // handle relative paths to be safe (might happen when process.cwd() fails) + + // Normalize the path + resolvedPath = normalizeString( + resolvedPath, + !resolvedAbsolute, + "/", + isPosixPathSeparator + ); + + if (resolvedAbsolute) { + return `/${resolvedPath}`; + } + return resolvedPath.length > 0 ? resolvedPath : "."; + }, + + /** + * @param {string} path + * @returns {string} + */ + normalize(path) { + validateString(path, "path"); + + if (path.length === 0) return "."; + + const isAbsolute = + StringPrototypeCharCodeAt(path, 0) === CHAR_FORWARD_SLASH; + const trailingSeparator = + StringPrototypeCharCodeAt(path, path.length - 1) === CHAR_FORWARD_SLASH; + + // Normalize the path + path = normalizeString(path, !isAbsolute, "/", isPosixPathSeparator); + + if (path.length === 0) { + if (isAbsolute) return "/"; + return trailingSeparator ? "./" : "."; + } + if (trailingSeparator) path += "/"; + + return isAbsolute ? `/${path}` : path; + }, + + /** + * @param {string} path + * @returns {boolean} + */ + isAbsolute(path) { + validateString(path, "path"); + return ( + path.length > 0 && + StringPrototypeCharCodeAt(path, 0) === CHAR_FORWARD_SLASH + ); + }, + + /** + * @param {...string} args + * @returns {string} + */ + join(...args) { + if (args.length === 0) return "."; + + const path = []; + for (let i = 0; i < args.length; ++i) { + const arg = args[i]; + validateString(arg, "path"); + if (arg.length > 0) { + path.push(arg); + } + } + + if (path.length === 0) return "."; + + return posix.normalize(ArrayPrototypeJoin(path, "/")); + }, + + /** + * @param {string} from + * @param {string} to + * @returns {string} + */ + relative(from, to) { + validateString(from, "from"); + validateString(to, "to"); + + if (from === to) return ""; + + // Trim leading forward slashes. 
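+    // Illustrative example (added comment, not part of the upstream source):
+    // posix.relative('/data/orandea/test/aaa', '/data/orandea/impl/bbb')
+    // returns '../../impl/bbb'.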
+ from = posix.resolve(from); + to = posix.resolve(to); + + if (from === to) return ""; + + const fromStart = 1; + const fromEnd = from.length; + const fromLen = fromEnd - fromStart; + const toStart = 1; + const toLen = to.length - toStart; + + // Compare paths to find the longest common path from root + const length = fromLen < toLen ? fromLen : toLen; + let lastCommonSep = -1; + let i = 0; + for (; i < length; i++) { + const fromCode = StringPrototypeCharCodeAt(from, fromStart + i); + if (fromCode !== StringPrototypeCharCodeAt(to, toStart + i)) break; + else if (fromCode === CHAR_FORWARD_SLASH) lastCommonSep = i; + } + if (i === length) { + if (toLen > length) { + if (StringPrototypeCharCodeAt(to, toStart + i) === CHAR_FORWARD_SLASH) { + // We get here if `from` is the exact base path for `to`. + // For example: from='/foo/bar'; to='/foo/bar/baz' + return StringPrototypeSlice(to, toStart + i + 1); + } + if (i === 0) { + // We get here if `from` is the root + // For example: from='/'; to='/foo' + return StringPrototypeSlice(to, toStart + i); + } + } else if (fromLen > length) { + if ( + StringPrototypeCharCodeAt(from, fromStart + i) === CHAR_FORWARD_SLASH + ) { + // We get here if `to` is the exact base path for `from`. + // For example: from='/foo/bar/baz'; to='/foo/bar' + lastCommonSep = i; + } else if (i === 0) { + // We get here if `to` is the root. + // For example: from='/foo/bar'; to='/' + lastCommonSep = 0; + } + } + } + + let out = ""; + // Generate the relative path based on the path difference between `to` + // and `from`. + for (i = fromStart + lastCommonSep + 1; i <= fromEnd; ++i) { + if ( + i === fromEnd || + StringPrototypeCharCodeAt(from, i) === CHAR_FORWARD_SLASH + ) { + out += out.length === 0 ? ".." : "/.."; + } + } + + // Lastly, append the rest of the destination (`to`) path that comes after + // the common path parts. + return `${out}${StringPrototypeSlice(to, toStart + lastCommonSep)}`; + }, + + /** + * @param {string} path + * @returns {string} + */ + toNamespacedPath(path) { + // Non-op on posix systems + return path; + }, + + /** + * @param {string} path + * @returns {string} + */ + dirname(path) { + validateString(path, "path"); + if (path.length === 0) return "."; + const hasRoot = StringPrototypeCharCodeAt(path, 0) === CHAR_FORWARD_SLASH; + let end = -1; + let matchedSlash = true; + for (let i = path.length - 1; i >= 1; --i) { + if (StringPrototypeCharCodeAt(path, i) === CHAR_FORWARD_SLASH) { + if (!matchedSlash) { + end = i; + break; + } + } else { + // We saw the first non-path separator + matchedSlash = false; + } + } + + if (end === -1) return hasRoot ? 
"/" : "."; + if (hasRoot && end === 1) return "//"; + return StringPrototypeSlice(path, 0, end); + }, + + /** + * @param {string} path + * @param {string} [suffix] + * @returns {string} + */ + basename(path, suffix) { + if (suffix !== undefined) validateString(suffix, "suffix"); + validateString(path, "path"); + + let start = 0; + let end = -1; + let matchedSlash = true; + + if ( + suffix !== undefined && + suffix.length > 0 && + suffix.length <= path.length + ) { + if (suffix === path) return ""; + let extIdx = suffix.length - 1; + let firstNonSlashEnd = -1; + for (let i = path.length - 1; i >= 0; --i) { + const code = StringPrototypeCharCodeAt(path, i); + if (code === CHAR_FORWARD_SLASH) { + // If we reached a path separator that was not part of a set of path + // separators at the end of the string, stop now + if (!matchedSlash) { + start = i + 1; + break; + } + } else { + if (firstNonSlashEnd === -1) { + // We saw the first non-path separator, remember this index in case + // we need it if the extension ends up not matching + matchedSlash = false; + firstNonSlashEnd = i + 1; + } + if (extIdx >= 0) { + // Try to match the explicit extension + if (code === StringPrototypeCharCodeAt(suffix, extIdx)) { + if (--extIdx === -1) { + // We matched the extension, so mark this as the end of our path + // component + end = i; + } + } else { + // Extension does not match, so our result is the entire path + // component + extIdx = -1; + end = firstNonSlashEnd; + } + } + } + } + + if (start === end) end = firstNonSlashEnd; + else if (end === -1) end = path.length; + return StringPrototypeSlice(path, start, end); + } + for (let i = path.length - 1; i >= 0; --i) { + if (StringPrototypeCharCodeAt(path, i) === CHAR_FORWARD_SLASH) { + // If we reached a path separator that was not part of a set of path + // separators at the end of the string, stop now + if (!matchedSlash) { + start = i + 1; + break; + } + } else if (end === -1) { + // We saw the first non-path separator, mark this as the end of our + // path component + matchedSlash = false; + end = i + 1; + } + } + + if (end === -1) return ""; + return StringPrototypeSlice(path, start, end); + }, + + /** + * @param {string} path + * @returns {string} + */ + extname(path) { + validateString(path, "path"); + let startDot = -1; + let startPart = 0; + let end = -1; + let matchedSlash = true; + // Track the state of characters (if any) we see before our first dot and + // after any path separator we find + let preDotState = 0; + for (let i = path.length - 1; i >= 0; --i) { + const char = path[i]; + if (char === "/") { + // If we reached a path separator that was not part of a set of path + // separators at the end of the string, stop now + if (!matchedSlash) { + startPart = i + 1; + break; + } + continue; + } + if (end === -1) { + // We saw the first non-path separator, mark this as the end of our + // extension + matchedSlash = false; + end = i + 1; + } + if (char === ".") { + // If this is our first dot, mark it as the start of our extension + if (startDot === -1) startDot = i; + else if (preDotState !== 1) preDotState = 1; + } else if (startDot !== -1) { + // We saw a non-dot and non-path separator before our dot, so we should + // have a good chance at having a non-empty extension + preDotState = -1; + } + } + + if ( + startDot === -1 || + end === -1 || + // We saw a non-dot character immediately before the dot + preDotState === 0 || + // The (right-most) trimmed path component is exactly '..' 
+ (preDotState === 1 && startDot === end - 1 && startDot === startPart + 1) + ) { + return ""; + } + return StringPrototypeSlice(path, startDot, end); + }, + + format: FunctionPrototypeBind(_format, null, "/"), + + /** + * @param {string} path + * @returns {{ + * dir: string; + * root: string; + * base: string; + * name: string; + * ext: string; + * }} + */ + parse(path) { + validateString(path, "path"); + + const ret = { root: "", dir: "", base: "", ext: "", name: "" }; + if (path.length === 0) return ret; + const isAbsolute = + StringPrototypeCharCodeAt(path, 0) === CHAR_FORWARD_SLASH; + let start; + if (isAbsolute) { + ret.root = "/"; + start = 1; + } else { + start = 0; + } + let startDot = -1; + let startPart = 0; + let end = -1; + let matchedSlash = true; + let i = path.length - 1; + + // Track the state of characters (if any) we see before our first dot and + // after any path separator we find + let preDotState = 0; + + // Get non-dir info + for (; i >= start; --i) { + const code = StringPrototypeCharCodeAt(path, i); + if (code === CHAR_FORWARD_SLASH) { + // If we reached a path separator that was not part of a set of path + // separators at the end of the string, stop now + if (!matchedSlash) { + startPart = i + 1; + break; + } + continue; + } + if (end === -1) { + // We saw the first non-path separator, mark this as the end of our + // extension + matchedSlash = false; + end = i + 1; + } + if (code === CHAR_DOT) { + // If this is our first dot, mark it as the start of our extension + if (startDot === -1) startDot = i; + else if (preDotState !== 1) preDotState = 1; + } else if (startDot !== -1) { + // We saw a non-dot and non-path separator before our dot, so we should + // have a good chance at having a non-empty extension + preDotState = -1; + } + } + + if (end !== -1) { + const start = startPart === 0 && isAbsolute ? 1 : startPart; + if ( + startDot === -1 || + // We saw a non-dot character immediately before the dot + preDotState === 0 || + // The (right-most) trimmed path component is exactly '..' + (preDotState === 1 && + startDot === end - 1 && + startDot === startPart + 1) + ) { + ret.base = ret.name = StringPrototypeSlice(path, start, end); + } else { + ret.name = StringPrototypeSlice(path, start, startDot); + ret.base = StringPrototypeSlice(path, start, end); + ret.ext = StringPrototypeSlice(path, startDot, end); + } + } + + if (startPart > 0) ret.dir = StringPrototypeSlice(path, 0, startPart - 1); + else if (isAbsolute) ret.dir = "/"; + + return ret; + }, + + matchesGlob(path, pattern) { + return lazyMatchGlobPattern()(path, pattern, false); + }, + + sep: "/", + delimiter: ":", + win32: null, + posix: null, +}; + +posix.win32 = win32.win32 = win32; +posix.posix = win32.posix = posix; + +// Legacy internal API, docs-only deprecated: DEP0080 +win32._makeLong = win32.toNamespacedPath; +posix._makeLong = posix.toNamespacedPath; + +module.exports = isWindows ? 
win32 : posix; diff --git a/.codesandbox/node/perf_hooks.js b/.codesandbox/node/perf_hooks.js new file mode 100644 index 00000000..3abbec84 --- /dev/null +++ b/.codesandbox/node/perf_hooks.js @@ -0,0 +1,47 @@ +"use strict"; + +const { ObjectDefineProperty } = primordials; + +const { constants } = internalBinding("performance"); + +const { PerformanceEntry } = require("internal/perf/performance_entry"); +const { PerformanceResourceTiming } = require("internal/perf/resource_timing"); +const { + PerformanceObserver, + PerformanceObserverEntryList, +} = require("internal/perf/observe"); +const { + PerformanceMark, + PerformanceMeasure, +} = require("internal/perf/usertiming"); +const { Performance, performance } = require("internal/perf/performance"); + +const { createHistogram } = require("internal/histogram"); + +const monitorEventLoopDelay = require("internal/perf/event_loop_delay"); +const { + eventLoopUtilization, +} = require("internal/perf/event_loop_utilization"); +const timerify = require("internal/perf/timerify"); + +module.exports = { + Performance, + PerformanceEntry, + PerformanceMark, + PerformanceMeasure, + PerformanceObserver, + PerformanceObserverEntryList, + PerformanceResourceTiming, + monitorEventLoopDelay, + eventLoopUtilization, + timerify, + createHistogram, + performance, +}; + +ObjectDefineProperty(module.exports, "constants", { + __proto__: null, + configurable: false, + enumerable: true, + value: constants, +}); diff --git a/.codesandbox/node/process.js b/.codesandbox/node/process.js new file mode 100644 index 00000000..bdfab0ab --- /dev/null +++ b/.codesandbox/node/process.js @@ -0,0 +1,4 @@ +"use strict"; + +// Re-export process as a built-in module +module.exports = process; diff --git a/.codesandbox/node/querystring.js b/.codesandbox/node/querystring.js new file mode 100644 index 00000000..2f705b59 --- /dev/null +++ b/.codesandbox/node/querystring.js @@ -0,0 +1,790 @@ +"use strict"; + +const { + Array, + ArrayIsArray, + Int8Array, + MathAbs, + NumberIsFinite, + ObjectKeys, + String, + StringPrototypeCharCodeAt, + StringPrototypeSlice, + decodeURIComponent, +} = primordials; + +const { Buffer } = require("buffer"); +const { encodeStr, hexTable, isHexTable } = require("internal/querystring"); +const QueryString = (module.exports = { + unescapeBuffer, + // `unescape()` is a JS global, so we need to use a different local name + unescape: qsUnescape, + + // `escape()` is a JS global, so we need to use a different local name + escape: qsEscape, + + stringify, + encode: stringify, + + parse, + decode: parse, +}); + +const unhexTable = new Int8Array([ + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, // 0 - 15 + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, // 16 - 31 + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, // 32 - 47 + +0, + +1, + +2, + +3, + +4, + +5, + +6, + +7, + +8, + +9, + -1, + -1, + -1, + -1, + -1, + -1, // 48 - 63 + -1, + 10, + 11, + 12, + 13, + 14, + 15, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, // 64 - 79 + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, // 80 - 95 + -1, + 10, + 11, + 12, + 13, + 14, + 15, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, // 96 - 111 + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, // 112 - 127 + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + 
-1, + -1, + -1, // 128 ... + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, + -1, // ... 255 +]); +/** + * A safe fast alternative to decodeURIComponent + * @param {string} s + * @param {boolean} decodeSpaces + * @returns {string} + */ +function unescapeBuffer(s, decodeSpaces) { + const out = Buffer.allocUnsafe(s.length); + let index = 0; + let outIndex = 0; + let currentChar; + let nextChar; + let hexHigh; + let hexLow; + const maxLength = s.length - 2; + // Flag to know if some hex chars have been decoded + let hasHex = false; + while (index < s.length) { + currentChar = StringPrototypeCharCodeAt(s, index); + if (currentChar === 43 /* '+' */ && decodeSpaces) { + out[outIndex++] = 32; // ' ' + index++; + continue; + } + if (currentChar === 37 /* '%' */ && index < maxLength) { + currentChar = StringPrototypeCharCodeAt(s, ++index); + hexHigh = unhexTable[currentChar]; + if (!(hexHigh >= 0)) { + out[outIndex++] = 37; // '%' + continue; + } else { + nextChar = StringPrototypeCharCodeAt(s, ++index); + hexLow = unhexTable[nextChar]; + if (!(hexLow >= 0)) { + out[outIndex++] = 37; // '%' + index--; + } else { + hasHex = true; + currentChar = hexHigh * 16 + hexLow; + } + } + } + out[outIndex++] = currentChar; + index++; + } + return hasHex ? out.slice(0, outIndex) : out; +} + +/** + * @param {string} s + * @param {boolean} decodeSpaces + * @returns {string} + */ +function qsUnescape(s, decodeSpaces) { + try { + return decodeURIComponent(s); + } catch { + return QueryString.unescapeBuffer(s, decodeSpaces).toString(); + } +} + +// These characters do not need escaping when generating query strings: +// ! - . 
_ ~ +// ' ( ) * +// digits +// alpha (uppercase) +// alpha (lowercase) +const noEscape = new Int8Array([ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, // 0 - 15 + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, // 16 - 31 + 0, + 1, + 0, + 0, + 0, + 0, + 0, + 1, + 1, + 1, + 1, + 0, + 0, + 1, + 1, + 0, // 32 - 47 + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 0, + 0, + 0, + 0, + 0, + 0, // 48 - 63 + 0, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, // 64 - 79 + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 0, + 0, + 0, + 0, + 1, // 80 - 95 + 0, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, // 96 - 111 + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 1, + 0, + 0, + 0, + 1, + 0, // 112 - 127 +]); + +/** + * QueryString.escape() replaces encodeURIComponent() + * @see https://www.ecma-international.org/ecma-262/5.1/#sec-15.1.3.4 + * @param {any} str + * @returns {string} + */ +function qsEscape(str) { + if (typeof str !== "string") { + if (typeof str === "object") str = String(str); + else str += ""; + } + + return encodeStr(str, noEscape, hexTable); +} + +/** + * @param {string | number | bigint | boolean | symbol | undefined | null} v + * @returns {string} + */ +function stringifyPrimitive(v) { + if (typeof v === "string") return v; + if (typeof v === "number" && NumberIsFinite(v)) return "" + v; + if (typeof v === "bigint") return "" + v; + if (typeof v === "boolean") return v ? "true" : "false"; + return ""; +} + +/** + * @param {string | number | bigint | boolean} v + * @param {(v: string) => string} encode + * @returns {string} + */ +function encodeStringified(v, encode) { + if (typeof v === "string") return v.length ? encode(v) : ""; + if (typeof v === "number" && NumberIsFinite(v)) { + // Values >= 1e21 automatically switch to scientific notation which requires + // escaping due to the inclusion of a '+' in the output + return MathAbs(v) < 1e21 ? "" + v : encode("" + v); + } + if (typeof v === "bigint") return "" + v; + if (typeof v === "boolean") return v ? "true" : "false"; + return ""; +} + +/** + * @param {string | number | boolean | null} v + * @param {(v: string) => string} encode + * @returns {string} + */ +function encodeStringifiedCustom(v, encode) { + return encode(stringifyPrimitive(v)); +} + +/** + * @param {Record | null>} obj + * @param {string} [sep] + * @param {string} [eq] + * @param {{ encodeURIComponent?: (v: string) => string }} [options] + * @returns {string} + */ +function stringify(obj, sep, eq, options) { + sep ||= "&"; + eq ||= "="; + + let encode = QueryString.escape; + if (options && typeof options.encodeURIComponent === "function") { + encode = options.encodeURIComponent; + } + const convert = + encode === qsEscape ? 
encodeStringified : encodeStringifiedCustom; + + if (obj !== null && typeof obj === "object") { + const keys = ObjectKeys(obj); + const len = keys.length; + let fields = ""; + for (let i = 0; i < len; ++i) { + const k = keys[i]; + const v = obj[k]; + let ks = convert(k, encode); + ks += eq; + + if (ArrayIsArray(v)) { + const vlen = v.length; + if (vlen === 0) continue; + if (fields) fields += sep; + for (let j = 0; j < vlen; ++j) { + if (j) fields += sep; + fields += ks; + fields += convert(v[j], encode); + } + } else { + if (fields) fields += sep; + fields += ks; + fields += convert(v, encode); + } + } + return fields; + } + return ""; +} + +/** + * @param {string} str + * @returns {number[]} + */ +function charCodes(str) { + if (str.length === 0) return []; + if (str.length === 1) return [StringPrototypeCharCodeAt(str, 0)]; + const ret = new Array(str.length); + for (let i = 0; i < str.length; ++i) + ret[i] = StringPrototypeCharCodeAt(str, i); + return ret; +} +const defSepCodes = [38]; // & +const defEqCodes = [61]; // = + +function addKeyVal(obj, key, value, keyEncoded, valEncoded, decode) { + if (key.length > 0 && keyEncoded) key = decodeStr(key, decode); + if (value.length > 0 && valEncoded) value = decodeStr(value, decode); + + if (obj[key] === undefined) { + obj[key] = value; + } else { + const curValue = obj[key]; + // A simple Array-specific property check is enough here to + // distinguish from a string value and is faster and still safe + // since we are generating all of the values being assigned. + if (curValue.pop) curValue[curValue.length] = value; + else obj[key] = [curValue, value]; + } +} + +/** + * Parse a key/val string. + * @param {string} qs + * @param {string} sep + * @param {string} eq + * @param {{ + * maxKeys?: number, + * decodeURIComponent?: (v: string) => string, + * }} [options] + * @returns {Record} + */ +function parse(qs, sep, eq, options) { + const obj = { __proto__: null }; + + if (typeof qs !== "string" || qs.length === 0) { + return obj; + } + + const sepCodes = !sep ? defSepCodes : charCodes(String(sep)); + const eqCodes = !eq ? defEqCodes : charCodes(String(eq)); + const sepLen = sepCodes.length; + const eqLen = eqCodes.length; + + let pairs = 1000; + if (options && typeof options.maxKeys === "number") { + // -1 is used in place of a value like Infinity for meaning + // "unlimited pairs" because of additional checks V8 (at least as of v5.4) + // has to do when using variables that contain values like Infinity. Since + // `pairs` is always decremented and checked explicitly for 0, -1 works + // effectively the same as Infinity, while providing a significant + // performance boost. + pairs = options.maxKeys > 0 ? options.maxKeys : -1; + } + + let decode = QueryString.unescape; + if (options && typeof options.decodeURIComponent === "function") { + decode = options.decodeURIComponent; + } + const customDecode = decode !== qsUnescape; + + let lastPos = 0; + let sepIdx = 0; + let eqIdx = 0; + let key = ""; + let value = ""; + let keyEncoded = customDecode; + let valEncoded = customDecode; + const plusChar = customDecode ? "%20" : " "; + let encodeCheck = 0; + for (let i = 0; i < qs.length; ++i) { + const code = StringPrototypeCharCodeAt(qs, i); + + // Try matching key/value pair separator (e.g. '&') + if (code === sepCodes[sepIdx]) { + if (++sepIdx === sepLen) { + // Key/value pair separator match! 
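+        // Illustrative (added comment, not part of the upstream source): while
+        // parsing 'foo=bar&baz=qux' this branch is reached at the '&' (i === 7),
+        // so `end` below (7) marks the position just after the value 'bar'.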
+ const end = i - sepIdx + 1; + if (eqIdx < eqLen) { + // We didn't find the (entire) key/value separator + if (lastPos < end) { + // Treat the substring as part of the key instead of the value + key += StringPrototypeSlice(qs, lastPos, end); + } else if (key.length === 0) { + // We saw an empty substring between separators + if (--pairs === 0) return obj; + lastPos = i + 1; + sepIdx = eqIdx = 0; + continue; + } + } else if (lastPos < end) { + value += StringPrototypeSlice(qs, lastPos, end); + } + + addKeyVal(obj, key, value, keyEncoded, valEncoded, decode); + + if (--pairs === 0) return obj; + keyEncoded = valEncoded = customDecode; + key = value = ""; + encodeCheck = 0; + lastPos = i + 1; + sepIdx = eqIdx = 0; + } + } else { + sepIdx = 0; + // Try matching key/value separator (e.g. '=') if we haven't already + if (eqIdx < eqLen) { + if (code === eqCodes[eqIdx]) { + if (++eqIdx === eqLen) { + // Key/value separator match! + const end = i - eqIdx + 1; + if (lastPos < end) key += StringPrototypeSlice(qs, lastPos, end); + encodeCheck = 0; + lastPos = i + 1; + } + continue; + } else { + eqIdx = 0; + if (!keyEncoded) { + // Try to match an (valid) encoded byte once to minimize unnecessary + // calls to string decoding functions + if (code === 37 /* % */) { + encodeCheck = 1; + continue; + } else if (encodeCheck > 0) { + if (isHexTable[code] === 1) { + if (++encodeCheck === 3) keyEncoded = true; + continue; + } else { + encodeCheck = 0; + } + } + } + } + if (code === 43 /* + */) { + if (lastPos < i) key += StringPrototypeSlice(qs, lastPos, i); + key += plusChar; + lastPos = i + 1; + continue; + } + } + if (code === 43 /* + */) { + if (lastPos < i) value += StringPrototypeSlice(qs, lastPos, i); + value += plusChar; + lastPos = i + 1; + } else if (!valEncoded) { + // Try to match an (valid) encoded byte (once) to minimize unnecessary + // calls to string decoding functions + if (code === 37 /* % */) { + encodeCheck = 1; + } else if (encodeCheck > 0) { + if (isHexTable[code] === 1) { + if (++encodeCheck === 3) valEncoded = true; + } else { + encodeCheck = 0; + } + } + } + } + } + + // Deal with any leftover key or value data + if (lastPos < qs.length) { + if (eqIdx < eqLen) key += StringPrototypeSlice(qs, lastPos); + else if (sepIdx < sepLen) value += StringPrototypeSlice(qs, lastPos); + } else if (eqIdx === 0 && key.length === 0) { + // We ended on an empty substring + return obj; + } + + addKeyVal(obj, key, value, keyEncoded, valEncoded, decode); + + return obj; +} + +/** + * V8 does not optimize functions with try-catch blocks, so we isolate them here + * to minimize the damage (Note: no longer true as of V8 5.4 -- but still will + * not be inlined). 
+ * @param {string} s + * @param {(v: string) => string} decoder + * @returns {string} + */ +function decodeStr(s, decoder) { + try { + return decoder(s); + } catch { + return QueryString.unescape(s, true); + } +} diff --git a/.codesandbox/node/readline.js b/.codesandbox/node/readline.js new file mode 100644 index 00000000..fbe177f4 --- /dev/null +++ b/.codesandbox/node/readline.js @@ -0,0 +1,514 @@ +"use strict"; + +const { + DateNow, + FunctionPrototypeBind, + FunctionPrototypeCall, + ObjectDefineProperties, + ObjectSetPrototypeOf, + Promise, + PromiseReject, + StringPrototypeSlice, + SymbolDispose, +} = primordials; + +const { + clearLine, + clearScreenDown, + cursorTo, + moveCursor, +} = require("internal/readline/callbacks"); +const emitKeypressEvents = require("internal/readline/emitKeypressEvents"); +const promises = require("readline/promises"); + +const { AbortError } = require("internal/errors"); +const { inspect } = require("internal/util/inspect"); +const { kEmptyObject, promisify } = require("internal/util"); +const { validateAbortSignal } = require("internal/validators"); + +/** + * @typedef {import('./stream.js').Readable} Readable + * @typedef {import('./stream.js').Writable} Writable + */ + +const { + Interface: _Interface, + InterfaceConstructor, + kAddHistory, + kDecoder, + kDeleteLeft, + kDeleteLineLeft, + kDeleteLineRight, + kDeleteRight, + kDeleteWordLeft, + kDeleteWordRight, + kGetDisplayPos, + kHistoryNext, + kHistoryPrev, + kInsertString, + kLine, + kLine_buffer, + kMoveCursor, + kNormalWrite, + kOldPrompt, + kOnLine, + kPreviousKey, + kPrompt, + kQuestion, + kQuestionCallback, + kQuestionCancel, + kRefreshLine, + kSawKeyPress, + kSawReturnAt, + kSetRawMode, + kTabComplete, + kTabCompleter, + kTtyWrite, + kWordLeft, + kWordRight, + kWriteToOutput, +} = require("internal/readline/interface"); +let addAbortListener; + +function Interface(input, output, completer, terminal) { + if (!(this instanceof Interface)) { + return new Interface(input, output, completer, terminal); + } + + if ( + input?.input && + typeof input.completer === "function" && + input.completer.length !== 2 + ) { + const { completer } = input; + input.completer = (v, cb) => cb(null, completer(v)); + } else if (typeof completer === "function" && completer.length !== 2) { + const realCompleter = completer; + completer = (v, cb) => cb(null, realCompleter(v)); + } + + FunctionPrototypeCall( + InterfaceConstructor, + this, + input, + output, + completer, + terminal + ); + + if (process.env.TERM === "dumb") { + this._ttyWrite = FunctionPrototypeBind(_ttyWriteDumb, this); + } +} + +ObjectSetPrototypeOf(Interface.prototype, _Interface.prototype); +ObjectSetPrototypeOf(Interface, _Interface); + +/** + * Displays `query` by writing it to the `output`. + * @param {string} query + * @param {{ signal?: AbortSignal; }} [options] + * @param {Function} cb + * @returns {void} + */ +Interface.prototype.question = function question(query, options, cb) { + cb = typeof options === "function" ? options : cb; + if (options === null || typeof options !== "object") { + options = kEmptyObject; + } + + if (options.signal) { + validateAbortSignal(options.signal, "options.signal"); + if (options.signal.aborted) { + return; + } + + const onAbort = () => { + this[kQuestionCancel](); + }; + addAbortListener ??= + require("internal/events/abort_listener").addAbortListener; + const disposable = addAbortListener(options.signal, onAbort); + const originalCb = cb; + cb = + typeof cb === "function" + ? 
(answer) => { + disposable[SymbolDispose](); + return originalCb(answer); + } + : disposable[SymbolDispose]; + } + + if (typeof cb === "function") { + this[kQuestion](query, cb); + } +}; +Interface.prototype.question[promisify.custom] = function question( + query, + options +) { + if (options === null || typeof options !== "object") { + options = kEmptyObject; + } + + if (options.signal?.aborted) { + return PromiseReject( + new AbortError(undefined, { cause: options.signal.reason }) + ); + } + + return new Promise((resolve, reject) => { + let cb = resolve; + + if (options.signal) { + const onAbort = () => { + reject(new AbortError(undefined, { cause: options.signal.reason })); + }; + addAbortListener ??= + require("internal/events/abort_listener").addAbortListener; + const disposable = addAbortListener(options.signal, onAbort); + cb = (answer) => { + disposable[SymbolDispose](); + resolve(answer); + }; + } + + this.question(query, options, cb); + }); +}; + +/** + * Creates a new `readline.Interface` instance. + * @param {Readable | { + * input: Readable; + * output: Writable; + * completer?: Function; + * terminal?: boolean; + * history?: string[]; + * historySize?: number; + * removeHistoryDuplicates?: boolean; + * prompt?: string; + * crlfDelay?: number; + * escapeCodeTimeout?: number; + * tabSize?: number; + * signal?: AbortSignal; + * }} input + * @param {Writable} [output] + * @param {Function} [completer] + * @param {boolean} [terminal] + * @returns {Interface} + */ +function createInterface(input, output, completer, terminal) { + return new Interface(input, output, completer, terminal); +} + +ObjectDefineProperties(Interface.prototype, { + // Redirect internal prototype methods to the underscore notation for backward + // compatibility. + [kSetRawMode]: { + __proto__: null, + get() { + return this._setRawMode; + }, + }, + [kOnLine]: { + __proto__: null, + get() { + return this._onLine; + }, + }, + [kWriteToOutput]: { + __proto__: null, + get() { + return this._writeToOutput; + }, + }, + [kAddHistory]: { + __proto__: null, + get() { + return this._addHistory; + }, + }, + [kRefreshLine]: { + __proto__: null, + get() { + return this._refreshLine; + }, + }, + [kNormalWrite]: { + __proto__: null, + get() { + return this._normalWrite; + }, + }, + [kInsertString]: { + __proto__: null, + get() { + return this._insertString; + }, + }, + [kTabComplete]: { + __proto__: null, + get() { + return this._tabComplete; + }, + }, + [kWordLeft]: { + __proto__: null, + get() { + return this._wordLeft; + }, + }, + [kWordRight]: { + __proto__: null, + get() { + return this._wordRight; + }, + }, + [kDeleteLeft]: { + __proto__: null, + get() { + return this._deleteLeft; + }, + }, + [kDeleteRight]: { + __proto__: null, + get() { + return this._deleteRight; + }, + }, + [kDeleteWordLeft]: { + __proto__: null, + get() { + return this._deleteWordLeft; + }, + }, + [kDeleteWordRight]: { + __proto__: null, + get() { + return this._deleteWordRight; + }, + }, + [kDeleteLineLeft]: { + __proto__: null, + get() { + return this._deleteLineLeft; + }, + }, + [kDeleteLineRight]: { + __proto__: null, + get() { + return this._deleteLineRight; + }, + }, + [kLine]: { + __proto__: null, + get() { + return this._line; + }, + }, + [kHistoryNext]: { + __proto__: null, + get() { + return this._historyNext; + }, + }, + [kHistoryPrev]: { + __proto__: null, + get() { + return this._historyPrev; + }, + }, + [kGetDisplayPos]: { + __proto__: null, + get() { + return this._getDisplayPos; + }, + }, + [kMoveCursor]: { + __proto__: null, + 
get() { + return this._moveCursor; + }, + }, + [kTtyWrite]: { + __proto__: null, + get() { + return this._ttyWrite; + }, + }, + + // Defining proxies for the internal instance properties for backward + // compatibility. + _decoder: { + __proto__: null, + get() { + return this[kDecoder]; + }, + set(value) { + this[kDecoder] = value; + }, + }, + _line_buffer: { + __proto__: null, + get() { + return this[kLine_buffer]; + }, + set(value) { + this[kLine_buffer] = value; + }, + }, + _oldPrompt: { + __proto__: null, + get() { + return this[kOldPrompt]; + }, + set(value) { + this[kOldPrompt] = value; + }, + }, + _previousKey: { + __proto__: null, + get() { + return this[kPreviousKey]; + }, + set(value) { + this[kPreviousKey] = value; + }, + }, + _prompt: { + __proto__: null, + get() { + return this[kPrompt]; + }, + set(value) { + this[kPrompt] = value; + }, + }, + _questionCallback: { + __proto__: null, + get() { + return this[kQuestionCallback]; + }, + set(value) { + this[kQuestionCallback] = value; + }, + }, + _sawKeyPress: { + __proto__: null, + get() { + return this[kSawKeyPress]; + }, + set(value) { + this[kSawKeyPress] = value; + }, + }, + _sawReturnAt: { + __proto__: null, + get() { + return this[kSawReturnAt]; + }, + set(value) { + this[kSawReturnAt] = value; + }, + }, +}); + +// Make internal methods public for backward compatibility. +Interface.prototype._setRawMode = _Interface.prototype[kSetRawMode]; +Interface.prototype._onLine = _Interface.prototype[kOnLine]; +Interface.prototype._writeToOutput = _Interface.prototype[kWriteToOutput]; +Interface.prototype._addHistory = _Interface.prototype[kAddHistory]; +Interface.prototype._refreshLine = _Interface.prototype[kRefreshLine]; +Interface.prototype._normalWrite = _Interface.prototype[kNormalWrite]; +Interface.prototype._insertString = _Interface.prototype[kInsertString]; +Interface.prototype._tabComplete = function (lastKeypressWasTab) { + // Overriding parent method because `this.completer` in the legacy + // implementation takes a callback instead of being an async function. 
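+  // Illustrative (added comment, not part of the upstream source): a legacy
+  // callback-style completer has the shape
+  //   (line, cb) => cb(null, [['.help', '.exit'], line])
+  // i.e. it yields an array of matches plus the substring being completed.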
+ this.pause(); + const string = StringPrototypeSlice(this.line, 0, this.cursor); + this.completer(string, (err, value) => { + this.resume(); + + if (err) { + this._writeToOutput(`Tab completion error: ${inspect(err)}`); + return; + } + + this[kTabCompleter](lastKeypressWasTab, value); + }); +}; +Interface.prototype._wordLeft = _Interface.prototype[kWordLeft]; +Interface.prototype._wordRight = _Interface.prototype[kWordRight]; +Interface.prototype._deleteLeft = _Interface.prototype[kDeleteLeft]; +Interface.prototype._deleteRight = _Interface.prototype[kDeleteRight]; +Interface.prototype._deleteWordLeft = _Interface.prototype[kDeleteWordLeft]; +Interface.prototype._deleteWordRight = _Interface.prototype[kDeleteWordRight]; +Interface.prototype._deleteLineLeft = _Interface.prototype[kDeleteLineLeft]; +Interface.prototype._deleteLineRight = _Interface.prototype[kDeleteLineRight]; +Interface.prototype._line = _Interface.prototype[kLine]; +Interface.prototype._historyNext = _Interface.prototype[kHistoryNext]; +Interface.prototype._historyPrev = _Interface.prototype[kHistoryPrev]; +Interface.prototype._getDisplayPos = _Interface.prototype[kGetDisplayPos]; +Interface.prototype._getCursorPos = _Interface.prototype.getCursorPos; +Interface.prototype._moveCursor = _Interface.prototype[kMoveCursor]; +Interface.prototype._ttyWrite = _Interface.prototype[kTtyWrite]; + +function _ttyWriteDumb(s, key) { + key ||= kEmptyObject; + if (key.name === "escape") return; + + if (this[kSawReturnAt] && key.name !== "enter") this[kSawReturnAt] = 0; + + if (key.ctrl) { + if (key.name === "c") { + if (this.listenerCount("SIGINT") > 0) { + this.emit("SIGINT"); + } else { + // This readline instance is finished + this.close(); + } + + return; + } else if (key.name === "d") { + this.close(); + return; + } + } + + switch (key.name) { + case "return": // Carriage return, i.e. 
\r + this[kSawReturnAt] = DateNow(); + this._line(); + break; + + case "enter": + // When key interval > crlfDelay + if ( + this[kSawReturnAt] === 0 || + DateNow() - this[kSawReturnAt] > this.crlfDelay + ) { + this._line(); + } + this[kSawReturnAt] = 0; + break; + + default: + if (typeof s === "string" && s) { + this.line += s; + this.cursor += s.length; + this._writeToOutput(s); + } + } +} + +module.exports = { + Interface, + clearLine, + clearScreenDown, + createInterface, + cursorTo, + emitKeypressEvents, + moveCursor, + promises, +}; diff --git a/.codesandbox/node/repl.js b/.codesandbox/node/repl.js new file mode 100644 index 00000000..9bad4999 --- /dev/null +++ b/.codesandbox/node/repl.js @@ -0,0 +1,1488 @@ +"use strict"; + +const { + ArrayPrototypeAt, + ArrayPrototypeFilter, + ArrayPrototypeFindLastIndex, + ArrayPrototypeForEach, + ArrayPrototypeJoin, + ArrayPrototypeMap, + ArrayPrototypePop, + ArrayPrototypePush, + ArrayPrototypeShift, + ArrayPrototypeSlice, + ArrayPrototypeSort, + Boolean, + Error: MainContextError, + FunctionPrototypeBind, + FunctionPrototypeCall, + JSONStringify, + MathMaxApply, + NumberIsNaN, + NumberParseFloat, + ObjectAssign, + ObjectDefineProperty, + ObjectGetOwnPropertyDescriptor, + ObjectGetOwnPropertyNames, + ObjectKeys, + Promise, + ReflectApply, + RegExp, + RegExpPrototypeExec, + SafePromiseRace, + SafeSet, + SafeWeakSet, + StringPrototypeCharAt, + StringPrototypeEndsWith, + StringPrototypeIncludes, + StringPrototypeRepeat, + StringPrototypeSlice, + StringPrototypeStartsWith, + StringPrototypeTrim, + Symbol, + SyntaxError, + globalThis, +} = primordials; + +const { + makeRequireFunction, + addBuiltinLibsToObject, +} = require("internal/modules/helpers"); +const { parse: acornParse } = require("internal/deps/acorn/acorn/dist/acorn"); +const acornWalk = require("internal/deps/acorn/acorn-walk/dist/walk"); +const { + decorateErrorStack, + isError, + deprecate, + SideEffectFreeRegExpPrototypeSymbolReplace, + SideEffectFreeRegExpPrototypeSymbolSplit, +} = require("internal/util"); +const { inspect } = require("internal/util/inspect"); +const vm = require("vm"); + +const { runInThisContext, runInContext } = vm.Script.prototype; + +const path = require("path"); +const fs = require("fs"); +const { Interface } = require("readline"); +const { commonPrefix } = require("internal/readline/utils"); +const { Console } = require("console"); +const { shouldColorize } = require("internal/util/colors"); +const CJSModule = require("internal/modules/cjs/loader").Module; +const domain = require("domain"); +let debug = require("internal/util/debuglog").debuglog("repl", (fn) => { + debug = fn; +}); +const { + ErrorPrepareStackTrace, + codes: { + ERR_CANNOT_WATCH_SIGINT, + ERR_INVALID_REPL_EVAL_CONFIG, + ERR_INVALID_REPL_INPUT, + ERR_MISSING_ARGS, + ERR_SCRIPT_EXECUTION_INTERRUPTED, + }, + isErrorStackTraceLimitWritable, + overrideStackTrace, +} = require("internal/errors"); +const { sendInspectorCommand } = require("internal/util/inspector"); +const { getOptionValue } = require("internal/options"); +const { validateFunction, validateObject } = require("internal/validators"); +const experimentalREPLAwait = getOptionValue("--experimental-repl-await"); +const pendingDeprecation = getOptionValue("--pending-deprecation"); +const { + REPL_MODE_SLOPPY, + REPL_MODE_STRICT, + isRecoverableError, + kStandaloneREPL, + setupPreview, + setupReverseSearch, + isObjectLiteral, + isValidSyntax, + kContextId, + getREPLResourceName, + globalBuiltins, + getReplBuiltinLibs, + setReplBuiltinLibs, + 
fixReplRequire, +} = require("internal/repl/utils"); +const { complete } = require("internal/repl/completion"); +const { startSigintWatchdog, stopSigintWatchdog } = + internalBinding("contextify"); + +const { makeContextifyScript } = require("internal/vm"); +const { + kMultilinePrompt, + kAddNewLineOnTTY, + kLastCommandErrored, +} = require("internal/readline/interface"); + +// Lazy-loaded. +let processTopLevelAwait; + +const parentModule = module; +const domainSet = new SafeWeakSet(); + +const kBufferedCommandSymbol = Symbol("bufferedCommand"); +const kLoadingSymbol = Symbol("loading"); + +let addedNewListener = false; + +fixReplRequire(module); + +// This is the default "writer" value, if none is passed in the REPL options, +// and it can be overridden by custom print functions, such as `probe` or +// `eyes.js`. +const writer = (obj) => inspect(obj, writer.options); +writer.options = { ...inspect.defaultOptions, showProxy: true }; + +// Converts static import statement to dynamic import statement +const toDynamicImport = (codeLine) => { + let dynamicImportStatement = ""; + const ast = acornParse(codeLine, { + __proto__: null, + sourceType: "module", + ecmaVersion: "latest", + }); + acornWalk.ancestor(ast, { + ImportDeclaration(node) { + const awaitDynamicImport = `await import(${JSONStringify( + node.source.value + )});`; + if (node.specifiers.length === 0) { + dynamicImportStatement += awaitDynamicImport; + } else if ( + node.specifiers.length === 1 && + node.specifiers[0].type === "ImportNamespaceSpecifier" + ) { + dynamicImportStatement += `const ${node.specifiers[0].local.name} = ${awaitDynamicImport}`; + } else { + const importNames = ArrayPrototypeJoin( + ArrayPrototypeMap(node.specifiers, ({ local, imported }) => + local.name === imported?.name + ? local.name + : `${imported?.name ?? "default"}: ${local.name}` + ), + ", " + ); + dynamicImportStatement += `const { ${importNames} } = ${awaitDynamicImport}`; + } + }, + }); + return dynamicImportStatement; +}; + +class Recoverable extends SyntaxError { + constructor(err) { + super(); + this.err = err; + } +} + +class REPLServer extends Interface { + constructor(prompt, stream, eval_, useGlobal, ignoreUndefined, replMode) { + let options; + if (prompt !== null && typeof prompt === "object") { + // An options object was given. + options = { ...prompt }; + stream = options.stream || options.socket; + eval_ = options.eval; + useGlobal = options.useGlobal; + ignoreUndefined = options.ignoreUndefined; + prompt = options.prompt; + replMode = options.replMode; + } else { + options = {}; + } + + if (!options.input && !options.output) { + // Legacy API, passing a 'stream'/'socket' option. + // Use stdin and stdout as the default streams if none were given. + stream ||= process; + + // We're given a duplex readable/writable Stream, like a `net.Socket` + // or a custom object with 2 streams, or the `process` object. + options.input = stream.stdin || stream; + options.output = stream.stdout || stream; + } + + if (options.terminal === undefined) { + options.terminal = options.output.isTTY; + } + options.terminal = !!options.terminal; + + if (options.terminal && options.useColors === undefined) { + // If possible, check if stdout supports colors or not. + options.useColors = shouldColorize(options.output); + } + + const preview = + options.terminal && + (options.preview !== undefined ? 
!!options.preview : !eval_); + + super({ + input: options.input, + output: options.output, + completer: options.completer || completer, + terminal: options.terminal, + historySize: options.historySize, + prompt, + }); + + ObjectDefineProperty(this, "inputStream", { + __proto__: null, + get: pendingDeprecation + ? deprecate( + () => this.input, + "repl.inputStream and repl.outputStream are deprecated. " + + "Use repl.input and repl.output instead", + "DEP0141" + ) + : () => this.input, + set: pendingDeprecation + ? deprecate( + (val) => (this.input = val), + "repl.inputStream and repl.outputStream are deprecated. " + + "Use repl.input and repl.output instead", + "DEP0141" + ) + : (val) => (this.input = val), + enumerable: false, + configurable: true, + }); + ObjectDefineProperty(this, "outputStream", { + __proto__: null, + get: pendingDeprecation + ? deprecate( + () => this.output, + "repl.inputStream and repl.outputStream are deprecated. " + + "Use repl.input and repl.output instead", + "DEP0141" + ) + : () => this.output, + set: pendingDeprecation + ? deprecate( + (val) => (this.output = val), + "repl.inputStream and repl.outputStream are deprecated. " + + "Use repl.input and repl.output instead", + "DEP0141" + ) + : (val) => (this.output = val), + enumerable: false, + configurable: true, + }); + + this.allowBlockingCompletions = !!options.allowBlockingCompletions; + this.useColors = !!options.useColors; + this._domain = options.domain || domain.create(); + this.useGlobal = !!useGlobal; + this.ignoreUndefined = !!ignoreUndefined; + this.replMode = replMode || module.exports.REPL_MODE_SLOPPY; + this.underscoreAssigned = false; + this.last = undefined; + this.underscoreErrAssigned = false; + this.lastError = undefined; + this.breakEvalOnSigint = !!options.breakEvalOnSigint; + this.editorMode = false; + // Context id for use with the inspector protocol. + this[kContextId] = undefined; + this[kLastCommandErrored] = false; + + if (this.breakEvalOnSigint && eval_) { + // Allowing this would not reflect user expectations. + // breakEvalOnSigint affects only the behavior of the default eval(). + throw new ERR_INVALID_REPL_EVAL_CONFIG(); + } + + if (options[kStandaloneREPL]) { + // It is possible to introspect the running REPL accessing this variable + // from inside the REPL. This is useful for anyone working on the REPL. + module.exports.repl = this; + } else if (!addedNewListener) { + // Add this listener only once and use a WeakSet that contains the REPLs + // domains. Otherwise we'd have to add a single listener to each REPL + // instance and that could trigger the `MaxListenersExceededWarning`. + process.prependListener("newListener", (event, listener) => { + if ( + event === "uncaughtException" && + process.domain && + listener.name !== "domainUncaughtExceptionClear" && + domainSet.has(process.domain) + ) { + // Throw an error so that the event will not be added and the current + // domain takes over. That way the user is notified about the error + // and the current code evaluation is stopped, just as any other code + // that contains an error. 
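+          // Illustrative (added comment, not part of the upstream source): typing
+          //   process.on('uncaughtException', () => {})
+          // at the prompt ends up here, so the listener is rejected instead of
+          // being installed.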
+ throw new ERR_INVALID_REPL_INPUT( + "Listeners for `uncaughtException` cannot be used in the REPL" + ); + } + }); + addedNewListener = true; + } + + domainSet.add(this._domain); + + const savedRegExMatches = ["", "", "", "", "", "", "", "", "", ""]; + const sep = "\u0000\u0000\u0000"; + const regExMatcher = new RegExp( + `^${sep}(.*)${sep}(.*)${sep}(.*)${sep}(.*)` + + `${sep}(.*)${sep}(.*)${sep}(.*)${sep}(.*)` + + `${sep}(.*)$` + ); + + eval_ ||= defaultEval; + + const self = this; + + // Pause taking in new input, and store the keys in a buffer. + const pausedBuffer = []; + let paused = false; + function pause() { + paused = true; + } + + function unpause() { + if (!paused) return; + paused = false; + let entry; + const tmpCompletionEnabled = self.isCompletionEnabled; + while ((entry = ArrayPrototypeShift(pausedBuffer)) !== undefined) { + const { 0: type, 1: payload, 2: isCompletionEnabled } = entry; + switch (type) { + case "key": { + const { 0: d, 1: key } = payload; + self.isCompletionEnabled = isCompletionEnabled; + self._ttyWrite(d, key); + break; + } + case "close": + self.emit("exit"); + break; + } + if (paused) { + break; + } + } + self.isCompletionEnabled = tmpCompletionEnabled; + } + + function defaultEval(code, context, file, cb) { + let result, script, wrappedErr; + let err = null; + let wrappedCmd = false; + let awaitPromise = false; + const input = code; + + if (isObjectLiteral(code) && isValidSyntax(code)) { + // Add parentheses to make sure `code` is parsed as an expression + code = `(${StringPrototypeTrim(code)})\n`; + wrappedCmd = true; + } + + const hostDefinedOptionId = Symbol(`eval:${file}`); + let parentURL; + try { + const { pathToFileURL } = require("internal/url"); + // Adding `/repl` prevents dynamic imports from loading relative + // to the parent of `process.cwd()`. + parentURL = pathToFileURL(path.join(process.cwd(), "repl")).href; + } catch { + // Continue regardless of error. + } + async function importModuleDynamically( + specifier, + _, + importAttributes, + phase + ) { + const cascadedLoader = + require("internal/modules/esm/loader").getOrInitializeCascadedLoader(); + return cascadedLoader.import( + specifier, + parentURL, + importAttributes, + phase === "evaluation" + ? cascadedLoader.kEvaluationPhase + : cascadedLoader.kSourcePhase + ); + } + // `experimentalREPLAwait` is set to true by default. + // Shall be false in case `--no-experimental-repl-await` flag is used. 
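+      // Illustrative (added comment, not part of the upstream source): with
+      // top-level await enabled, input such as `await Promise.resolve(42)` is
+      // handed to processTopLevelAwait below and rewritten into an async wrapper
+      // before it is compiled.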
+ if (experimentalREPLAwait && StringPrototypeIncludes(code, "await")) { + if (processTopLevelAwait === undefined) { + ({ processTopLevelAwait } = require("internal/repl/await")); + } + + try { + const potentialWrappedCode = processTopLevelAwait(code); + if (potentialWrappedCode !== null) { + code = potentialWrappedCode; + wrappedCmd = true; + awaitPromise = true; + } + } catch (e) { + let recoverableError = false; + if (e.name === "SyntaxError") { + // Remove all "await"s and attempt running the script + // in order to detect if error is truly non recoverable + const fallbackCode = SideEffectFreeRegExpPrototypeSymbolReplace( + /\bawait\b/g, + code, + "" + ); + try { + makeContextifyScript( + fallbackCode, // code + file, // filename, + 0, // lineOffset + 0, // columnOffset, + undefined, // cachedData + false, // produceCachedData + undefined, // parsingContext + hostDefinedOptionId, // hostDefinedOptionId + importModuleDynamically // importModuleDynamically + ); + } catch (fallbackError) { + if (isRecoverableError(fallbackError, fallbackCode)) { + recoverableError = true; + err = new Recoverable(e); + } + } + } + if (!recoverableError) { + decorateErrorStack(e); + err = e; + } + } + } + + // First, create the Script object to check the syntax + if (code === "\n") return cb(null); + + if (err === null) { + while (true) { + try { + if ( + self.replMode === module.exports.REPL_MODE_STRICT && + RegExpPrototypeExec(/^\s*$/, code) === null + ) { + // "void 0" keeps the repl from returning "use strict" as the result + // value for statements and declarations that don't return a value. + code = `'use strict'; void 0;\n${code}`; + } + script = makeContextifyScript( + code, // code + file, // filename, + 0, // lineOffset + 0, // columnOffset, + undefined, // cachedData + false, // produceCachedData + undefined, // parsingContext + hostDefinedOptionId, // hostDefinedOptionId + importModuleDynamically // importModuleDynamically + ); + } catch (e) { + debug("parse error %j", code, e); + if (wrappedCmd) { + // Unwrap and try again + wrappedCmd = false; + awaitPromise = false; + code = input; + wrappedErr = e; + continue; + } + // Preserve original error for wrapped command + const error = wrappedErr || e; + if (isRecoverableError(error, code)) err = new Recoverable(error); + else err = error; + } + break; + } + } + + // This will set the values from `savedRegExMatches` to corresponding + // predefined RegExp properties `RegExp.$1`, `RegExp.$2` ... `RegExp.$9` + RegExpPrototypeExec( + regExMatcher, + ArrayPrototypeJoin(savedRegExMatches, sep) + ); + + let finished = false; + function finishExecution(err, result) { + if (finished) return; + finished = true; + + // After executing the current expression, store the values of RegExp + // predefined properties back in `savedRegExMatches` + for (let idx = 1; idx < savedRegExMatches.length; idx += 1) { + savedRegExMatches[idx] = RegExp[`$${idx}`]; + } + + cb(err, result); + } + + if (!err) { + // Unset raw mode during evaluation so that Ctrl+C raises a signal. + let previouslyInRawMode; + if (self.breakEvalOnSigint) { + // Start the SIGINT watchdog before entering raw mode so that a very + // quick Ctrl+C doesn't lead to aborting the process completely. 
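+        // Illustrative (added comment, not part of the upstream source): with
+        // repl.start({ breakEvalOnSigint: true }), pressing Ctrl+C while
+        // evaluating something like `while (true) {}` interrupts the script
+        // instead of terminating the Node.js process.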
+ if (!startSigintWatchdog()) throw new ERR_CANNOT_WATCH_SIGINT(); + previouslyInRawMode = self._setRawMode(false); + } + + try { + try { + const scriptOptions = { + displayErrors: false, + breakOnSigint: self.breakEvalOnSigint, + }; + + if (self.useGlobal) { + result = FunctionPrototypeCall( + runInThisContext, + script, + scriptOptions + ); + } else { + result = FunctionPrototypeCall( + runInContext, + script, + context, + scriptOptions + ); + } + } finally { + if (self.breakEvalOnSigint) { + // Reset terminal mode to its previous value. + self._setRawMode(previouslyInRawMode); + + // Returns true if there were pending SIGINTs *after* the script + // has terminated without being interrupted itself. + if (stopSigintWatchdog()) { + self.emit("SIGINT"); + } + } + } + } catch (e) { + err = e; + + if (process.domain) { + debug("not recoverable, send to domain"); + process.domain.emit("error", err); + process.domain.exit(); + return; + } + } + + if (awaitPromise && !err) { + let sigintListener; + pause(); + let promise = result; + if (self.breakEvalOnSigint) { + const interrupt = new Promise((resolve, reject) => { + sigintListener = () => { + const tmp = MainContextError.stackTraceLimit; + if (isErrorStackTraceLimitWritable()) + MainContextError.stackTraceLimit = 0; + const err = new ERR_SCRIPT_EXECUTION_INTERRUPTED(); + if (isErrorStackTraceLimitWritable()) + MainContextError.stackTraceLimit = tmp; + reject(err); + }; + prioritizedSigintQueue.add(sigintListener); + }); + promise = SafePromiseRace([promise, interrupt]); + } + + (async () => { + try { + const result = (await promise)?.value; + finishExecution(null, result); + } catch (err) { + if (err && process.domain) { + debug("not recoverable, send to domain"); + process.domain.emit("error", err); + process.domain.exit(); + return; + } + finishExecution(err); + } finally { + // Remove prioritized SIGINT listener if it was not called. + prioritizedSigintQueue.delete(sigintListener); + unpause(); + } + })(); + } + } + + if (!awaitPromise || err) { + finishExecution(err, result); + } + } + + self.eval = self._domain.bind(eval_); + + self._domain.on("error", function debugDomainError(e) { + debug("domain error"); + let errStack = ""; + + if (typeof e === "object" && e !== null) { + overrideStackTrace.set(e, (error, stackFrames) => { + let frames; + if (typeof stackFrames === "object") { + // Search from the bottom of the call stack to + // find the first frame with a null function name + const idx = ArrayPrototypeFindLastIndex( + stackFrames, + (frame) => frame.getFunctionName() === null + ); + // If found, get rid of it and everything below it + frames = ArrayPrototypeSlice(stackFrames, 0, idx); + } else { + frames = stackFrames; + } + // FIXME(devsnek): this is inconsistent with the checks + // that the real prepareStackTrace dispatch uses in + // lib/internal/errors.js. + if (typeof MainContextError.prepareStackTrace === "function") { + return MainContextError.prepareStackTrace(error, frames); + } + return ErrorPrepareStackTrace(error, frames); + }); + decorateErrorStack(e); + + if (e.domainThrown) { + delete e.domain; + delete e.domainThrown; + } + + if (isError(e)) { + if (e.stack) { + if (e.name === "SyntaxError") { + // Remove stack trace. 
+ e.stack = SideEffectFreeRegExpPrototypeSymbolReplace( + /^\s+at\s.*\n?/gm, + SideEffectFreeRegExpPrototypeSymbolReplace( + /^REPL\d+:\d+\r?\n/, + e.stack, + "" + ), + "" + ); + const importErrorStr = + "Cannot use import statement outside a " + "module"; + if (StringPrototypeIncludes(e.message, importErrorStr)) { + e.message = + "Cannot use import statement inside the Node.js " + + "REPL, alternatively use dynamic import: " + + toDynamicImport(ArrayPrototypeAt(self.lines, -1)); + e.stack = SideEffectFreeRegExpPrototypeSymbolReplace( + /SyntaxError:.*\n/, + e.stack, + `SyntaxError: ${e.message}\n` + ); + } + } else if (self.replMode === module.exports.REPL_MODE_STRICT) { + e.stack = SideEffectFreeRegExpPrototypeSymbolReplace( + /(\s+at\s+REPL\d+:)(\d+)/, + e.stack, + (_, pre, line) => pre + (line - 1) + ); + } + } + errStack = self.writer(e); + + // Remove one line error braces to keep the old style in place. + if (errStack[0] === "[" && errStack[errStack.length - 1] === "]") { + errStack = StringPrototypeSlice(errStack, 1, -1); + } + } + } + + if (!self.underscoreErrAssigned) { + self.lastError = e; + } + + if ( + options[kStandaloneREPL] && + process.listenerCount("uncaughtException") !== 0 + ) { + process.nextTick(() => { + process.emit("uncaughtException", e); + self.clearBufferedCommand(); + self.lines.level = []; + if (!self.closed) { + self.displayPrompt(); + } + }); + } else { + if (errStack === "") { + errStack = self.writer(e); + } + const lines = SideEffectFreeRegExpPrototypeSymbolSplit( + /(?<=\n)/, + errStack + ); + let matched = false; + + errStack = ""; + ArrayPrototypeForEach(lines, (line) => { + if ( + !matched && + RegExpPrototypeExec(/^\[?([A-Z][a-z0-9_]*)*Error/, line) !== null + ) { + errStack += + writer.options.breakLength >= line.length + ? `Uncaught ${line}` + : `Uncaught:\n${line}`; + matched = true; + } else { + errStack += line; + } + }); + if (!matched) { + const ln = lines.length === 1 ? " " : ":\n"; + errStack = `Uncaught${ln}${errStack}`; + } + // Normalize line endings. + errStack += StringPrototypeEndsWith(errStack, "\n") ? "" : "\n"; + self.output.write(errStack); + self.clearBufferedCommand(); + self.lines.level = []; + if (!self.closed) { + self.displayPrompt(); + } + } + }); + + self.clearBufferedCommand(); + + function completer(text, cb) { + FunctionPrototypeCall( + complete, + self, + text, + self.editorMode ? self.completeOnEditorMode(cb) : cb + ); + } + + self.resetContext(); + + this.commands = { __proto__: null }; + defineDefaultCommands(this); + + // Figure out which "writer" function to use + self.writer = options.writer || module.exports.writer; + + if (self.writer === writer) { + // Conditionally turn on ANSI coloring. 
+ writer.options.colors = self.useColors; + + if (options[kStandaloneREPL]) { + ObjectDefineProperty(inspect, "replDefaults", { + __proto__: null, + get() { + return writer.options; + }, + set(options) { + validateObject(options, "options"); + return ObjectAssign(writer.options, options); + }, + enumerable: true, + configurable: true, + }); + } + } + + function _parseREPLKeyword(keyword, rest) { + const cmd = this.commands[keyword]; + if (cmd) { + FunctionPrototypeCall(cmd.action, this, rest); + return true; + } + return false; + } + + self.on("close", function emitExit() { + if (paused) { + ArrayPrototypePush(pausedBuffer, ["close"]); + return; + } + self.emit("exit"); + }); + + let sawSIGINT = false; + let sawCtrlD = false; + const prioritizedSigintQueue = new SafeSet(); + self.on("SIGINT", function onSigInt() { + if (prioritizedSigintQueue.size > 0) { + for (const task of prioritizedSigintQueue) { + task(); + } + return; + } + + const empty = self.line.length === 0; + self.clearLine(); + _turnOffEditorMode(self); + + const cmd = self[kBufferedCommandSymbol]; + if (!(cmd && cmd.length > 0) && empty) { + if (sawSIGINT) { + self.close(); + sawSIGINT = false; + return; + } + self.output.write( + "(To exit, press Ctrl+C again or Ctrl+D or type .exit)\n" + ); + sawSIGINT = true; + } else { + sawSIGINT = false; + } + + self.clearBufferedCommand(); + self.lines.level = []; + self.displayPrompt(); + }); + + self.on("line", function onLine(cmd) { + debug("line %j", cmd); + cmd ||= ""; + sawSIGINT = false; + + if (self.editorMode) { + self[kBufferedCommandSymbol] += cmd + "\n"; + + // code alignment + const matches = + self._sawKeyPress && !self[kLoadingSymbol] + ? RegExpPrototypeExec(/^\s+/, cmd) + : null; + if (matches) { + const prefix = matches[0]; + self.write(prefix); + self.line = prefix; + self.cursor = prefix.length; + } + FunctionPrototypeCall(_memory, self, cmd); + return; + } + + // Check REPL keywords and empty lines against a trimmed line input. + const trimmedCmd = StringPrototypeTrim(cmd); + + // Check to see if a REPL keyword was used. If it returns true, + // display next prompt and return. + if (trimmedCmd) { + if ( + StringPrototypeCharAt(trimmedCmd, 0) === "." && + StringPrototypeCharAt(trimmedCmd, 1) !== "." 
&&
+ NumberIsNaN(NumberParseFloat(trimmedCmd))
+ ) {
+ const matches = RegExpPrototypeExec(
+ /^\.([^\s]+)\s*(.*)$/,
+ trimmedCmd
+ );
+ const keyword = matches?.[1];
+ const rest = matches?.[2];
+ if (
+ FunctionPrototypeCall(_parseREPLKeyword, self, keyword, rest) ===
+ true
+ ) {
+ return;
+ }
+ if (!self[kBufferedCommandSymbol]) {
+ self.output.write("Invalid REPL keyword\n");
+ finish(null);
+ return;
+ }
+ }
+ }
+
+ const evalCmd = self[kBufferedCommandSymbol] + cmd + "\n";
+
+ debug("eval %j", evalCmd);
+ self.eval(evalCmd, self.context, getREPLResourceName(), finish);
+
+ function finish(e, ret) {
+ debug("finish", e, ret);
+ FunctionPrototypeCall(_memory, self, cmd);
+
+ if (
+ e &&
+ !self[kBufferedCommandSymbol] &&
+ StringPrototypeStartsWith(StringPrototypeTrim(cmd), "npm ") &&
+ !(e instanceof Recoverable)
+ ) {
+ self.output.write(
+ "npm should be run outside of the " +
+ "Node.js REPL, in your normal shell.\n" +
+ "(Press Ctrl+D to exit.)\n"
+ );
+ self.displayPrompt();
+ return;
+ }
+
+ // If the error was a SyntaxError and not a JSON.parse error,
+ // we can start a multiline command.
+ if (e instanceof Recoverable && !sawCtrlD) {
+ if (self.terminal) {
+ self[kAddNewLineOnTTY]();
+ } else {
+ self[kBufferedCommandSymbol] += cmd + "\n";
+ self.displayPrompt();
+ }
+ return;
+ }
+
+ if (e) {
+ self._domain.emit("error", e.err || e);
+ self[kLastCommandErrored] = true;
+ }
+
+ // Clear buffer if no SyntaxErrors
+ self.clearBufferedCommand();
+ sawCtrlD = false;
+
+ // If we got any output - print it (if no error)
+ if (
+ !e &&
+ // When an invalid REPL command is used, error message is printed
+ // immediately. We don't have to print anything else. So, only when
+ // the second argument to this function is there, print it.
+ arguments.length === 2 &&
+ (!self.ignoreUndefined || ret !== undefined)
+ ) {
+ if (!self.underscoreAssigned) {
+ self.last = ret;
+ }
+ self.output.write(self.writer(ret) + "\n");
+ }
+
+ // If the REPL server hasn't closed, display the prompt again (unless we
+ // already did by emitting the 'error' event on the domain instance).
+ if (!self.closed && !e) {
+ self[kLastCommandErrored] = false;
+ self.displayPrompt();
+ }
+ }
+ });
+
+ self.on("SIGCONT", function onSigCont() {
+ if (self.editorMode) {
+ self.output.write(`${self._initialPrompt}.editor\n`);
+ self.output.write(
+ "// Entering editor mode (Ctrl+D to finish, Ctrl+C to cancel)\n"
+ );
+ self.output.write(`${self[kBufferedCommandSymbol]}\n`);
+ self.prompt(true);
+ } else {
+ self.displayPrompt(true);
+ }
+ });
+
+ const { reverseSearch } = setupReverseSearch(this);
+
+ const { clearPreview, showPreview } = setupPreview(
+ this,
+ kContextId,
+ kBufferedCommandSymbol,
+ preview
+ );
+
+ // Wrap readline tty to enable editor mode and pausing.
+ const ttyWrite = FunctionPrototypeBind(self._ttyWrite, self);
+ self._ttyWrite = (d, key) => {
+ key ||= {};
+ if (paused && !(self.breakEvalOnSigint && key.ctrl && key.name === "c")) {
+ ArrayPrototypePush(pausedBuffer, [
+ "key",
+ [d, key],
+ self.isCompletionEnabled,
+ ]);
+ return;
+ }
+ if (!self.editorMode || !self.terminal) {
+ // Before exiting, make sure to clear the line.
+ if ( + key.ctrl && + key.name === "d" && + self.cursor === 0 && + self.line.length === 0 + ) { + self.clearLine(); + } + clearPreview(key); + if (!reverseSearch(d, key)) { + ttyWrite(d, key); + const showCompletionPreview = key.name !== "escape"; + showPreview(showCompletionPreview); + } + return; + } + + // Editor mode + if (key.ctrl && !key.shift) { + switch (key.name) { + // TODO(BridgeAR): There should not be a special mode necessary for full + // multiline support. + case "d": // End editor mode + _turnOffEditorMode(self); + sawCtrlD = true; + ttyWrite(d, { name: "return" }); + break; + case "n": // Override next history item + case "p": // Override previous history item + break; + default: + ttyWrite(d, key); + } + } else { + switch (key.name) { + case "up": // Override previous history item + case "down": // Override next history item + break; + case "tab": + // Prevent double tab behavior + self._previousKey = null; + ttyWrite(d, key); + break; + default: + ttyWrite(d, key); + } + } + }; + + self.displayPrompt(); + } + setupHistory(historyConfig = {}, cb) { + // TODO(puskin94): necessary because historyConfig can be a string for backwards compatibility + const options = + typeof historyConfig === "string" + ? { filePath: historyConfig } + : historyConfig; + + if (typeof cb === "function") { + options.onHistoryFileLoaded = cb; + } + + this.setupHistoryManager(options); + } + clearBufferedCommand() { + this[kBufferedCommandSymbol] = ""; + } + close() { + if ( + this.terminal && + this.historyManager.isFlushing && + !this._closingOnFlush + ) { + this._closingOnFlush = true; + this.once("flushHistory", () => super.close()); + + return; + } + process.nextTick(() => super.close()); + } + createContext() { + let context; + if (this.useGlobal) { + context = globalThis; + } else { + sendInspectorCommand( + (session) => { + session.post("Runtime.enable"); + session.once("Runtime.executionContextCreated", ({ params }) => { + this[kContextId] = params.context.id; + }); + context = vm.createContext(); + session.post("Runtime.disable"); + }, + () => { + context = vm.createContext(); + } + ); + ArrayPrototypeForEach(ObjectGetOwnPropertyNames(globalThis), (name) => { + // Only set properties that do not already exist as a global builtin. + if (!globalBuiltins.has(name)) { + ObjectDefineProperty(context, name, { + __proto__: null, + ...ObjectGetOwnPropertyDescriptor(globalThis, name), + }); + } + }); + context.global = context; + const _console = new Console(this.output); + ObjectDefineProperty(context, "console", { + __proto__: null, + configurable: true, + writable: true, + value: _console, + }); + } + + const replModule = new CJSModule(""); + replModule.paths = CJSModule._resolveLookupPaths("", parentModule); + + ObjectDefineProperty(context, "module", { + __proto__: null, + configurable: true, + writable: true, + value: replModule, + }); + ObjectDefineProperty(context, "require", { + __proto__: null, + configurable: true, + writable: true, + value: makeRequireFunction(replModule), + }); + + addBuiltinLibsToObject(context, ""); + + return context; + } + resetContext() { + this.context = this.createContext(); + this.underscoreAssigned = false; + this.underscoreErrAssigned = false; + // TODO(BridgeAR): Deprecate the lines. 
+ this.lines = [];
+ this.lines.level = [];
+
+ ObjectDefineProperty(this.context, "_", {
+ __proto__: null,
+ configurable: true,
+ get: () => this.last,
+ set: (value) => {
+ this.last = value;
+ if (!this.underscoreAssigned) {
+ this.underscoreAssigned = true;
+ this.output.write("Expression assignment to _ now disabled.\n");
+ }
+ },
+ });
+
+ ObjectDefineProperty(this.context, "_error", {
+ __proto__: null,
+ configurable: true,
+ get: () => this.lastError,
+ set: (value) => {
+ this.lastError = value;
+ if (!this.underscoreErrAssigned) {
+ this.underscoreErrAssigned = true;
+ this.output.write("Expression assignment to _error now disabled.\n");
+ }
+ },
+ });
+
+ // Allow REPL extensions to extend the new context
+ this.emit("reset", this.context);
+ }
+ displayPrompt(preserveCursor) {
+ let prompt = this._initialPrompt;
+ if (this[kBufferedCommandSymbol].length) {
+ prompt = kMultilinePrompt.description;
+ }
+
+ // Do not overwrite `_initialPrompt` here
+ super.setPrompt(prompt);
+ this.prompt(preserveCursor);
+ }
+ // When invoked as an API method, overwrite _initialPrompt
+ setPrompt(prompt) {
+ this._initialPrompt = prompt;
+ super.setPrompt(prompt);
+ }
+ complete() {
+ ReflectApply(this.completer, this, arguments);
+ }
+ completeOnEditorMode(callback) {
+ return (err, results) => {
+ if (err) return callback(err);
+
+ const { 0: completions, 1: completeOn = "" } = results;
+ let result = ArrayPrototypeFilter(completions, Boolean);
+
+ if (completeOn && result.length !== 0) {
+ result = [commonPrefix(result)];
+ }
+
+ callback(null, [result, completeOn]);
+ };
+ }
+ defineCommand(keyword, cmd) {
+ if (typeof cmd === "function") {
+ cmd = { action: cmd };
+ } else {
+ validateFunction(cmd.action, "cmd.action");
+ }
+ this.commands[keyword] = cmd;
+ }
+}
+
+// Prompt is a string to print on each line for the prompt,
+// source is a stream to use for I/O, defaulting to stdin/stdout.
+function start(prompt, source, eval_, useGlobal, ignoreUndefined, replMode) {
+ return new REPLServer(
+ prompt,
+ source,
+ eval_,
+ useGlobal,
+ ignoreUndefined,
+ replMode
+ );
+}
+
+// TODO(BridgeAR): This should be replaced with acorn to build an AST. The
+// language became more complex and using a simple approach like this is not
+// sufficient anymore.
+function _memory(cmd) {
+ const self = this;
+ self.lines ||= [];
+ self.lines.level ||= [];
+
+ // Save the line so I can do magic later
+ if (cmd) {
+ const len = self.lines.level.length ? self.lines.level.length - 1 : 0;
+ ArrayPrototypePush(self.lines, StringPrototypeRepeat(" ", len) + cmd);
+ } else {
+ // I don't want to not change the format too much...
+ ArrayPrototypePush(self.lines, "");
+ }
+
+ if (!cmd) {
+ self.lines.level = [];
+ return;
+ }
+
+ // I need to know "depth."
+ // Because I can not tell the difference between a } that
+ // closes an object literal and a } that closes a function
+ const countMatches = (regex, str) => {
+ let count = 0;
+ while (RegExpPrototypeExec(regex, str) !== null) count++;
+ return count;
+ };
+
+ // Going down is { and ( e.g. function() {
+ // going up is } and )
+ // `countMatches` returns plain numbers, so compare the counts directly.
+ const dw = countMatches(/[{(]/g, cmd);
+ const up = countMatches(/[})]/g, cmd);
+ let depth = dw - up;
+
+ if (depth) {
+ (function workIt() {
+ if (depth > 0) {
+ // Going... down.
+ // Push the line#, depth count, and if the line is a function.
+ // Since JS only has functional scope I only need to remove + // "function() {" lines, clearly this will not work for + // "function() + // {" but nothing should break, only tab completion for local + // scope will not work for this function. + ArrayPrototypePush(self.lines.level, { + line: self.lines.length - 1, + depth: depth, + }); + } else if (depth < 0) { + // Going... up. + const curr = ArrayPrototypePop(self.lines.level); + if (curr) { + const tmp = curr.depth + depth; + if (tmp < 0) { + // More to go, recurse + depth += curr.depth; + workIt(); + } else if (tmp > 0) { + // Remove and push back + curr.depth += depth; + ArrayPrototypePush(self.lines.level, curr); + } + } + } + })(); + } +} + +function _turnOnEditorMode(repl) { + repl.editorMode = true; + FunctionPrototypeCall(Interface.prototype.setPrompt, repl, ""); +} + +function _turnOffEditorMode(repl) { + repl.editorMode = false; + repl.setPrompt(repl._initialPrompt); +} + +function defineDefaultCommands(repl) { + repl.defineCommand("break", { + help: "Sometimes you get stuck, this gets you out", + action: function () { + this.clearBufferedCommand(); + this.displayPrompt(); + }, + }); + + let clearMessage; + if (repl.useGlobal) { + clearMessage = "Alias for .break"; + } else { + clearMessage = "Break, and also clear the local context"; + } + repl.defineCommand("clear", { + help: clearMessage, + action: function () { + this.clearBufferedCommand(); + if (!this.useGlobal) { + this.output.write("Clearing context...\n"); + this.resetContext(); + } + this.displayPrompt(); + }, + }); + + repl.defineCommand("exit", { + help: "Exit the REPL", + action: function () { + this.close(); + }, + }); + + repl.defineCommand("help", { + help: "Print this help message", + action: function () { + const names = ArrayPrototypeSort(ObjectKeys(this.commands)); + const longestNameLength = MathMaxApply( + ArrayPrototypeMap(names, (name) => name.length) + ); + ArrayPrototypeForEach(names, (name) => { + const cmd = this.commands[name]; + const spaces = StringPrototypeRepeat( + " ", + longestNameLength - name.length + 3 + ); + const line = `.${name}${cmd.help ? 
spaces + cmd.help : ""}\n`; + this.output.write(line); + }); + this.output.write( + "\nPress Ctrl+C to abort current expression, " + + "Ctrl+D to exit the REPL\n" + ); + this.displayPrompt(); + }, + }); + + repl.defineCommand("save", { + help: "Save all evaluated commands in this REPL session to a file", + action: function (file) { + try { + if (file === "") { + throw new ERR_MISSING_ARGS("file"); + } + fs.writeFileSync(file, ArrayPrototypeJoin(this.lines, "\n")); + this.output.write(`Session saved to: ${file}\n`); + } catch (error) { + if (error instanceof ERR_MISSING_ARGS) { + this.output.write(`${error.message}\n`); + } else { + this.output.write(`Failed to save: ${file}\n`); + } + } + this.displayPrompt(); + }, + }); + + repl.defineCommand("load", { + help: "Load JS from a file into the REPL session", + action: function (file) { + try { + if (file === "") { + throw new ERR_MISSING_ARGS("file"); + } + const stats = fs.statSync(file); + if (stats && stats.isFile()) { + _turnOnEditorMode(this); + this[kLoadingSymbol] = true; + const data = fs.readFileSync(file, "utf8"); + this.write(data); + this[kLoadingSymbol] = false; + _turnOffEditorMode(this); + this.write("\n"); + } else { + this.output.write(`Failed to load: ${file} is not a valid file\n`); + } + } catch (error) { + if (error instanceof ERR_MISSING_ARGS) { + this.output.write(`${error.message}\n`); + } else { + this.output.write(`Failed to load: ${file}\n`); + } + } + this.displayPrompt(); + }, + }); + if (repl.terminal) { + repl.defineCommand("editor", { + help: "Enter editor mode", + action() { + _turnOnEditorMode(this); + this.output.write( + "// Entering editor mode (Ctrl+D to finish, Ctrl+C to cancel)\n" + ); + }, + }); + } +} + +module.exports = { + start, + writer, + REPLServer, + REPL_MODE_SLOPPY, + REPL_MODE_STRICT, + Recoverable, + isValidSyntax, +}; + +ObjectDefineProperty(module.exports, "builtinModules", { + __proto__: null, + get: pendingDeprecation + ? deprecate( + () => getReplBuiltinLibs(), + "repl.builtinModules is deprecated. Check module.builtinModules instead", + "DEP0191" + ) + : () => getReplBuiltinLibs(), + set: pendingDeprecation + ? deprecate( + (val) => setReplBuiltinLibs(val), + "repl.builtinModules is deprecated. Check module.builtinModules instead", + "DEP0191" + ) + : (val) => setReplBuiltinLibs(val), + enumerable: false, + configurable: true, +}); + +ObjectDefineProperty(module.exports, "_builtinLibs", { + __proto__: null, + get: pendingDeprecation + ? deprecate( + () => getReplBuiltinLibs(), + "repl._builtinLibs is deprecated. Check module.builtinModules instead", + "DEP0142" + ) + : () => getReplBuiltinLibs(), + set: pendingDeprecation + ? deprecate( + (val) => setReplBuiltinLibs(val), + "repl._builtinLibs is deprecated. 
Check module.builtinModules instead", + "DEP0142" + ) + : (val) => setReplBuiltinLibs(val), + enumerable: false, + configurable: true, +}); diff --git a/.codesandbox/node/section-links.js b/.codesandbox/node/section-links.js new file mode 100644 index 00000000..a926286d --- /dev/null +++ b/.codesandbox/node/section-links.js @@ -0,0 +1,21 @@ +document.addEventListener('DOMContentLoaded', function(event) { + function f(n) { + if (n.nodeType == 1 && n.tagName.match(/^H[1-6]$/)) { + var span = document.createElement('span'); + span.className = 'section-link'; + span.textContent = '\xa0'; + var a = document.createElement('a'); + a.href = '#' + n.parentNode.id; + a.textContent = '\xb6'; + span.appendChild(a); + n.appendChild(span); + } else { + n = n.firstChild; + while (n) { + f(n); + n = n.nextSibling; + } + } + } + f(document.getElementById('sections')); + }, false); diff --git a/.codesandbox/node/sqlite.js b/.codesandbox/node/sqlite.js new file mode 100644 index 00000000..7724f789 --- /dev/null +++ b/.codesandbox/node/sqlite.js @@ -0,0 +1,6 @@ +"use strict"; +const { emitExperimentalWarning } = require("internal/util"); + +emitExperimentalWarning("SQLite"); + +module.exports = internalBinding("sqlite"); diff --git a/.codesandbox/node/stream.js b/.codesandbox/node/stream.js new file mode 100644 index 00000000..3d75a30e --- /dev/null +++ b/.codesandbox/node/stream.js @@ -0,0 +1,130 @@ +"use strict"; + +const { ObjectDefineProperty, ObjectKeys, ReflectApply } = primordials; + +const { + promisify: { custom: customPromisify }, +} = require("internal/util"); + +const { + streamReturningOperators, + promiseReturningOperators, +} = require("internal/streams/operators"); + +const { + codes: { ERR_ILLEGAL_CONSTRUCTOR }, +} = require("internal/errors"); +const compose = require("internal/streams/compose"); +const { + setDefaultHighWaterMark, + getDefaultHighWaterMark, +} = require("internal/streams/state"); +const { pipeline } = require("internal/streams/pipeline"); +const { destroyer } = require("internal/streams/destroy"); +const eos = require("internal/streams/end-of-stream"); +const internalBuffer = require("internal/buffer"); + +const promises = require("stream/promises"); +const utils = require("internal/streams/utils"); +const { isArrayBufferView, isUint8Array } = require("internal/util/types"); + +const Stream = (module.exports = require("internal/streams/legacy").Stream); + +Stream.isDestroyed = utils.isDestroyed; +Stream.isDisturbed = utils.isDisturbed; +Stream.isErrored = utils.isErrored; +Stream.isReadable = utils.isReadable; +Stream.isWritable = utils.isWritable; + +Stream.Readable = require("internal/streams/readable"); +const streamKeys = ObjectKeys(streamReturningOperators); +for (let i = 0; i < streamKeys.length; i++) { + const key = streamKeys[i]; + const op = streamReturningOperators[key]; + function fn(...args) { + if (new.target) { + throw new ERR_ILLEGAL_CONSTRUCTOR(); + } + return Stream.Readable.from(ReflectApply(op, this, args)); + } + ObjectDefineProperty(fn, "name", { __proto__: null, value: op.name }); + ObjectDefineProperty(fn, "length", { __proto__: null, value: op.length }); + ObjectDefineProperty(Stream.Readable.prototype, key, { + __proto__: null, + value: fn, + enumerable: false, + configurable: true, + writable: true, + }); +} +const promiseKeys = ObjectKeys(promiseReturningOperators); +for (let i = 0; i < promiseKeys.length; i++) { + const key = promiseKeys[i]; + const op = promiseReturningOperators[key]; + function fn(...args) { + if (new.target) { + throw new 
ERR_ILLEGAL_CONSTRUCTOR(); + } + return ReflectApply(op, this, args); + } + ObjectDefineProperty(fn, "name", { __proto__: null, value: op.name }); + ObjectDefineProperty(fn, "length", { __proto__: null, value: op.length }); + ObjectDefineProperty(Stream.Readable.prototype, key, { + __proto__: null, + value: fn, + enumerable: false, + configurable: true, + writable: true, + }); +} +Stream.Writable = require("internal/streams/writable"); +Stream.Duplex = require("internal/streams/duplex"); +Stream.Transform = require("internal/streams/transform"); +Stream.PassThrough = require("internal/streams/passthrough"); +Stream.duplexPair = require("internal/streams/duplexpair"); +Stream.pipeline = pipeline; +const { addAbortSignal } = require("internal/streams/add-abort-signal"); +Stream.addAbortSignal = addAbortSignal; +Stream.finished = eos; +Stream.destroy = destroyer; +Stream.compose = compose; +Stream.setDefaultHighWaterMark = setDefaultHighWaterMark; +Stream.getDefaultHighWaterMark = getDefaultHighWaterMark; + +ObjectDefineProperty(Stream, "promises", { + __proto__: null, + configurable: true, + enumerable: true, + get() { + return promises; + }, +}); + +ObjectDefineProperty(pipeline, customPromisify, { + __proto__: null, + enumerable: true, + get() { + return promises.pipeline; + }, +}); + +ObjectDefineProperty(eos, customPromisify, { + __proto__: null, + enumerable: true, + get() { + return promises.finished; + }, +}); + +// Backwards-compat with node 0.4.x +Stream.Stream = Stream; + +Stream._isArrayBufferView = isArrayBufferView; +Stream._isUint8Array = isUint8Array; +Stream._uint8ArrayToBuffer = function _uint8ArrayToBuffer(chunk) { + return new internalBuffer.FastBuffer( + chunk.buffer, + chunk.byteOffset, + chunk.byteLength + ); +}; diff --git a/.codesandbox/node/string_decoder.js b/.codesandbox/node/string_decoder.js new file mode 100644 index 00000000..b774af1b --- /dev/null +++ b/.codesandbox/node/string_decoder.js @@ -0,0 +1,125 @@ +'use strict'; + +const { + ArrayBufferIsView, + ObjectDefineProperties, + Symbol, + TypedArrayPrototypeSubarray, +} = primordials; + +const { Buffer } = require('buffer'); +const { + kIncompleteCharactersStart, + kIncompleteCharactersEnd, + kMissingBytes, + kBufferedBytes, + kEncodingField, + kSize, + decode, + flush, +} = internalBinding('string_decoder'); +const { + encodingsMap, + normalizeEncoding, +} = require('internal/util'); +const { + ERR_INVALID_ARG_TYPE, + ERR_INVALID_THIS, + ERR_UNKNOWN_ENCODING, +} = require('internal/errors').codes; + +const kNativeDecoder = Symbol('kNativeDecoder'); + +/** + * StringDecoder provides an interface for efficiently splitting a series of + * buffers into a series of JS strings without breaking apart multibyte + * characters. 
+ * @param {string} [encoding] + */ +function StringDecoder(encoding) { + this.encoding = normalizeEncoding(encoding); + if (this.encoding === undefined) { + throw new ERR_UNKNOWN_ENCODING(encoding); + } + this[kNativeDecoder] = Buffer.alloc(kSize); + this[kNativeDecoder][kEncodingField] = encodingsMap[this.encoding]; +} + +/** + * Returns a decoded string, omitting any incomplete multi-bytes + * characters at the end of the Buffer, or TypedArray, or DataView + * @param {string | Buffer | TypedArray | DataView} buf + * @returns {string} + * @throws {TypeError} Throws when buf is not in one of supported types + */ +StringDecoder.prototype.write = function write(buf) { + if (typeof buf === 'string') + return buf; + if (!ArrayBufferIsView(buf)) + throw new ERR_INVALID_ARG_TYPE('buf', + ['Buffer', 'TypedArray', 'DataView'], + buf); + if (!this[kNativeDecoder]) { + throw new ERR_INVALID_THIS('StringDecoder'); + } + return decode(this[kNativeDecoder], buf); +}; + +/** + * Returns any remaining input stored in the internal buffer as a string. + * After end() is called, the stringDecoder object can be reused for new + * input. + * @param {string | Buffer | TypedArray | DataView} [buf] + * @returns {string} + */ +StringDecoder.prototype.end = function end(buf) { + const ret = buf === undefined ? '' : this.write(buf); + if (this[kNativeDecoder][kBufferedBytes] > 0) + return ret + flush(this[kNativeDecoder]); + return ret; +}; + +/* Everything below this line is undocumented legacy stuff. */ +/** + * + * @param {string | Buffer | TypedArray | DataView} buf + * @param {number} offset + * @returns {string} + */ +StringDecoder.prototype.text = function text(buf, offset) { + this[kNativeDecoder][kMissingBytes] = 0; + this[kNativeDecoder][kBufferedBytes] = 0; + return this.write(buf.slice(offset)); +}; + +ObjectDefineProperties(StringDecoder.prototype, { + lastChar: { + __proto__: null, + configurable: true, + enumerable: true, + get() { + return TypedArrayPrototypeSubarray(this[kNativeDecoder], + kIncompleteCharactersStart, + kIncompleteCharactersEnd); + }, + }, + lastNeed: { + __proto__: null, + configurable: true, + enumerable: true, + get() { + return this[kNativeDecoder][kMissingBytes]; + }, + }, + lastTotal: { + __proto__: null, + configurable: true, + enumerable: true, + get() { + return this[kNativeDecoder][kBufferedBytes] + + this[kNativeDecoder][kMissingBytes]; + }, + }, +}); + +exports.StringDecoder = StringDecoder; \ No newline at end of file diff --git a/.codesandbox/node/task_processor.js b/.codesandbox/node/task_processor.js new file mode 100644 index 00000000..ebb455ee --- /dev/null +++ b/.codesandbox/node/task_processor.js @@ -0,0 +1,91 @@ +const { parentPort } = require('node:worker_threads'); +parentPort.on('message', (task) => { + parentPort.postMessage(task.a + task.b); +}); +const { AsyncResource } = require('node:async_hooks'); +const { EventEmitter } = require('node:events'); +const path = require('node:path'); +const { Worker } = require('node:worker_threads'); + +const kTaskInfo = Symbol('kTaskInfo'); +const kWorkerFreedEvent = Symbol('kWorkerFreedEvent'); + +class WorkerPoolTaskInfo extends AsyncResource { + constructor(callback) { + super('WorkerPoolTaskInfo'); + this.callback = callback; + } + + done(err, result) { + this.runInAsyncScope(this.callback, null, err, result); + this.emitDestroy(); // `TaskInfo`s are used only once. 
+ } +} + +class WorkerPool extends EventEmitter { + constructor(numThreads) { + super(); + this.numThreads = numThreads; + this.workers = []; + this.freeWorkers = []; + this.tasks = []; + + for (let i = 0; i < numThreads; i++) + this.addNewWorker(); + + // Any time the kWorkerFreedEvent is emitted, dispatch + // the next task pending in the queue, if any. + this.on(kWorkerFreedEvent, () => { + if (this.tasks.length > 0) { + const { task, callback } = this.tasks.shift(); + this.runTask(task, callback); + } + }); + } + + addNewWorker() { + const worker = new Worker(path.resolve(__dirname, 'task_processor.js')); + worker.on('message', (result) => { + // In case of success: Call the callback that was passed to `runTask`, + // remove the `TaskInfo` associated with the Worker, and mark it as free + // again. + worker[kTaskInfo].done(null, result); + worker[kTaskInfo] = null; + this.freeWorkers.push(worker); + this.emit(kWorkerFreedEvent); + }); + worker.on('error', (err) => { + // In case of an uncaught exception: Call the callback that was passed to + // `runTask` with the error. + if (worker[kTaskInfo]) + worker[kTaskInfo].done(err, null); + else + this.emit('error', err); + // Remove the worker from the list and start a new Worker to replace the + // current one. + this.workers.splice(this.workers.indexOf(worker), 1); + this.addNewWorker(); + }); + this.workers.push(worker); + this.freeWorkers.push(worker); + this.emit(kWorkerFreedEvent); + } + + runTask(task, callback) { + if (this.freeWorkers.length === 0) { + // No free threads, wait until a worker thread becomes free. + this.tasks.push({ task, callback }); + return; + } + + const worker = this.freeWorkers.pop(); + worker[kTaskInfo] = new WorkerPoolTaskInfo(callback); + worker.postMessage(task); + } + + close() { + for (const worker of this.workers) worker.terminate(); + } +} + +module.exports = WorkerPool; \ No newline at end of file diff --git a/.codesandbox/node/test.js b/.codesandbox/node/test.js new file mode 100644 index 00000000..23df1276 --- /dev/null +++ b/.codesandbox/node/test.js @@ -0,0 +1,83 @@ +"use strict"; + +const { ObjectAssign, ObjectDefineProperty } = primordials; + +const { + test, + suite, + before, + after, + beforeEach, + afterEach, +} = require("internal/test_runner/harness"); +const { run } = require("internal/test_runner/runner"); + +module.exports = test; +ObjectAssign(module.exports, { + after, + afterEach, + before, + beforeEach, + describe: suite, + it: test, + run, + suite, + test, +}); + +let lazyMock; + +ObjectDefineProperty(module.exports, "mock", { + __proto__: null, + configurable: true, + enumerable: true, + get() { + if (lazyMock === undefined) { + const { MockTracker } = require("internal/test_runner/mock/mock"); + + lazyMock = new MockTracker(); + } + + return lazyMock; + }, +}); + +let lazySnapshot; + +ObjectDefineProperty(module.exports, "snapshot", { + __proto__: null, + configurable: true, + enumerable: true, + get() { + if (lazySnapshot === undefined) { + const { + setDefaultSnapshotSerializers, + setResolveSnapshotPath, + } = require("internal/test_runner/snapshot"); + + lazySnapshot = { + __proto__: null, + setDefaultSnapshotSerializers, + setResolveSnapshotPath, + }; + } + + return lazySnapshot; + }, +}); + +let lazyAssert; + +ObjectDefineProperty(module.exports, "assert", { + __proto__: null, + configurable: true, + enumerable: true, + get() { + if (lazyAssert === undefined) { + const { register } = require("internal/test_runner/assert"); + lazyAssert = { __proto__: null, register }; + } + 
+ return lazyAssert; + }, +}); diff --git a/.codesandbox/node/tls.js b/.codesandbox/node/tls.js new file mode 100644 index 00000000..6cd6b269 --- /dev/null +++ b/.codesandbox/node/tls.js @@ -0,0 +1,405 @@ +'use strict'; + +const { + Array, + ArrayIsArray, + // eslint-disable-next-line no-restricted-syntax + ArrayPrototypePush, + JSONParse, + ObjectDefineProperty, + ObjectFreeze, + StringFromCharCode, +} = primordials; + +const { + ERR_TLS_CERT_ALTNAME_FORMAT, + ERR_TLS_CERT_ALTNAME_INVALID, + ERR_OUT_OF_RANGE, + ERR_INVALID_ARG_VALUE, + ERR_INVALID_ARG_TYPE, +} = require('internal/errors').codes; +const internalUtil = require('internal/util'); +internalUtil.assertCrypto(); +const { + isArrayBufferView, + isUint8Array, +} = require('internal/util/types'); + +const net = require('net'); +const { getOptionValue } = require('internal/options'); +const { + getBundledRootCertificates, + getExtraCACertificates, + getSystemCACertificates, + resetRootCertStore, + getUserRootCertificates, + getSSLCiphers, +} = internalBinding('crypto'); +const { Buffer } = require('buffer'); +const { canonicalizeIP } = internalBinding('cares_wrap'); +const _tls_common = require('_tls_common'); +const _tls_wrap = require('_tls_wrap'); +const { validateString } = require('internal/validators'); + +// Allow {CLIENT_RENEG_LIMIT} client-initiated session renegotiations +// every {CLIENT_RENEG_WINDOW} seconds. An error event is emitted if more +// renegotiations are seen. The settings are applied to all remote client +// connections. +exports.CLIENT_RENEG_LIMIT = 3; +exports.CLIENT_RENEG_WINDOW = 600; + +exports.DEFAULT_CIPHERS = getOptionValue('--tls-cipher-list'); + +exports.DEFAULT_ECDH_CURVE = 'auto'; + +if (getOptionValue('--tls-min-v1.0')) + exports.DEFAULT_MIN_VERSION = 'TLSv1'; +else if (getOptionValue('--tls-min-v1.1')) + exports.DEFAULT_MIN_VERSION = 'TLSv1.1'; +else if (getOptionValue('--tls-min-v1.2')) + exports.DEFAULT_MIN_VERSION = 'TLSv1.2'; +else if (getOptionValue('--tls-min-v1.3')) + exports.DEFAULT_MIN_VERSION = 'TLSv1.3'; +else + exports.DEFAULT_MIN_VERSION = 'TLSv1.2'; + +if (getOptionValue('--tls-max-v1.3')) + exports.DEFAULT_MAX_VERSION = 'TLSv1.3'; +else if (getOptionValue('--tls-max-v1.2')) + exports.DEFAULT_MAX_VERSION = 'TLSv1.2'; +else + exports.DEFAULT_MAX_VERSION = 'TLSv1.3'; // Will depend on node version. 
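+
+// Illustrative example (not from the upstream tls.js): the defaults above are
+// only fallbacks; a caller can still narrow the protocol range per connection
+// by passing explicit options, e.g.:
+//
+//   const tls = require('node:tls');
+//   const socket = tls.connect({
+//     host: 'example.org',      // placeholder host
+//     port: 443,
+//     minVersion: 'TLSv1.2',    // overrides DEFAULT_MIN_VERSION
+//     maxVersion: 'TLSv1.3',    // overrides DEFAULT_MAX_VERSION
+//   });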
+ + +exports.getCiphers = internalUtil.cachedResult( + () => internalUtil.filterDuplicateStrings(getSSLCiphers(), true), +); + +let bundledRootCertificates; +function cacheBundledRootCertificates() { + bundledRootCertificates ||= ObjectFreeze(getBundledRootCertificates()); + + return bundledRootCertificates; +} + +ObjectDefineProperty(exports, 'rootCertificates', { + __proto__: null, + configurable: false, + enumerable: true, + get: cacheBundledRootCertificates, +}); + +let extraCACertificates; +function cacheExtraCACertificates() { + extraCACertificates ||= ObjectFreeze(getExtraCACertificates()); + + return extraCACertificates; +} + +let systemCACertificates; +function cacheSystemCACertificates() { + systemCACertificates ||= ObjectFreeze(getSystemCACertificates()); + + return systemCACertificates; +} + +let defaultCACertificates; +let hasResetDefaultCACertificates = false; + +function cacheDefaultCACertificates() { + if (defaultCACertificates) { return defaultCACertificates; } + + if (hasResetDefaultCACertificates) { + defaultCACertificates = getUserRootCertificates(); + ObjectFreeze(defaultCACertificates); + return defaultCACertificates; + } + + defaultCACertificates = []; + + if (!getOptionValue('--use-openssl-ca')) { + const bundled = cacheBundledRootCertificates(); + for (let i = 0; i < bundled.length; ++i) { + ArrayPrototypePush(defaultCACertificates, bundled[i]); + } + if (getOptionValue('--use-system-ca')) { + const system = cacheSystemCACertificates(); + for (let i = 0; i < system.length; ++i) { + + ArrayPrototypePush(defaultCACertificates, system[i]); + } + } + } + + if (process.env.NODE_EXTRA_CA_CERTS) { + const extra = cacheExtraCACertificates(); + for (let i = 0; i < extra.length; ++i) { + + ArrayPrototypePush(defaultCACertificates, extra[i]); + } + } + + ObjectFreeze(defaultCACertificates); + return defaultCACertificates; +} + +// TODO(joyeecheung): support X509Certificate output? 
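+// Illustrative usage sketch (not from the upstream tls.js), relying only on
+// the `getCACertificates()` wrapper defined and exported just below:
+//
+//   const tls = require('node:tls');
+//   const bundled = tls.getCACertificates('bundled');  // bundled Mozilla roots
+//   const defaults = tls.getCACertificates();          // the 'default' store
+//   console.log(`${defaults.length} PEM certificates in the default store`);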
+function getCACertificates(type = 'default') { + validateString(type, 'type'); + + switch (type) { + case 'default': + return cacheDefaultCACertificates(); + case 'bundled': + return cacheBundledRootCertificates(); + case 'system': + return cacheSystemCACertificates(); + case 'extra': + return cacheExtraCACertificates(); + default: + throw new ERR_INVALID_ARG_VALUE('type', type); + } +} +exports.getCACertificates = getCACertificates; + +function setDefaultCACertificates(certs) { + if (!ArrayIsArray(certs)) { + throw new ERR_INVALID_ARG_TYPE('certs', 'Array', certs); + } + + // Verify that all elements in the array are strings + for (let i = 0; i < certs.length; i++) { + if (typeof certs[i] !== 'string' && !isArrayBufferView(certs[i])) { + throw new ERR_INVALID_ARG_TYPE( + `certs[${i}]`, ['string', 'ArrayBufferView'], certs[i]); + } + } + + resetRootCertStore(certs); + defaultCACertificates = undefined; // Reset the cached default certificates + hasResetDefaultCACertificates = true; +} + +exports.setDefaultCACertificates = setDefaultCACertificates; + +// Convert protocols array into valid OpenSSL protocols list +// ("\x06spdy/2\x08http/1.1\x08http/1.0") +function convertProtocols(protocols) { + const lens = new Array(protocols.length); + const buff = Buffer.allocUnsafe(protocols.reduce((p, c, i) => { + const len = Buffer.byteLength(c); + if (len > 255) { + throw new ERR_OUT_OF_RANGE('The byte length of the protocol at index ' + + `${i} exceeds the maximum length.`, '<= 255', len, true); + } + lens[i] = len; + return p + 1 + len; + }, 0)); + + let offset = 0; + for (let i = 0, c = protocols.length; i < c; i++) { + buff[offset++] = lens[i]; + buff.write(protocols[i], offset); + offset += lens[i]; + } + + return buff; +} + +exports.convertALPNProtocols = function convertALPNProtocols(protocols, out) { + // If protocols is Array - translate it into buffer + if (ArrayIsArray(protocols)) { + out.ALPNProtocols = convertProtocols(protocols); + } else if (isUint8Array(protocols)) { + // Copy new buffer not to be modified by user. + out.ALPNProtocols = Buffer.from(protocols); + } else if (isArrayBufferView(protocols)) { + out.ALPNProtocols = Buffer.from(protocols.buffer.slice( + protocols.byteOffset, + protocols.byteOffset + protocols.byteLength, + )); + } +}; + +function unfqdn(host) { + return host.replace(/[.]$/, ''); +} + +// String#toLowerCase() is locale-sensitive so we use +// a conservative version that only lowercases A-Z. +function toLowerCase(c) { + return StringFromCharCode(32 + c.charCodeAt(0)); +} + +function splitHost(host) { + return unfqdn(host).replace(/[A-Z]/g, toLowerCase).split('.'); +} + +function check(hostParts, pattern, wildcards) { + // Empty strings, null, undefined, etc. never match. + if (!pattern) + return false; + + const patternParts = splitHost(pattern); + + if (hostParts.length !== patternParts.length) + return false; + + // Pattern has empty components, e.g. "bad..example.com". + if (patternParts.includes('')) + return false; + + // RFC 6125 allows IDNA U-labels (Unicode) in names but we have no + // good way to detect their encoding or normalize them so we simply + // reject them. Control characters and blanks are rejected as well + // because nothing good can come from accepting them. + const isBad = (s) => /[^\u0021-\u007F]/u.test(s); + if (patternParts.some(isBad)) + return false; + + // Check host parts from right to left first. 
+ for (let i = hostParts.length - 1; i > 0; i -= 1) { + if (hostParts[i] !== patternParts[i]) + return false; + } + + const hostSubdomain = hostParts[0]; + const patternSubdomain = patternParts[0]; + const patternSubdomainParts = patternSubdomain.split('*', 3); + + // Short-circuit when the subdomain does not contain a wildcard. + // RFC 6125 does not allow wildcard substitution for components + // containing IDNA A-labels (Punycode) so match those verbatim. + if (patternSubdomainParts.length === 1 || + patternSubdomain.includes('xn--')) + return hostSubdomain === patternSubdomain; + + if (!wildcards) + return false; + + // More than one wildcard is always wrong. + if (patternSubdomainParts.length > 2) + return false; + + // *.tld wildcards are not allowed. + if (patternParts.length <= 2) + return false; + + const { 0: prefix, 1: suffix } = patternSubdomainParts; + + if (prefix.length + suffix.length > hostSubdomain.length) + return false; + + if (!hostSubdomain.startsWith(prefix)) + return false; + + if (!hostSubdomain.endsWith(suffix)) + return false; + + return true; +} + +// This pattern is used to determine the length of escaped sequences within +// the subject alt names string. It allows any valid JSON string literal. +// This MUST match the JSON specification (ECMA-404 / RFC8259) exactly. +const jsonStringPattern = + // eslint-disable-next-line no-control-regex + /^"(?:[^"\\\u0000-\u001f]|\\(?:["\\/bfnrt]|u[0-9a-fA-F]{4}))*"/; + +function splitEscapedAltNames(altNames) { + const result = []; + let currentToken = ''; + let offset = 0; + while (offset !== altNames.length) { + const nextSep = altNames.indexOf(',', offset); + const nextQuote = altNames.indexOf('"', offset); + if (nextQuote !== -1 && (nextSep === -1 || nextQuote < nextSep)) { + // There is a quote character and there is no separator before the quote. + currentToken += altNames.substring(offset, nextQuote); + const match = jsonStringPattern.exec(altNames.substring(nextQuote)); + if (!match) { + throw new ERR_TLS_CERT_ALTNAME_FORMAT(); + } + currentToken += JSONParse(match[0]); + offset = nextQuote + match[0].length; + } else if (nextSep !== -1) { + // There is a separator and no quote before it. + currentToken += altNames.substring(offset, nextSep); + result.push(currentToken); + currentToken = ''; + offset = nextSep + 2; + } else { + currentToken += altNames.substring(offset); + offset = altNames.length; + } + } + result.push(currentToken); + return result; +} + +exports.checkServerIdentity = function checkServerIdentity(hostname, cert) { + const subject = cert.subject; + const altNames = cert.subjectaltname; + const dnsNames = []; + const ips = []; + + hostname = '' + hostname; + + if (altNames) { + const splitAltNames = altNames.includes('"') ? + splitEscapedAltNames(altNames) : + altNames.split(', '); + splitAltNames.forEach((name) => { + if (name.startsWith('DNS:')) { + dnsNames.push(name.slice(4)); + } else if (name.startsWith('IP Address:')) { + ips.push(canonicalizeIP(name.slice(11))); + } + }); + } + + let valid = false; + let reason = 'Unknown reason'; + + hostname = unfqdn(hostname); // Remove trailing dot for error messages. 
+ + if (net.isIP(hostname)) { + valid = ips.includes(canonicalizeIP(hostname)); + if (!valid) + reason = `IP: ${hostname} is not in the cert's list: ` + ips.join(', '); + } else if (dnsNames.length > 0 || subject?.CN) { + const hostParts = splitHost(hostname); + const wildcard = (pattern) => check(hostParts, pattern, true); + + if (dnsNames.length > 0) { + valid = dnsNames.some(wildcard); + if (!valid) + reason = + `Host: ${hostname}. is not in the cert's altnames: ${altNames}`; + } else { + // Match against Common Name only if no supported identifiers exist. + const cn = subject.CN; + + if (ArrayIsArray(cn)) + valid = cn.some(wildcard); + else if (cn) + valid = wildcard(cn); + + if (!valid) + reason = `Host: ${hostname}. is not cert's CN: ${cn}`; + } + } else { + reason = 'Cert does not contain a DNS name'; + } + + if (!valid) { + return new ERR_TLS_CERT_ALTNAME_INVALID(reason, hostname, cert); + } +}; + +exports.createSecureContext = _tls_common.createSecureContext; +exports.SecureContext = _tls_common.SecureContext; +exports.TLSSocket = _tls_wrap.TLSSocket; +exports.Server = _tls_wrap.Server; +exports.createServer = _tls_wrap.createServer; +exports.connect = _tls_wrap.connect; \ No newline at end of file diff --git a/.codesandbox/node/trace_events.js b/.codesandbox/node/trace_events.js new file mode 100644 index 00000000..69fccafc --- /dev/null +++ b/.codesandbox/node/trace_events.js @@ -0,0 +1,75 @@ +'use strict'; + +const { + ObjectAssign, + ObjectDefineProperty, +} = primordials; + +const { test, suite, before, after, beforeEach, afterEach } = require('internal/test_runner/harness'); +const { run } = require('internal/test_runner/runner'); + +module.exports = test; +ObjectAssign(module.exports, { + after, + afterEach, + before, + beforeEach, + describe: suite, + it: test, + run, + suite, + test, +}); + +let lazyMock; + +ObjectDefineProperty(module.exports, 'mock', { + __proto__: null, + configurable: true, + enumerable: true, + get() { + if (lazyMock === undefined) { + const { MockTracker } = require('internal/test_runner/mock/mock'); + + lazyMock = new MockTracker(); + } + + return lazyMock; + }, +}); + +let lazySnapshot; + +ObjectDefineProperty(module.exports, 'snapshot', { + __proto__: null, + configurable: true, + enumerable: true, + get() { + if (lazySnapshot === undefined) { + const { + setDefaultSnapshotSerializers, + setResolveSnapshotPath, + } = require('internal/test_runner/snapshot'); + + lazySnapshot = { + __proto__: null, + setDefaultSnapshotSerializers, + setResolveSnapshotPath, + }; + } + + return lazySnapshot; + }, +}); + +ObjectDefineProperty(module.exports, 'assert', { + __proto__: null, + configurable: true, + enumerable: true, + get() { + const { register } = require('internal/test_runner/assert'); + const assert = { __proto__: null, register }; + ObjectDefineProperty(module.exports, 'assert', assert); + return assert; + }, +}); \ No newline at end of file diff --git a/.codesandbox/node/tty.js b/.codesandbox/node/tty.js new file mode 100644 index 00000000..f9275fab --- /dev/null +++ b/.codesandbox/node/tty.js @@ -0,0 +1,147 @@ +'use strict'; + +const { + NumberIsInteger, + ObjectSetPrototypeOf, +} = primordials; + +const net = require('net'); +const { TTY, isTTY } = internalBinding('tty_wrap'); +const { + ErrnoException, + codes: { + ERR_INVALID_FD, + ERR_TTY_INIT_FAILED, + }, +} = require('internal/errors'); +const { + getColorDepth, + hasColors, +} = require('internal/tty'); + +// Lazy loaded for startup performance. 
+let readline; + +function isatty(fd) { + return NumberIsInteger(fd) && fd >= 0 && fd <= 2147483647 && + isTTY(fd); +} + +function ReadStream(fd, options) { + if (!(this instanceof ReadStream)) + return new ReadStream(fd, options); + if (fd >> 0 !== fd || fd < 0) + throw new ERR_INVALID_FD(fd); + + const ctx = {}; + const tty = new TTY(fd, ctx); + if (ctx.code !== undefined) { + throw new ERR_TTY_INIT_FAILED(ctx); + } + + net.Socket.call(this, { + readableHighWaterMark: 0, + handle: tty, + manualStart: true, + ...options, + }); + + this.isRaw = false; + this.isTTY = true; +} + +ObjectSetPrototypeOf(ReadStream.prototype, net.Socket.prototype); +ObjectSetPrototypeOf(ReadStream, net.Socket); + +ReadStream.prototype.setRawMode = function(flag) { + flag = !!flag; + const err = this._handle?.setRawMode(flag); + if (err) { + this.emit('error', new ErrnoException(err, 'setRawMode')); + return this; + } + this.isRaw = flag; + return this; +}; + +function WriteStream(fd) { + if (!(this instanceof WriteStream)) + return new WriteStream(fd); + if (fd >> 0 !== fd || fd < 0) + throw new ERR_INVALID_FD(fd); + + const ctx = {}; + const tty = new TTY(fd, ctx); + if (ctx.code !== undefined) { + throw new ERR_TTY_INIT_FAILED(ctx); + } + + net.Socket.call(this, { + readableHighWaterMark: 0, + handle: tty, + manualStart: true, + }); + + // Prevents interleaved or dropped stdout/stderr output for terminals. + // As noted in the following reference, local TTYs tend to be quite fast and + // this behavior has become expected due historical functionality on OS X, + // even though it was originally intended to change in v1.0.2 (Libuv 1.2.1). + // Ref: https://github.com/nodejs/node/pull/1771#issuecomment-119351671 + this._handle.setBlocking(true); + + const winSize = [0, 0]; + const err = this._handle.getWindowSize(winSize); + if (!err) { + this.columns = winSize[0]; + this.rows = winSize[1]; + } +} + +ObjectSetPrototypeOf(WriteStream.prototype, net.Socket.prototype); +ObjectSetPrototypeOf(WriteStream, net.Socket); + +WriteStream.prototype.isTTY = true; + +WriteStream.prototype.getColorDepth = getColorDepth; + +WriteStream.prototype.hasColors = hasColors; + +WriteStream.prototype._refreshSize = function() { + const oldCols = this.columns; + const oldRows = this.rows; + const winSize = [0, 0]; + const err = this._handle.getWindowSize(winSize); + if (err) { + this.emit('error', new ErrnoException(err, 'getWindowSize')); + return; + } + const { 0: newCols, 1: newRows } = winSize; + if (oldCols !== newCols || oldRows !== newRows) { + this.columns = newCols; + this.rows = newRows; + this.emit('resize'); + } +}; + +// Backwards-compat +WriteStream.prototype.cursorTo = function(x, y, callback) { + if (readline === undefined) readline = require('readline'); + return readline.cursorTo(this, x, y, callback); +}; +WriteStream.prototype.moveCursor = function(dx, dy, callback) { + if (readline === undefined) readline = require('readline'); + return readline.moveCursor(this, dx, dy, callback); +}; +WriteStream.prototype.clearLine = function(dir, callback) { + if (readline === undefined) readline = require('readline'); + return readline.clearLine(this, dir, callback); +}; +WriteStream.prototype.clearScreenDown = function(callback) { + if (readline === undefined) readline = require('readline'); + return readline.clearScreenDown(this, callback); +}; +WriteStream.prototype.getWindowSize = function() { + return [this.columns, this.rows]; +}; + +module.exports = { isatty, ReadStream, WriteStream }; \ No newline at end of file diff 
--git a/.codesandbox/node/url.js b/.codesandbox/node/url.js new file mode 100644 index 00000000..7e0f36ee --- /dev/null +++ b/.codesandbox/node/url.js @@ -0,0 +1,1028 @@ +'use strict'; + +const { + ArrayPrototypeJoin, + Boolean, + Int8Array, + ObjectAssign, + ObjectKeys, + StringPrototypeAt, + StringPrototypeCharCodeAt, + StringPrototypeIndexOf, + StringPrototypeReplaceAll, + StringPrototypeSlice, + decodeURIComponent, +} = primordials; + +const { URLPattern } = internalBinding('url_pattern'); +const { toASCII } = internalBinding('encoding_binding'); +const { encodeStr, hexTable } = require('internal/querystring'); +const querystring = require('querystring'); + +const { + ERR_INVALID_ARG_TYPE, + ERR_INVALID_URL, +} = require('internal/errors').codes; +const { + validateString, + validateObject, +} = require('internal/validators'); + +// This ensures setURLConstructor() is called before the native +// URL::ToObject() method is used. +const { spliceOne } = require('internal/util'); +const { isInsideNodeModules } = internalBinding('util'); + +// WHATWG URL implementation provided by internal/url +const { + URL, + URLSearchParams, + domainToASCII, + domainToUnicode, + fileURLToPath, + fileURLToPathBuffer, + pathToFileURL: _pathToFileURL, + urlToHttpOptions, + unsafeProtocol, + hostlessProtocol, + slashedProtocol, +} = require('internal/url'); + +const bindingUrl = internalBinding('url'); + +// Original url.parse() API + +function Url() { + this.protocol = null; + this.slashes = null; + this.auth = null; + this.host = null; + this.port = null; + this.hostname = null; + this.hash = null; + this.search = null; + this.query = null; + this.pathname = null; + this.path = null; + this.href = null; +} + +// Reference: RFC 3986, RFC 1808, RFC 2396 + +// define these here so at least they only have to be +// compiled once on the first module load. +const protocolPattern = /^[a-z0-9.+-]+:/i; +const portPattern = /:[0-9]*$/; +const hostPattern = /^\/\/[^@/]+@[^@/]+/; + +// Special case for a simple path URL +const simplePathPattern = /^(\/\/?(?!\/)[^?\s]*)(\?[^\s]*)?$/; + +const hostnameMaxLen = 255; +const { + CHAR_SPACE, + CHAR_TAB, + CHAR_CARRIAGE_RETURN, + CHAR_LINE_FEED, + CHAR_NO_BREAK_SPACE, + CHAR_ZERO_WIDTH_NOBREAK_SPACE, + CHAR_HASH, + CHAR_FORWARD_SLASH, + CHAR_LEFT_SQUARE_BRACKET, + CHAR_RIGHT_SQUARE_BRACKET, + CHAR_LEFT_ANGLE_BRACKET, + CHAR_RIGHT_ANGLE_BRACKET, + CHAR_LEFT_CURLY_BRACKET, + CHAR_RIGHT_CURLY_BRACKET, + CHAR_QUESTION_MARK, + CHAR_DOUBLE_QUOTE, + CHAR_SINGLE_QUOTE, + CHAR_PERCENT, + CHAR_SEMICOLON, + CHAR_BACKWARD_SLASH, + CHAR_CIRCUMFLEX_ACCENT, + CHAR_GRAVE_ACCENT, + CHAR_VERTICAL_LINE, + CHAR_AT, + CHAR_COLON, +} = require('internal/constants'); + +let urlParseWarned = false; + +function urlParse(url, parseQueryString, slashesDenoteHost) { + if (!urlParseWarned && !isInsideNodeModules(100, true)) { + urlParseWarned = true; + process.emitWarning( + '`url.parse()` behavior is not standardized and prone to ' + + 'errors that have security implications. Use the WHATWG URL API ' + + 'instead. 
CVEs are not issued for `url.parse()` vulnerabilities.', + 'DeprecationWarning', + 'DEP0169', + ); + } + + if (url instanceof Url) return url; + + const urlObject = new Url(); + urlObject.parse(url, parseQueryString, slashesDenoteHost); + return urlObject; +} + +function isIpv6Hostname(hostname) { + return ( + StringPrototypeCharCodeAt(hostname, 0) === CHAR_LEFT_SQUARE_BRACKET && + StringPrototypeCharCodeAt(hostname, hostname.length - 1) === + CHAR_RIGHT_SQUARE_BRACKET + ); +} + +// This prevents some common spoofing bugs due to our use of IDNA toASCII. For +// compatibility, the set of characters we use here is the *intersection* of +// "forbidden host code point" in the WHATWG URL Standard [1] and the +// characters in the host parsing loop in Url.prototype.parse, with the +// following additions: +// +// - ':' since this could cause a "protocol spoofing" bug +// - '@' since this could cause parts of the hostname to be confused with auth +// - '[' and ']' since this could cause a non-IPv6 hostname to be interpreted +// as IPv6 by isIpv6Hostname above +// +// [1]: https://url.spec.whatwg.org/#forbidden-host-code-point +const forbiddenHostChars = /[\0\t\n\r #%/:<>?@[\\\]^|]/; +// For IPv6, permit '[', ']', and ':'. +const forbiddenHostCharsIpv6 = /[\0\t\n\r #%/<>?@\\^|]/; + +Url.prototype.parse = function parse(url, parseQueryString, slashesDenoteHost) { + validateString(url, 'url'); + + // Copy chrome, IE, opera backslash-handling behavior. + // Back slashes before the query string get converted to forward slashes + // See: https://code.google.com/p/chromium/issues/detail?id=25916 + let hasHash = false; + let hasAt = false; + let start = -1; + let end = -1; + let rest = ''; + let lastPos = 0; + for (let i = 0, inWs = false, split = false; i < url.length; ++i) { + const code = url.charCodeAt(i); + + // Find first and last non-whitespace characters for trimming + const isWs = code < 33 || + code === CHAR_NO_BREAK_SPACE || + code === CHAR_ZERO_WIDTH_NOBREAK_SPACE; + if (start === -1) { + if (isWs) + continue; + lastPos = start = i; + } else if (inWs) { + if (!isWs) { + end = -1; + inWs = false; + } + } else if (isWs) { + end = i; + inWs = true; + } + + // Only convert backslashes while we haven't seen a split character + if (!split) { + switch (code) { + case CHAR_AT: + hasAt = true; + break; + case CHAR_HASH: + hasHash = true; + // Fall through + case CHAR_QUESTION_MARK: + split = true; + break; + case CHAR_BACKWARD_SLASH: + if (i - lastPos > 0) + rest += url.slice(lastPos, i); + rest += '/'; + lastPos = i + 1; + break; + } + } else if (!hasHash && code === CHAR_HASH) { + hasHash = true; + } + } + + // Check if string was non-empty (including strings with only whitespace) + if (start !== -1) { + if (lastPos === start) { + // We didn't convert any backslashes + + if (end === -1) { + if (start === 0) + rest = url; + else + rest = url.slice(start); + } else { + rest = url.slice(start, end); + } + } else if (end === -1 && lastPos < url.length) { + // We converted some backslashes and have only part of the entire string + rest += url.slice(lastPos); + } else if (end !== -1 && lastPos < end) { + // We converted some backslashes and have only part of the entire string + rest += url.slice(lastPos, end); + } + } + + if (!slashesDenoteHost && !hasHash && !hasAt) { + // Try fast path regexp + const simplePath = simplePathPattern.exec(rest); + if (simplePath) { + this.path = rest; + this.href = rest; + this.pathname = simplePath[1]; + if (simplePath[2]) { + this.search = simplePath[2]; + if 
(parseQueryString) { + this.query = querystring.parse(this.search.slice(1)); + } else { + this.query = this.search.slice(1); + } + } else if (parseQueryString) { + this.search = null; + this.query = { __proto__: null }; + } + return this; + } + } + + let proto = protocolPattern.exec(rest); + let lowerProto; + if (proto) { + proto = proto[0]; + lowerProto = proto.toLowerCase(); + this.protocol = lowerProto; + rest = rest.slice(proto.length); + } + + // Figure out if it's got a host + // user@server is *always* interpreted as a hostname, and url + // resolution will treat //foo/bar as host=foo,path=bar because that's + // how the browser resolves relative URLs. + let slashes; + if (slashesDenoteHost || proto || hostPattern.test(rest)) { + slashes = rest.charCodeAt(0) === CHAR_FORWARD_SLASH && + rest.charCodeAt(1) === CHAR_FORWARD_SLASH; + if (slashes && !(proto && hostlessProtocol.has(lowerProto))) { + rest = rest.slice(2); + this.slashes = true; + } + } + + if (!hostlessProtocol.has(lowerProto) && + (slashes || (proto && !slashedProtocol.has(proto)))) { + + // there's a hostname. + // the first instance of /, ?, ;, or # ends the host. + // + // If there is an @ in the hostname, then non-host chars *are* allowed + // to the left of the last @ sign, unless some host-ending character + // comes *before* the @-sign. + // URLs are obnoxious. + // + // ex: + // http://a@b@c/ => user:a@b host:c + // http://a@b?@c => user:a host:b path:/?@c + + let hostEnd = -1; + let atSign = -1; + let nonHost = -1; + for (let i = 0; i < rest.length; ++i) { + switch (rest.charCodeAt(i)) { + case CHAR_TAB: + case CHAR_LINE_FEED: + case CHAR_CARRIAGE_RETURN: + // WHATWG URL removes tabs, newlines, and carriage returns. Let's do that too. + rest = rest.slice(0, i) + rest.slice(i + 1); + i -= 1; + break; + case CHAR_SPACE: + case CHAR_DOUBLE_QUOTE: + case CHAR_PERCENT: + case CHAR_SINGLE_QUOTE: + case CHAR_SEMICOLON: + case CHAR_LEFT_ANGLE_BRACKET: + case CHAR_RIGHT_ANGLE_BRACKET: + case CHAR_BACKWARD_SLASH: + case CHAR_CIRCUMFLEX_ACCENT: + case CHAR_GRAVE_ACCENT: + case CHAR_LEFT_CURLY_BRACKET: + case CHAR_VERTICAL_LINE: + case CHAR_RIGHT_CURLY_BRACKET: + // Characters that are never ever allowed in a hostname from RFC 2396 + if (nonHost === -1) + nonHost = i; + break; + case CHAR_HASH: + case CHAR_FORWARD_SLASH: + case CHAR_QUESTION_MARK: + // Find the first instance of any host-ending characters + if (nonHost === -1) + nonHost = i; + hostEnd = i; + break; + case CHAR_AT: + // At this point, either we have an explicit point where the + // auth portion cannot go past, or the last @ char is the decider. + atSign = i; + nonHost = -1; + break; + } + if (hostEnd !== -1) + break; + } + start = 0; + if (atSign !== -1) { + this.auth = decodeURIComponent(rest.slice(0, atSign)); + start = atSign + 1; + } + if (nonHost === -1) { + this.host = rest.slice(start); + rest = ''; + } else { + this.host = rest.slice(start, nonHost); + rest = rest.slice(nonHost); + } + + // pull out port. + this.parseHost(); + + // We've indicated that there is a hostname, + // so even if it's empty, it has to be present. + if (typeof this.hostname !== 'string') + this.hostname = ''; + + const hostname = this.hostname; + + // If hostname begins with [ and ends with ] + // assume that it's an IPv6 address. + const ipv6Hostname = isIpv6Hostname(hostname); + + // validate a little. 
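Everything up to this point decides how auth, host, and port are split off before the remainder is treated as path, query, and hash. A small illustration of the resulting fields, using the public `url.parse()` API (values as documented for this legacy parser):

```js
const url = require('node:url');

const parsed = url.parse('https://user:pass@sub.example.com:8080/p/a/t/h?query=string#hash');
console.log(parsed.protocol); // 'https:'
console.log(parsed.auth);     // 'user:pass'
console.log(parsed.host);     // 'sub.example.com:8080'
console.log(parsed.hostname); // 'sub.example.com'
console.log(parsed.port);     // '8080'
console.log(parsed.pathname); // '/p/a/t/h'
console.log(parsed.search);   // '?query=string'
console.log(parsed.hash);     // '#hash'
```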
+ if (!ipv6Hostname) { + rest = getHostname(this, rest, hostname, url); + } + + if (this.hostname.length > hostnameMaxLen) { + this.hostname = ''; + } else { + // Hostnames are always lower case. + this.hostname = this.hostname.toLowerCase(); + } + + if (this.hostname !== '') { + if (ipv6Hostname) { + if (forbiddenHostCharsIpv6.test(this.hostname)) { + throw new ERR_INVALID_URL(url); + } + } else { + // IDNA Support: Returns a punycoded representation of "domain". + // It only converts parts of the domain name that + // have non-ASCII characters, i.e. it doesn't matter if + // you call it with a domain that already is ASCII-only. + this.hostname = toASCII(this.hostname); + + // Prevent two potential routes of hostname spoofing. + // 1. If this.hostname is empty, it must have become empty due to toASCII + // since we checked this.hostname above. + // 2. If any of forbiddenHostChars appears in this.hostname, it must have + // also gotten in due to toASCII. This is since getHostname would have + // filtered them out otherwise. + // Rather than trying to correct this by moving the non-host part into + // the pathname as we've done in getHostname, throw an exception to + // convey the severity of this issue. + if (this.hostname === '' || forbiddenHostChars.test(this.hostname)) { + throw new ERR_INVALID_URL(url); + } + } + } + + const p = this.port ? ':' + this.port : ''; + const h = this.hostname || ''; + this.host = h + p; + + // strip [ and ] from the hostname + // the host field still retains them, though + if (ipv6Hostname) { + this.hostname = this.hostname.slice(1, -1); + if (rest[0] !== '/') { + rest = '/' + rest; + } + } + } + + // Now rest is set to the post-host stuff. + // Chop off any delim chars. + if (!unsafeProtocol.has(lowerProto)) { + // First, make 100% sure that any "autoEscape" chars get + // escaped, even if encodeURIComponent doesn't think they + // need to be. + rest = autoEscapeStr(rest); + } + + let questionIdx = -1; + let hashIdx = -1; + for (let i = 0; i < rest.length; ++i) { + const code = rest.charCodeAt(i); + if (code === CHAR_HASH) { + this.hash = rest.slice(i); + hashIdx = i; + break; + } else if (code === CHAR_QUESTION_MARK && questionIdx === -1) { + questionIdx = i; + } + } + + if (questionIdx !== -1) { + if (hashIdx === -1) { + this.search = rest.slice(questionIdx); + this.query = rest.slice(questionIdx + 1); + } else { + this.search = rest.slice(questionIdx, hashIdx); + this.query = rest.slice(questionIdx + 1, hashIdx); + } + if (parseQueryString) { + this.query = querystring.parse(this.query); + } + } else if (parseQueryString) { + // No query string, but parseQueryString still requested + this.search = null; + this.query = { __proto__: null }; + } + + const useQuestionIdx = + questionIdx !== -1 && (hashIdx === -1 || questionIdx < hashIdx); + const firstIdx = useQuestionIdx ? questionIdx : hashIdx; + if (firstIdx === -1) { + if (rest.length > 0) + this.pathname = rest; + } else if (firstIdx > 0) { + this.pathname = rest.slice(0, firstIdx); + } + if (slashedProtocol.has(lowerProto) && + this.hostname && !this.pathname) { + this.pathname = '/'; + } + + // To support http.request + if (this.pathname || this.search) { + const p = this.pathname || ''; + const s = this.search || ''; + this.path = p + s; + } + + // Finally, reconstruct the href based on what has been validated. 
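A hedged sketch of the hostname handling described above: non-ASCII labels are punycoded via `toASCII`, and a host that still contains a forbidden character is rejected outright rather than silently re-split (illustrative values):

```js
const url = require('node:url');

// IDNA: non-ASCII labels are converted to punycode during host validation.
console.log(url.domainToASCII('español.com'));            // 'xn--espaol-zwa.com'
console.log(url.parse('https://español.com/').hostname);  // 'xn--espaol-zwa.com'

// A forbidden host code point makes the whole URL invalid.
try {
  new URL('https://exa mple.com/');
} catch (err) {
  console.log(err.code); // 'ERR_INVALID_URL'
}
```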
+ this.href = this.format(); + return this; +}; + +let warnInvalidPort = true; +function getHostname(self, rest, hostname, url) { + for (let i = 0; i < hostname.length; ++i) { + const code = hostname.charCodeAt(i); + const isValid = (code !== CHAR_FORWARD_SLASH && + code !== CHAR_BACKWARD_SLASH && + code !== CHAR_HASH && + code !== CHAR_QUESTION_MARK && + code !== CHAR_COLON); + + if (!isValid) { + // If leftover starts with :, then it represents an invalid port. + // But url.parse() is lenient about it for now. + // Issue a warning and continue. + if (warnInvalidPort && code === CHAR_COLON) { + const detail = `The URL ${url} is invalid. Future versions of Node.js will throw an error.`; + process.emitWarning(detail, 'DeprecationWarning', 'DEP0170'); + warnInvalidPort = false; + } + self.hostname = hostname.slice(0, i); + return `/${hostname.slice(i)}${rest}`; + } + } + return rest; +} + +// Escaped characters. Use empty strings to fill up unused entries. +// Using Array is faster than Object/Map +const escapedCodes = [ + /* 0 - 9 */ '', '', '', '', '', '', '', '', '', '%09', + /* 10 - 19 */ '%0A', '', '', '%0D', '', '', '', '', '', '', + /* 20 - 29 */ '', '', '', '', '', '', '', '', '', '', + /* 30 - 39 */ '', '', '%20', '', '%22', '', '', '', '', '%27', + /* 40 - 49 */ '', '', '', '', '', '', '', '', '', '', + /* 50 - 59 */ '', '', '', '', '', '', '', '', '', '', + /* 60 - 69 */ '%3C', '', '%3E', '', '', '', '', '', '', '', + /* 70 - 79 */ '', '', '', '', '', '', '', '', '', '', + /* 80 - 89 */ '', '', '', '', '', '', '', '', '', '', + /* 90 - 99 */ '', '', '%5C', '', '%5E', '', '%60', '', '', '', + /* 100 - 109 */ '', '', '', '', '', '', '', '', '', '', + /* 110 - 119 */ '', '', '', '', '', '', '', '', '', '', + /* 120 - 125 */ '', '', '', '%7B', '%7C', '%7D', +]; + +// Automatically escape all delimiters and unwise characters from RFC 2396. +// Also escape single quotes in case of an XSS attack. +// Return the escaped string. +function autoEscapeStr(rest) { + let escaped = ''; + let lastEscapedPos = 0; + for (let i = 0; i < rest.length; ++i) { + // `escaped` contains substring up to the last escaped character. + const escapedChar = escapedCodes[rest.charCodeAt(i)]; + if (escapedChar) { + // Concat if there are ordinary characters in the middle. + if (i > lastEscapedPos) + escaped += rest.slice(lastEscapedPos, i); + escaped += escapedChar; + lastEscapedPos = i + 1; + } + } + if (lastEscapedPos === 0) // Nothing has been escaped. + return rest; + + // There are ordinary characters at the end. + if (lastEscapedPos < rest.length) + escaped += rest.slice(lastEscapedPos); + + return escaped; +} + +// Format a parsed object into a url string +function urlFormat(urlObject, options) { + // Ensure it's an object, and not a string url. + // If it's an object, this is a no-op. + // this way, you can call urlParse() on strings + // to clean up potentially wonky urls. 
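The `escapedCodes` table drives `autoEscapeStr`: delimiters and RFC 2396 "unwise" characters left in the path are percent-encoded, single quotes included. A rough illustration of the observable effect, assuming a scheme that is not in `unsafeProtocol`:

```js
const url = require('node:url');

const parsed = url.parse("https://example.com/a b<c>'d");
console.log(parsed.pathname); // '/a%20b%3Cc%3E%27d'
console.log(parsed.href);     // 'https://example.com/a%20b%3Cc%3E%27d'
```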
+ if (typeof urlObject === 'string') { + urlObject = urlParse(urlObject); + } else if (typeof urlObject !== 'object' || urlObject === null) { + throw new ERR_INVALID_ARG_TYPE('urlObject', + ['Object', 'string'], urlObject); + } else if (urlObject instanceof URL) { + let fragment = true; + let unicode = false; + let search = true; + let auth = true; + + if (options) { + validateObject(options, 'options'); + + if (options.fragment != null) { + fragment = Boolean(options.fragment); + } + + if (options.unicode != null) { + unicode = Boolean(options.unicode); + } + + if (options.search != null) { + search = Boolean(options.search); + } + + if (options.auth != null) { + auth = Boolean(options.auth); + } + } + + return bindingUrl.format(urlObject.href, fragment, unicode, search, auth); + } + + return Url.prototype.format.call(urlObject); +} + +// These characters do not need escaping: +// ! - . _ ~ +// ' ( ) * : +// digits +// alpha (uppercase) +// alpha (lowercase) +const noEscapeAuth = new Int8Array([ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 0x00 - 0x0F + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 0x10 - 0x1F + 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 0, // 0x20 - 0x2F + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, // 0x30 - 0x3F + 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 0x40 - 0x4F + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, // 0x50 - 0x5F + 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 0x60 - 0x6F + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0, // 0x70 - 0x7F +]); + +Url.prototype.format = function format() { + let auth = this.auth || ''; + if (auth) { + auth = encodeStr(auth, noEscapeAuth, hexTable); + auth += '@'; + } + + let protocol = this.protocol || ''; + if (protocol && StringPrototypeCharCodeAt(protocol, protocol.length - 1) !== 58 /* : */) { + protocol += ':'; + } + + let pathname = this.pathname || ''; + let hash = this.hash || ''; + let host = ''; + let query = ''; + + if (this.host) { + host = auth + this.host; + } else if (this.hostname) { + host = auth + ( + StringPrototypeIndexOf(this.hostname, ':') !== -1 && !isIpv6Hostname(this.hostname) ? + '[' + this.hostname + ']' : + this.hostname + ); + if (this.port) { + host += ':' + this.port; + } + } + + if (this.query !== null && typeof this.query === 'object') { + query = querystring.stringify(this.query); + } + let search = this.search || (query && ('?' + query)) || ''; + + if (StringPrototypeIndexOf(pathname, '#') !== -1 || StringPrototypeIndexOf(pathname, '?') !== -1) { + let newPathname = ''; + let lastPos = 0; + const len = pathname.length; + for (let i = 0; i < len; i++) { + const code = StringPrototypeCharCodeAt(pathname, i); + if (code === CHAR_HASH || code === CHAR_QUESTION_MARK) { + if (i > lastPos) { + newPathname += StringPrototypeSlice(pathname, lastPos, i); + } + newPathname += (code === CHAR_HASH ? '%23' : '%3F'); + lastPos = i + 1; + } + } + if (lastPos < len) { + newPathname += StringPrototypeSlice(pathname, lastPos); + } + pathname = newPathname; + } + + // Only the slashedProtocols get the //. Not mailto:, xmpp:, etc. + // unless they had them to begin with. 
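For reference, the two call forms handled by `urlFormat` above: a plain legacy object goes through `Url.prototype.format`, while a WHATWG `URL` instance is formatted natively with the option flags shown. Examples adapted from the Node.js documentation:

```js
const url = require('node:url');

// Legacy object form.
console.log(url.format({
  protocol: 'https',
  hostname: 'example.com',
  pathname: '/some/path',
  query: { page: 1, format: 'json' },
}));
// 'https://example.com/some/path?page=1&format=json'

// WHATWG URL form with formatting options.
const myURL = new URL('https://a:b@測試?abc#foo');
console.log(url.format(myURL, { fragment: false, unicode: true, auth: false }));
// 'https://測試/?abc'
```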
+ if (this.slashes || slashedProtocol.has(protocol)) { + if (this.slashes || host) { + if (pathname && StringPrototypeCharCodeAt(pathname, 0) !== CHAR_FORWARD_SLASH) + pathname = '/' + pathname; + host = '//' + host; + } else if (protocol.length >= 4 && + StringPrototypeCharCodeAt(protocol, 0) === 102/* f */ && + StringPrototypeCharCodeAt(protocol, 1) === 105/* i */ && + StringPrototypeCharCodeAt(protocol, 2) === 108/* l */ && + StringPrototypeCharCodeAt(protocol, 3) === 101/* e */) { + host = '//'; + } + } + + // Escape '#' in search. + if (StringPrototypeIndexOf(search, '#') !== -1) { + search = StringPrototypeReplaceAll(search, '#', '%23'); + } + + if (hash && StringPrototypeCharCodeAt(hash, 0) !== CHAR_HASH) { + hash = '#' + hash; + } + if (search && StringPrototypeCharCodeAt(search, 0) !== CHAR_QUESTION_MARK) { + search = '?' + search; + } + + return protocol + host + pathname + search + hash; +}; + +function urlResolve(source, relative) { + return urlParse(source, false, true).resolve(relative); +} + +Url.prototype.resolve = function resolve(relative) { + return this.resolveObject(urlParse(relative, false, true)).format(); +}; + +function urlResolveObject(source, relative) { + if (!source) return relative; + return urlParse(source, false, true).resolveObject(relative); +} + +Url.prototype.resolveObject = function resolveObject(relative) { + if (typeof relative === 'string') { + const rel = new Url(); + rel.parse(relative, false, true); + relative = rel; + } + + const result = new Url(); + ObjectAssign(result, this); + + // Hash is always overridden, no matter what. + // even href="" will remove it. + result.hash = relative.hash; + + // If the relative url is empty, then there's nothing left to do here. + if (relative.href === '') { + result.href = result.format(); + return result; + } + + // Hrefs like //foo/bar always cut to the protocol. + if (relative.slashes && !relative.protocol) { + // Take everything except the protocol from relative + const relativeWithoutProtocol = ObjectKeys(relative).reduce((acc, key) => { + if (key !== 'protocol') { + acc[key] = relative[key]; + } + return acc; + }, {}); + ObjectAssign(result, relativeWithoutProtocol); + + // urlParse appends trailing / to urls like http://www.example.com + if (slashedProtocol.has(result.protocol) && + result.hostname && !result.pathname) { + result.path = result.pathname = '/'; + } + + result.href = result.format(); + return result; + } + + if (relative.protocol && relative.protocol !== result.protocol) { + // If it's a known url protocol, then changing + // the protocol does weird things + // first, if it's not file:, then we MUST have a host, + // and if there was a path + // to begin with, then we MUST have a path. + // if it is file:, then the host is dropped, + // because that's known to be hostless. + // anything else is assumed to be absolute. 
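`urlResolve` and `Url.prototype.resolveObject` below implement the legacy relative-resolution rules; the observable behaviour, as documented:

```js
const url = require('node:url');

console.log(url.resolve('/one/two/three', 'four'));         // '/one/two/four'
console.log(url.resolve('http://example.com/', '/one'));    // 'http://example.com/one'
console.log(url.resolve('http://example.com/one', '/two')); // 'http://example.com/two'
```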
+ if (!slashedProtocol.has(relative.protocol)) { + ObjectAssign(result, relative); + result.href = result.format(); + return result; + } + + result.protocol = relative.protocol; + if (!relative.host && + !/^file:?$/.test(relative.protocol) && + !hostlessProtocol.has(relative.protocol)) { + const relPath = (relative.pathname || '').split('/'); + while (relPath.length && !(relative.host = relPath.shift())); + relative.host ||= ''; + relative.hostname ||= ''; + if (relPath[0] !== '') relPath.unshift(''); + if (relPath.length < 2) relPath.unshift(''); + result.pathname = relPath.join('/'); + } else { + result.pathname = relative.pathname; + } + result.search = relative.search; + result.query = relative.query; + result.host = relative.host || ''; + result.auth = relative.auth; + result.hostname = relative.hostname || relative.host; + result.port = relative.port; + // To support http.request + if (result.pathname || result.search) { + const p = result.pathname || ''; + const s = result.search || ''; + result.path = p + s; + } + result.slashes ||= relative.slashes; + result.href = result.format(); + return result; + } + + const isSourceAbs = (result.pathname && result.pathname.charAt(0) === '/'); + const isRelAbs = ( + relative.host || (relative.pathname && relative.pathname.charAt(0) === '/') + ); + let mustEndAbs = (isRelAbs || isSourceAbs || + (result.host && relative.pathname)); + const removeAllDots = mustEndAbs; + let srcPath = (result.pathname && result.pathname.split('/')) || []; + const relPath = (relative.pathname && relative.pathname.split('/')) || []; + const noLeadingSlashes = result.protocol && + !slashedProtocol.has(result.protocol); + + // If the url is a non-slashed url, then relative + // links like ../.. should be able + // to crawl up to the hostname, as well. This is strange. + // result.protocol has already been set by now. + // Later on, put the first path part into the host field. + if (noLeadingSlashes) { + result.hostname = ''; + result.port = null; + if (result.host) { + if (srcPath[0] === '') srcPath[0] = result.host; + else srcPath.unshift(result.host); + } + result.host = ''; + if (relative.protocol) { + relative.hostname = null; + relative.port = null; + result.auth = null; + if (relative.host) { + if (relPath[0] === '') relPath[0] = relative.host; + else relPath.unshift(relative.host); + } + relative.host = null; + } + mustEndAbs &&= (relPath[0] === '' || srcPath[0] === ''); + } + + if (isRelAbs) { + // it's absolute. + if (relative.host || relative.host === '') { + if (result.host !== relative.host) result.auth = null; + result.host = relative.host; + result.port = relative.port; + } + if (relative.hostname || relative.hostname === '') { + if (result.hostname !== relative.hostname) result.auth = null; + result.hostname = relative.hostname; + } + result.search = relative.search; + result.query = relative.query; + srcPath = relPath; + // Fall through to the dot-handling below. + } else if (relPath.length) { + // it's relative + // throw away the existing file, and take the new path instead. + srcPath ||= []; + srcPath.pop(); + srcPath = srcPath.concat(relPath); + result.search = relative.search; + result.query = relative.query; + } else if (relative.search !== null && relative.search !== undefined) { + // Just pull out the search. + // like href='?foo'. + // Put this after the other two cases because it simplifies the booleans + if (noLeadingSlashes) { + result.hostname = result.host = srcPath.shift(); + // Occasionally the auth can get stuck only in host. 
+ // This especially happens in cases like + // url.resolveObject('mailto:local1@domain1', 'local2@domain2') + const authInHost = + result.host && result.host.indexOf('@') > 0 && result.host.split('@'); + if (authInHost) { + result.auth = authInHost.shift(); + result.host = result.hostname = authInHost.shift(); + } + } + result.search = relative.search; + result.query = relative.query; + // To support http.request + if (result.pathname !== null || result.search !== null) { + result.path = (result.pathname ? result.pathname : '') + + (result.search ? result.search : ''); + } + result.href = result.format(); + return result; + } + + if (!srcPath.length) { + // No path at all. All other things were already handled above. + result.pathname = null; + // To support http.request + if (result.search) { + result.path = '/' + result.search; + } else { + result.path = null; + } + result.href = result.format(); + return result; + } + + // If a url ENDs in . or .., then it must get a trailing slash. + // however, if it ends in anything else non-slashy, + // then it must NOT get a trailing slash. + let last = srcPath[srcPath.length - 1]; + const hasTrailingSlash = ( + ((result.host || relative.host || srcPath.length > 1) && + (last === '.' || last === '..')) || last === ''); + + // Strip single dots, resolve double dots to parent dir + // if the path tries to go above the root, `up` ends up > 0 + let up = 0; + for (let i = srcPath.length - 1; i >= 0; i--) { + last = srcPath[i]; + if (last === '.') { + spliceOne(srcPath, i); + } else if (last === '..') { + spliceOne(srcPath, i); + up++; + } else if (up) { + spliceOne(srcPath, i); + up--; + } + } + + // If the path is allowed to go above the root, restore leading ..s + if (!mustEndAbs && !removeAllDots) { + while (up--) { + srcPath.unshift('..'); + } + } + + if (mustEndAbs && srcPath[0] !== '' && + (!srcPath[0] || srcPath[0].charAt(0) !== '/')) { + srcPath.unshift(''); + } + + if (hasTrailingSlash && StringPrototypeAt(ArrayPrototypeJoin(srcPath, '/'), -1) !== '/') { + srcPath.push(''); + } + + const isAbsolute = srcPath[0] === '' || + (srcPath[0] && srcPath[0].charAt(0) === '/'); + + // put the host back + if (noLeadingSlashes) { + result.hostname = + result.host = isAbsolute ? '' : srcPath.length ? srcPath.shift() : ''; + // Occasionally the auth can get stuck only in host. + // This especially happens in cases like + // url.resolveObject('mailto:local1@domain1', 'local2@domain2') + const authInHost = result.host && result.host.indexOf('@') > 0 ? + result.host.split('@') : false; + if (authInHost) { + result.auth = authInHost.shift(); + result.host = result.hostname = authInHost.shift(); + } + } + + mustEndAbs ||= (result.host && srcPath.length); + + if (mustEndAbs && !isAbsolute) { + srcPath.unshift(''); + } + + if (!srcPath.length) { + result.pathname = null; + result.path = null; + } else { + result.pathname = srcPath.join('/'); + } + + // To support request.http + if (result.pathname !== null || result.search !== null) { + result.path = (result.pathname ? result.pathname : '') + + (result.search ? 
result.search : ''); + } + result.auth = relative.auth || result.auth; + result.slashes ||= relative.slashes; + result.href = result.format(); + return result; +}; + +Url.prototype.parseHost = function parseHost() { + let host = this.host; + let port = portPattern.exec(host); + if (port) { + port = port[0]; + if (port !== ':') { + this.port = port.slice(1); + } + host = host.slice(0, host.length - port.length); + } + if (host) this.hostname = host; +}; + +// When used internally, we are not obligated to associate TypeError with +// this function, so non-strings can be rejected by underlying implementation. +// Public API has to validate input and throw appropriate error. +function pathToFileURL(path, options) { + validateString(path, 'path'); + + return _pathToFileURL(path, options); +} + +module.exports = { + // Original API + Url, + parse: urlParse, + resolve: urlResolve, + resolveObject: urlResolveObject, + format: urlFormat, + + // WHATWG API + URL, + URLPattern, + URLSearchParams, + domainToASCII, + domainToUnicode, + + // Utilities + pathToFileURL, + fileURLToPath, + fileURLToPathBuffer, + urlToHttpOptions, +}; \ No newline at end of file diff --git a/.codesandbox/node/util.js b/.codesandbox/node/util.js new file mode 100644 index 00000000..d458b0a2 --- /dev/null +++ b/.codesandbox/node/util.js @@ -0,0 +1,521 @@ +'use strict'; + +const { + ArrayIsArray, + ArrayPrototypePop, + ArrayPrototypePush, + ArrayPrototypeReduce, + Error, + ErrorCaptureStackTrace, + FunctionPrototypeBind, + NumberIsSafeInteger, + ObjectDefineProperties, + ObjectDefineProperty, + ObjectGetOwnPropertyDescriptors, + ObjectKeys, + ObjectSetPrototypeOf, + ObjectValues, + ReflectApply, + RegExp, + RegExpPrototypeSymbolReplace, + StringPrototypeToWellFormed, +} = primordials; + +const { + ErrnoException, + ExceptionWithHostPort, + codes: { + ERR_FALSY_VALUE_REJECTION, + ERR_INVALID_ARG_TYPE, + ERR_OUT_OF_RANGE, + }, + isErrorStackTraceLimitWritable, +} = require('internal/errors'); +const { + format, + formatWithOptions, + inspect, + stripVTControlCharacters, +} = require('internal/util/inspect'); +const { debuglog } = require('internal/util/debuglog'); +const { + validateBoolean, + validateFunction, + validateNumber, + validateString, + validateOneOf, + validateObject, +} = require('internal/validators'); +const { + isReadableStream, + isWritableStream, + isNodeStream, +} = require('internal/streams/utils'); +const types = require('internal/util/types'); + +let utilColors; +function lazyUtilColors() { + utilColors ??= require('internal/util/colors'); + return utilColors; +} +const { getOptionValue } = require('internal/options'); + +const binding = internalBinding('util'); + +const { + deprecate, + getLazy, + getSystemErrorMap, + getSystemErrorName: internalErrorName, + getSystemErrorMessage: internalErrorMessage, + promisify, + defineLazyProperties, +} = require('internal/util'); + +let abortController; + +function lazyAbortController() { + abortController ??= require('internal/abort_controller'); + return abortController; +} + +let internalDeepEqual; + +/** + * @param {string} [code] + * @returns {string} + */ +function escapeStyleCode(code) { + if (code === undefined) return ''; + return `\u001b[${code}m`; +} + +/** + * @param {string | string[]} format + * @param {string} text + * @param {object} [options] + * @param {boolean} [options.validateStream] - Whether to validate the stream. + * @param {Stream} [options.stream] - The stream used for validation. 
+ * @returns {string} + */ +function styleText(format, text, { validateStream = true, stream = process.stdout } = {}) { + validateString(text, 'text'); + validateBoolean(validateStream, 'options.validateStream'); + + let skipColorize; + if (validateStream) { + if ( + !isReadableStream(stream) && + !isWritableStream(stream) && + !isNodeStream(stream) + ) { + throw new ERR_INVALID_ARG_TYPE('stream', ['ReadableStream', 'WritableStream', 'Stream'], stream); + } + + // If the stream is falsy or should not be colorized, set skipColorize to true + skipColorize = !lazyUtilColors().shouldColorize(stream); + } + + // If the format is not an array, convert it to an array + const formatArray = ArrayIsArray(format) ? format : [format]; + + const codes = []; + for (const key of formatArray) { + if (key === 'none') continue; + const formatCodes = inspect.colors[key]; + // If the format is not a valid style, throw an error + if (formatCodes == null) { + validateOneOf(key, 'format', ObjectKeys(inspect.colors)); + } + if (skipColorize) continue; + ArrayPrototypePush(codes, formatCodes); + } + + if (skipColorize) { + return text; + } + + // Build opening codes + let openCodes = ''; + for (let i = 0; i < codes.length; i++) { + openCodes += escapeStyleCode(codes[i][0]); + } + + // Process the text to handle nested styles + let processedText; + if (codes.length > 0) { + processedText = ArrayPrototypeReduce( + codes, + (text, code) => RegExpPrototypeSymbolReplace( + // Find the reset code + new RegExp(`\\u001b\\[${code[1]}m`, 'g'), + text, + (match, offset) => { + // Check if there's more content after this reset + if (offset + match.length < text.length) { + if ( + code[0] === inspect.colors.dim[0] || + code[0] === inspect.colors.bold[0] + ) { + // Dim and bold are not mutually exclusive, so we need to reapply + return `${match}${escapeStyleCode(code[0])}`; + } + return escapeStyleCode(code[0]); + } + return match; + }, + ), + text, + ); + } else { + processedText = text; + } + + // Build closing codes in reverse order + let closeCodes = ''; + for (let i = codes.length - 1; i >= 0; i--) { + closeCodes += escapeStyleCode(codes[i][1]); + } + + return `${openCodes}${processedText}${closeCodes}`; +} + +/** + * Inherit the prototype methods from one constructor into another. + * + * The Function.prototype.inherits from lang.js rewritten as a standalone + * function (not on Function.prototype). NOTE: If this file is to be loaded + * during bootstrapping this function needs to be rewritten using some native + * functions as prototype setup using normal JavaScript does not work as + * expected during bootstrapping (see mirror.js in r114903). + * @param {Function} ctor Constructor function which needs to inherit the + * prototype. + * @param {Function} superCtor Constructor function to inherit prototype from. + * @throws {TypeError} Will error if either constructor is null, or if + * the super constructor lacks a prototype. 
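A short usage sketch for `styleText` as implemented above (the format names come from `util.inspect.colors`; escape codes are only emitted when the target stream supports color, unless validation is disabled):

```js
const { styleText } = require('node:util');

console.log(styleText('red', 'error'));
console.log(styleText(['bold', 'green'], 'passed'));

// Skip the stream/color check entirely.
console.log(styleText('bgBlue', 'always styled', { validateStream: false }));
```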
+ */ +function inherits(ctor, superCtor) { + + if (ctor === undefined || ctor === null) + throw new ERR_INVALID_ARG_TYPE('ctor', 'Function', ctor); + + if (superCtor === undefined || superCtor === null) + throw new ERR_INVALID_ARG_TYPE('superCtor', 'Function', superCtor); + + if (superCtor.prototype === undefined) { + throw new ERR_INVALID_ARG_TYPE('superCtor.prototype', + 'Object', superCtor.prototype); + } + ObjectDefineProperty(ctor, 'super_', { + __proto__: null, + value: superCtor, + writable: true, + configurable: true, + }); + ObjectSetPrototypeOf(ctor.prototype, superCtor.prototype); +} + +/** + * @deprecated since v6.0.0 + * @template T + * @template S + * @param {T} target + * @param {S} source + * @returns {(T & S) | null} + */ +function _extend(target, source) { + // Don't do anything if source isn't an object + if (source === null || typeof source !== 'object') return target; + + const keys = ObjectKeys(source); + let i = keys.length; + while (i--) { + target[keys[i]] = source[keys[i]]; + } + return target; +} + +const callbackifyOnRejected = (reason, cb) => { + // `!reason` guard inspired by bluebird (Ref: https://goo.gl/t5IS6M). + // Because `null` is a special error value in callbacks which means "no error + // occurred", we error-wrap so the callback consumer can distinguish between + // "the promise rejected with null" or "the promise fulfilled with undefined". + if (!reason) { + reason = new ERR_FALSY_VALUE_REJECTION.HideStackFramesError(reason); + ErrorCaptureStackTrace(reason, callbackifyOnRejected); + } + return cb(reason); +}; + +/** + * Converts a Promise-returning function to callback style + * @param {Function} original + * @returns {Function} + */ +function callbackify(original) { + validateFunction(original, 'original'); + + // We DO NOT return the promise as it gives the user a false sense that + // the promise is actually somehow related to the callback's execution + // and that the callback throwing will reject the promise. + function callbackified(...args) { + const maybeCb = ArrayPrototypePop(args); + validateFunction(maybeCb, 'last argument'); + const cb = FunctionPrototypeBind(maybeCb, this); + // In true node style we process the callback on `nextTick` with all the + // implications (stack, `uncaughtException`, `async_hooks`) + ReflectApply(original, this, args) + .then((ret) => process.nextTick(cb, null, ret), + (rej) => process.nextTick(callbackifyOnRejected, rej, cb)); + } + + const descriptors = ObjectGetOwnPropertyDescriptors(original); + // It is possible to manipulate a functions `length` or `name` property. This + // guards against the manipulation. + if (typeof descriptors.length.value === 'number') { + descriptors.length.value++; + } + if (typeof descriptors.name.value === 'string') { + descriptors.name.value += 'Callbackified'; + } + const propertiesValues = ObjectValues(descriptors); + for (let i = 0; i < propertiesValues.length; i++) { + // We want to use null-prototype objects to not rely on globally mutable + // %Object.prototype%. 
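A usage sketch for `callbackify` (adapted from the Node.js docs); the falsy-rejection wrapping above is why even a promise rejected with `null` still surfaces as an `Error` in the callback:

```js
const util = require('node:util');

async function fn() {
  return 'hello world';
}
const callbackFunction = util.callbackify(fn);

callbackFunction((err, ret) => {
  if (err) throw err;
  console.log(ret); // 'hello world'
});
```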
+ ObjectSetPrototypeOf(propertiesValues[i], null); + } + ObjectDefineProperties(callbackified, descriptors); + return callbackified; +} + +/** + * @param {number} err + * @returns {string} + */ +function getSystemErrorMessage(err) { + validateNumber(err, 'err'); + if (err >= 0 || !NumberIsSafeInteger(err)) { + throw new ERR_OUT_OF_RANGE('err', 'a negative integer', err); + } + return internalErrorMessage(err); +} + +/** + * @param {number} err + * @returns {string} + */ +function getSystemErrorName(err) { + validateNumber(err, 'err'); + if (err >= 0 || !NumberIsSafeInteger(err)) { + throw new ERR_OUT_OF_RANGE('err', 'a negative integer', err); + } + return internalErrorName(err); +} + +function _errnoException(...args) { + if (isErrorStackTraceLimitWritable()) { + const limit = Error.stackTraceLimit; + Error.stackTraceLimit = 0; + const e = new ErrnoException(...args); + Error.stackTraceLimit = limit; + ErrorCaptureStackTrace(e, _exceptionWithHostPort); + return e; + } + return new ErrnoException(...args); +} + +function _exceptionWithHostPort(...args) { + if (isErrorStackTraceLimitWritable()) { + const limit = Error.stackTraceLimit; + Error.stackTraceLimit = 0; + const e = new ExceptionWithHostPort(...args); + Error.stackTraceLimit = limit; + ErrorCaptureStackTrace(e, _exceptionWithHostPort); + return e; + } + return new ExceptionWithHostPort(...args); +} + +/** + * Parses the content of a `.env` file. + * @param {string} content + * @returns {Record} + */ +function parseEnv(content) { + validateString(content, 'content'); + return binding.parseEnv(content); +} + +const lazySourceMap = getLazy(() => require('internal/source_map/source_map_cache')); + +/** + * @typedef {object} CallSite // The call site + * @property {string} scriptName // The name of the resource that contains the + * script for the function for this StackFrame + * @property {string} functionName // The name of the function associated with this stack frame + * @property {number} lineNumber // The number, 1-based, of the line for the associate function call + * @property {number} columnNumber // The 1-based column offset on the line for the associated function call + */ + +/** + * @param {CallSite} callSite // The call site object to reconstruct from source map + * @returns {CallSite | undefined} // The reconstructed call site object + */ +function reconstructCallSite(callSite) { + const { scriptName, lineNumber, columnNumber } = callSite; + const sourceMap = lazySourceMap().findSourceMap(scriptName); + if (!sourceMap) return; + const entry = sourceMap.findEntry(lineNumber - 1, columnNumber - 1); + if (!entry?.originalSource) return; + return { + __proto__: null, + // If the name is not found, it is an empty string to match the behavior of `util.getCallSite()` + functionName: entry.name ?? '', + scriptName: entry.originalSource, + lineNumber: entry.originalLine + 1, + column: entry.originalColumn + 1, + columnNumber: entry.originalColumn + 1, + }; +} + +/** + * + * The call site array to map + * @param {CallSite[]} callSites + * Array of objects with the reconstructed call site + * @returns {CallSite[]} + */ +function mapCallSite(callSites) { + const result = []; + for (let i = 0; i < callSites.length; ++i) { + const callSite = callSites[i]; + const found = reconstructCallSite(callSite); + ArrayPrototypePush(result, found ?? 
callSite); + } + return result; +} + +/** + * @typedef {object} CallSiteOptions // The call site options + * @property {boolean} sourceMap // Enable source map support + */ + +/** + * Returns the callSite + * @param {number} frameCount + * @param {CallSiteOptions} options + * @returns {CallSite[]} + */ +function getCallSites(frameCount = 10, options) { + // If options is not provided check if frameCount is an object + if (options === undefined) { + if (typeof frameCount === 'object') { + // If frameCount is an object, it is the options object + options = frameCount; + validateObject(options, 'options'); + if (options.sourceMap !== undefined) { + validateBoolean(options.sourceMap, 'options.sourceMap'); + } + frameCount = 10; + } else { + // If options is not provided, set it to an empty object + options = {}; + }; + } else { + // If options is provided, validate it + validateObject(options, 'options'); + if (options.sourceMap !== undefined) { + validateBoolean(options.sourceMap, 'options.sourceMap'); + } + } + + // Using kDefaultMaxCallStackSizeToCapture as reference + validateNumber(frameCount, 'frameCount', 1, 200); + // If options.sourceMaps is true or if sourceMaps are enabled but the option.sourceMaps is not set explictly to false + if (options.sourceMap === true || (getOptionValue('--enable-source-maps') && options.sourceMap !== false)) { + return mapCallSite(binding.getCallSites(frameCount)); + } + return binding.getCallSites(frameCount); +}; + +// Keep the `exports =` so that various functions can still be monkeypatched +module.exports = { + _errnoException, + _exceptionWithHostPort, + _extend: deprecate(_extend, + 'The `util._extend` API is deprecated. Please use Object.assign() instead.', + 'DEP0060'), + callbackify, + debug: debuglog, + debuglog, + deprecate, + format, + styleText, + formatWithOptions, + // Deprecated getCallSite. + // This API can be removed in next semver-minor release. + getCallSite: deprecate(getCallSites, + 'The `util.getCallSite` API has been renamed to `util.getCallSites()`.', + 'ExperimentalWarning'), + getCallSites, + getSystemErrorMap, + getSystemErrorName, + getSystemErrorMessage, + inherits, + inspect, + isArray: deprecate(ArrayIsArray, + 'The `util.isArray` API is deprecated. 
Please use `Array.isArray()` instead.', + 'DEP0044'), + isDeepStrictEqual(a, b, skipPrototype) { + if (internalDeepEqual === undefined) { + internalDeepEqual = require('internal/util/comparisons').isDeepStrictEqual; + } + return internalDeepEqual(a, b, skipPrototype); + }, + promisify, + stripVTControlCharacters, + toUSVString(input) { + return StringPrototypeToWellFormed(`${input}`); + }, + get transferableAbortSignal() { + return lazyAbortController().transferableAbortSignal; + }, + get transferableAbortController() { + return lazyAbortController().transferableAbortController; + }, + get aborted() { + return lazyAbortController().aborted; + }, + types, + parseEnv, +}; + +defineLazyProperties( + module.exports, + 'internal/util/parse_args/parse_args', + ['parseArgs'], +); + +defineLazyProperties( + module.exports, + 'internal/encoding', + ['TextDecoder', 'TextEncoder'], +); + +defineLazyProperties( + module.exports, + 'internal/mime', + ['MIMEType', 'MIMEParams'], +); + +defineLazyProperties( + module.exports, + 'internal/util/diff', + ['diff'], +); + +defineLazyProperties( + module.exports, + 'internal/util/trace_sigint', + ['setTraceSigInt'], +); \ No newline at end of file diff --git a/.codesandbox/node/v8.js b/.codesandbox/node/v8.js new file mode 100644 index 00000000..556ab9c4 --- /dev/null +++ b/.codesandbox/node/v8.js @@ -0,0 +1,481 @@ +// Copyright (c) 2014, StrongLoop Inc. +// +// Permission to use, copy, modify, and/or distribute this software for any +// purpose with or without fee is hereby granted, provided that the above +// copyright notice and this permission notice appear in all copies. +// +// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +'use strict'; + +const { + Array, + BigInt64Array, + BigUint64Array, + DataView, + Error, + Float32Array, + Float64Array, + Int16Array, + Int32Array, + Int8Array, + JSONParse, + ObjectPrototypeToString, + Uint16Array, + Uint32Array, + Uint8Array, + Uint8ClampedArray, + globalThis: { + Float16Array, + }, +} = primordials; + +const { Buffer } = require('buffer'); +const { + validateString, + validateUint32, + validateOneOf, +} = require('internal/validators'); +const { + Serializer, + Deserializer, +} = internalBinding('serdes'); +const { + namespace: startupSnapshot, +} = require('internal/v8/startup_snapshot'); + +let profiler = {}; +if (internalBinding('config').hasInspector) { + profiler = internalBinding('profiler'); +} + +const assert = require('internal/assert'); +const { inspect } = require('internal/util/inspect'); +const { FastBuffer } = require('internal/buffer'); +const { getValidatedPath } = require('internal/fs/utils'); +const { + createHeapSnapshotStream, + triggerHeapSnapshot, +} = internalBinding('heap_utils'); +const { + HeapSnapshotStream, + getHeapSnapshotOptions, + queryObjects, +} = require('internal/heap_utils'); +const promiseHooks = require('internal/promise_hooks'); +const { getOptionValue } = require('internal/options'); + +/** + * Generates a snapshot of the current V8 heap + * and writes it to a JSON file. 
+ * @param {string} [filename] + * @param {{ + * exposeInternals?: boolean, + * exposeNumericValues?: boolean + * }} [options] + * @returns {string} + */ +function writeHeapSnapshot(filename, options) { + if (filename !== undefined) { + filename = getValidatedPath(filename); + } + const optionArray = getHeapSnapshotOptions(options); + return triggerHeapSnapshot(filename, optionArray); +} + +/** + * Generates a snapshot of the current V8 heap + * and returns a Readable Stream. + * @param {{ + * exposeInternals?: boolean, + * exposeNumericValues?: boolean + * }} [options] + * @returns {import('./stream.js').Readable} + */ +function getHeapSnapshot(options) { + const optionArray = getHeapSnapshotOptions(options); + const handle = createHeapSnapshotStream(optionArray); + assert(handle); + return new HeapSnapshotStream(handle); +} + +// We need to get the buffer from the binding at the callsite since +// it's re-initialized after deserialization. +const binding = internalBinding('v8'); + +const { + cachedDataVersionTag, + setFlagsFromString: _setFlagsFromString, + isStringOneByteRepresentation: _isStringOneByteRepresentation, + updateHeapStatisticsBuffer, + updateHeapSpaceStatisticsBuffer, + updateHeapCodeStatisticsBuffer, + setHeapSnapshotNearHeapLimit: _setHeapSnapshotNearHeapLimit, + + // Properties for heap statistics buffer extraction. + kTotalHeapSizeIndex, + kTotalHeapSizeExecutableIndex, + kTotalPhysicalSizeIndex, + kTotalAvailableSize, + kUsedHeapSizeIndex, + kHeapSizeLimitIndex, + kDoesZapGarbageIndex, + kMallocedMemoryIndex, + kPeakMallocedMemoryIndex, + kNumberOfNativeContextsIndex, + kNumberOfDetachedContextsIndex, + kTotalGlobalHandlesSizeIndex, + kUsedGlobalHandlesSizeIndex, + kExternalMemoryIndex, + + // Properties for heap spaces statistics buffer extraction. + kHeapSpaces, + kSpaceSizeIndex, + kSpaceUsedSizeIndex, + kSpaceAvailableSizeIndex, + kPhysicalSpaceSizeIndex, + + // Properties for heap code statistics buffer extraction. + kCodeAndMetadataSizeIndex, + kBytecodeAndMetadataSizeIndex, + kExternalScriptSourceSizeIndex, + kCPUProfilerMetaDataSizeIndex, + + heapStatisticsBuffer, + heapCodeStatisticsBuffer, + heapSpaceStatisticsBuffer, + getCppHeapStatistics: _getCppHeapStatistics, + detailLevel, +} = binding; + +const kNumberOfHeapSpaces = kHeapSpaces.length; + +/** + * Sets V8 command-line flags. + * @param {string} flags + * @returns {void} + */ +function setFlagsFromString(flags) { + validateString(flags, 'flags'); + _setFlagsFromString(flags); +} + +/** + * Return whether this string uses one byte as underlying representation or not. + * @param {string} content + * @returns {boolean} + */ +function isStringOneByteRepresentation(content) { + validateString(content, 'content'); + return _isStringOneByteRepresentation(content); +} + + +/** + * Gets the current V8 heap statistics. 
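Illustrative use of the snapshot and flag helpers defined above; the output filename and the V8 flag are only examples:

```js
const v8 = require('node:v8');
const fs = require('node:fs');

// Write a heap snapshot; returns the generated filename.
const file = v8.writeHeapSnapshot(); // e.g. 'Heap-20250101-120000-12345-0-001.heapsnapshot'
console.log(file);

// Or stream a snapshot instead of writing it directly.
v8.getHeapSnapshot().pipe(fs.createWriteStream('snapshot.heapsnapshot'));

// Pass a runtime flag string straight to V8.
v8.setFlagsFromString('--trace_gc');
```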
+ * @returns {{ + * total_heap_size: number; + * total_heap_size_executable: number; + * total_physical_size: number; + * total_available_size: number; + * used_heap_size: number; + * heap_size_limit: number; + * malloced_memory: number; + * peak_malloced_memory: number; + * does_zap_garbage: number; + * number_of_native_contexts: number; + * number_of_detached_contexts: number; + * }} + */ +function getHeapStatistics() { + const buffer = heapStatisticsBuffer; + + updateHeapStatisticsBuffer(); + + return { + total_heap_size: buffer[kTotalHeapSizeIndex], + total_heap_size_executable: buffer[kTotalHeapSizeExecutableIndex], + total_physical_size: buffer[kTotalPhysicalSizeIndex], + total_available_size: buffer[kTotalAvailableSize], + used_heap_size: buffer[kUsedHeapSizeIndex], + heap_size_limit: buffer[kHeapSizeLimitIndex], + malloced_memory: buffer[kMallocedMemoryIndex], + peak_malloced_memory: buffer[kPeakMallocedMemoryIndex], + does_zap_garbage: buffer[kDoesZapGarbageIndex], + number_of_native_contexts: buffer[kNumberOfNativeContextsIndex], + number_of_detached_contexts: buffer[kNumberOfDetachedContextsIndex], + total_global_handles_size: buffer[kTotalGlobalHandlesSizeIndex], + used_global_handles_size: buffer[kUsedGlobalHandlesSizeIndex], + external_memory: buffer[kExternalMemoryIndex], + }; +} + +/** + * Gets the current V8 heap space statistics. + * @returns {{ + * space_name: string; + * space_size: number; + * space_used_size: number; + * space_available_size: number; + * physical_space_size: number; + * }[]} + */ +function getHeapSpaceStatistics() { + const heapSpaceStatistics = new Array(kNumberOfHeapSpaces); + const buffer = heapSpaceStatisticsBuffer; + + for (let i = 0; i < kNumberOfHeapSpaces; i++) { + updateHeapSpaceStatisticsBuffer(i); + heapSpaceStatistics[i] = { + space_name: kHeapSpaces[i], + space_size: buffer[kSpaceSizeIndex], + space_used_size: buffer[kSpaceUsedSizeIndex], + space_available_size: buffer[kSpaceAvailableSizeIndex], + physical_space_size: buffer[kPhysicalSpaceSizeIndex], + }; + } + + return heapSpaceStatistics; +} + +/** + * Gets the current V8 heap code statistics. 
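The statistics getters read from shared binding buffers that are refreshed on each call. A quick sketch of consuming them (numbers vary per process):

```js
const v8 = require('node:v8');

const heap = v8.getHeapStatistics();
console.log(`heap: ${heap.used_heap_size} / ${heap.heap_size_limit} bytes`);

for (const space of v8.getHeapSpaceStatistics()) {
  console.log(`${space.space_name}: ${space.space_used_size} bytes used`);
}
```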
+ * @returns {{ + * code_and_metadata_size: number; + * bytecode_and_metadata_size: number; + * external_script_source_size: number; + * cpu_profiler_metadata_size: number; + * }} + */ +function getHeapCodeStatistics() { + const buffer = heapCodeStatisticsBuffer; + + updateHeapCodeStatisticsBuffer(); + return { + code_and_metadata_size: buffer[kCodeAndMetadataSizeIndex], + bytecode_and_metadata_size: buffer[kBytecodeAndMetadataSizeIndex], + external_script_source_size: buffer[kExternalScriptSourceSizeIndex], + cpu_profiler_metadata_size: buffer[kCPUProfilerMetaDataSizeIndex], + }; +} + +let heapSnapshotNearHeapLimitCallbackAdded = false; +function setHeapSnapshotNearHeapLimit(limit) { + validateUint32(limit, 'limit', true); + if (heapSnapshotNearHeapLimitCallbackAdded || + getOptionValue('--heapsnapshot-near-heap-limit') > 0 + ) { + return; + } + heapSnapshotNearHeapLimitCallbackAdded = true; + _setHeapSnapshotNearHeapLimit(limit); +} + +const detailLevelDict = { + __proto__: null, + detailed: detailLevel.DETAILED, + brief: detailLevel.BRIEF, +}; + +function getCppHeapStatistics(type = 'detailed') { + validateOneOf(type, 'type', ['brief', 'detailed']); + const result = _getCppHeapStatistics(detailLevelDict[type]); + result.detail_level = type; + return result; +} + +/* V8 serialization API */ + +/* JS methods for the base objects */ +Serializer.prototype._getDataCloneError = Error; + +/** + * Reads raw bytes from the deserializer's internal buffer. + * @param {number} length + * @returns {Buffer} + */ +Deserializer.prototype.readRawBytes = function readRawBytes(length) { + const offset = this._readRawBytes(length); + // `this.buffer` can be a Buffer or a plain Uint8Array, so just calling + // `.slice()` doesn't work. + return new FastBuffer(this.buffer.buffer, + this.buffer.byteOffset + offset, + length); +}; + +function arrayBufferViewTypeToIndex(abView) { + const type = ObjectPrototypeToString(abView); + if (type === '[object Int8Array]') return 0; + if (type === '[object Uint8Array]') return 1; + if (type === '[object Uint8ClampedArray]') return 2; + if (type === '[object Int16Array]') return 3; + if (type === '[object Uint16Array]') return 4; + if (type === '[object Int32Array]') return 5; + if (type === '[object Uint32Array]') return 6; + if (type === '[object Float32Array]') return 7; + if (type === '[object Float64Array]') return 8; + if (type === '[object DataView]') return 9; + // Index 10 is FastBuffer. + if (type === '[object BigInt64Array]') return 11; + if (type === '[object BigUint64Array]') return 12; + if (type === '[object Float16Array]') return 13; + return -1; +} + +function arrayBufferViewIndexToType(index) { + if (index === 0) return Int8Array; + if (index === 1) return Uint8Array; + if (index === 2) return Uint8ClampedArray; + if (index === 3) return Int16Array; + if (index === 4) return Uint16Array; + if (index === 5) return Int32Array; + if (index === 6) return Uint32Array; + if (index === 7) return Float32Array; + if (index === 8) return Float64Array; + if (index === 9) return DataView; + if (index === 10) return FastBuffer; + if (index === 11) return BigInt64Array; + if (index === 12) return BigUint64Array; + if (index === 13) return Float16Array; + return undefined; +} + +class DefaultSerializer extends Serializer { + constructor() { + super(); + + this._setTreatArrayBufferViewsAsHostObjects(true); + } + + /** + * Used to write some kind of host object, i.e. an + * object that is created by native C++ bindings. 
+ * @param {object} abView + * @returns {void} + */ + _writeHostObject(abView) { + // Keep track of how to handle different ArrayBufferViews. The default + // Serializer for Node does not use the V8 methods for serializing those + // objects because Node's `Buffer` objects use pooled allocation in many + // cases, and their underlying `ArrayBuffer`s would show up in the + // serialization. Because a) those may contain sensitive data and the user + // may not be aware of that and b) they are often much larger than the + // `Buffer` itself, custom serialization is applied. + let i = 10; // FastBuffer + if (abView.constructor !== Buffer) { + i = arrayBufferViewTypeToIndex(abView); + if (i === -1) { + throw new this._getDataCloneError( + `Unserializable host object: ${inspect(abView)}`); + } + } + this.writeUint32(i); + this.writeUint32(abView.byteLength); + this.writeRawBytes(new Uint8Array(abView.buffer, + abView.byteOffset, + abView.byteLength)); + } +} + +class DefaultDeserializer extends Deserializer { + /** + * Used to read some kind of host object, i.e. an + * object that is created by native C++ bindings. + * @returns {any} + */ + _readHostObject() { + const typeIndex = this.readUint32(); + const ctor = arrayBufferViewIndexToType(typeIndex); + const byteLength = this.readUint32(); + const byteOffset = this._readRawBytes(byteLength); + const BYTES_PER_ELEMENT = ctor.BYTES_PER_ELEMENT || 1; + + const offset = this.buffer.byteOffset + byteOffset; + if (offset % BYTES_PER_ELEMENT === 0) { + return new ctor(this.buffer.buffer, + offset, + byteLength / BYTES_PER_ELEMENT); + } + // Copy to an aligned buffer first. + const buffer_copy = Buffer.allocUnsafe(byteLength); + buffer_copy.set(new Uint8Array(this.buffer.buffer, this.buffer.byteOffset + byteOffset, byteLength)); + return new ctor(buffer_copy.buffer, + buffer_copy.byteOffset, + byteLength / BYTES_PER_ELEMENT); + } +} + +/** + * Uses a `DefaultSerializer` to serialize `value` + * into a buffer. + * @param {any} value + * @returns {Buffer} + */ +function serialize(value) { + const ser = new DefaultSerializer(); + ser.writeHeader(); + ser.writeValue(value); + return ser.releaseBuffer(); +} + +/** + * Uses a `DefaultDeserializer` with default options + * to read a JavaScript value from a buffer. 
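The `DefaultSerializer`/`DefaultDeserializer` pair backs the public `v8.serialize()`/`v8.deserialize()` helpers defined just below; a round-trip sketch with illustrative values:

```js
const v8 = require('node:v8');

const buf = v8.serialize({ n: 1, bytes: new Uint8Array([1, 2, 3]) });
const copy = v8.deserialize(buf);
console.log(copy.bytes); // Uint8Array(3) [ 1, 2, 3 ]
```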
 * @param {Buffer | TypedArray | DataView} buffer
+ * @returns {any}
+ */
+function deserialize(buffer) {
+  const der = new DefaultDeserializer(buffer);
+  der.readHeader();
+  return der.readValue();
+}
+
+class GCProfiler {
+  #profiler = null;
+
+  start() {
+    if (!this.#profiler) {
+      this.#profiler = new binding.GCProfiler();
+      this.#profiler.start();
+    }
+  }
+
+  stop() {
+    if (this.#profiler) {
+      const data = this.#profiler.stop();
+      this.#profiler = null;
+      return JSONParse(data);
+    }
+  }
+}
+
+module.exports = {
+  cachedDataVersionTag,
+  getHeapSnapshot,
+  getHeapStatistics,
+  getHeapSpaceStatistics,
+  getHeapCodeStatistics,
+  getCppHeapStatistics,
+  setFlagsFromString,
+  Serializer,
+  Deserializer,
+  DefaultSerializer,
+  DefaultDeserializer,
+  deserialize,
+  takeCoverage: profiler.takeCoverage,
+  stopCoverage: profiler.stopCoverage,
+  serialize,
+  writeHeapSnapshot,
+  promiseHooks,
+  queryObjects,
+  startupSnapshot,
+  setHeapSnapshotNearHeapLimit,
+  GCProfiler,
+  isStringOneByteRepresentation,
+};
\ No newline at end of file
diff --git a/.codesandbox/node/vm.js b/.codesandbox/node/vm.js
new file mode 100644
index 00000000..8b674831
--- /dev/null
+++ b/.codesandbox/node/vm.js
@@ -0,0 +1,655 @@
+'use strict';
+
+const {
+  ArrayPrototypeForEach,
+  ObjectFreeze,
+  PromiseReject,
+  ReflectApply,
+  Symbol,
+} = primordials;
+
+const {
+  ContextifyScript,
+  makeContext,
+  constants,
+  measureMemory: _measureMemory,
+} = internalBinding('contextify');
+const {
+  ERR_CONTEXT_NOT_INITIALIZED,
+  ERR_INVALID_ARG_TYPE,
+} = require('internal/errors').codes;
+const {
+  validateArray,
+  validateBoolean,
+  validateBuffer,
+  validateInt32,
+  validateOneOf,
+  validateObject,
+  validateString,
+  validateStringArray,
+  validateUint32,
+  kValidateObjectAllowArray,
+  kValidateObjectAllowNullable,
+} = require('internal/validators');
+const {
+  emitExperimentalWarning,
+  kEmptyObject,
+  kVmBreakFirstLineSymbol,
+} = require('internal/util');
+const {
+  getHostDefinedOptionId,
+  internalCompileFunction,
+  isContext: _isContext,
+  registerImportModuleDynamically,
+} = require('internal/vm');
+const {
+  vm_dynamic_import_main_context_default,
+  vm_context_no_contextify,
+} = internalBinding('symbols');
+const kParsingContext = Symbol('script parsing context');
+
+/**
+ * Check if object is a context object created by vm.createContext().
+ * @throws {TypeError} If object is not an object in the first place, throws TypeError.
+ * @param {object} object Object to check.
+ * @returns {boolean}
+ */
+function isContext(object) {
+  validateObject(object, 'object', kValidateObjectAllowArray);
+
+  return _isContext(object);
+}
+
+class Script extends ContextifyScript {
+  constructor(code, options = kEmptyObject) {
+    code = `${code}`;
+    if (typeof options === 'string') {
+      options = { filename: options };
+    } else {
+      validateObject(options, 'options');
+    }
+
+    const {
+      filename = 'evalmachine.<anonymous>',
+      lineOffset = 0,
+      columnOffset = 0,
+      cachedData,
+      produceCachedData = false,
+      importModuleDynamically,
+      [kParsingContext]: parsingContext,
+    } = options;
+
+    validateString(filename, 'options.filename');
+    validateInt32(lineOffset, 'options.lineOffset');
+    validateInt32(columnOffset, 'options.columnOffset');
+    if (cachedData !== undefined) {
+      validateBuffer(cachedData, 'options.cachedData');
+    }
+    validateBoolean(produceCachedData, 'options.produceCachedData');
+
+    const hostDefinedOptionId =
+      getHostDefinedOptionId(importModuleDynamically, filename);
+    // Calling `ReThrow()` on a native TryCatch does not generate a new
+    // abort-on-uncaught-exception check. A dummy try/catch in JS land
+    // protects against that.
+    try { // eslint-disable-line no-useless-catch
+      super(code,
+            filename,
+            lineOffset,
+            columnOffset,
+            cachedData,
+            produceCachedData,
+            parsingContext,
+            hostDefinedOptionId);
+    } catch (e) {
+      throw e; /* node-do-not-add-exception-line */
+    }
+
+    registerImportModuleDynamically(this, importModuleDynamically);
+  }
+
+  runInThisContext(options) {
+    const { breakOnSigint, args } = getRunInContextArgs(null, options);
+    if (breakOnSigint && process.listenerCount('SIGINT') > 0) {
+      return sigintHandlersWrap(super.runInContext, this, args);
+    }
+    return ReflectApply(super.runInContext, this, args);
+  }
+
+  runInContext(contextifiedObject, options) {
+    validateContext(contextifiedObject);
+    const { breakOnSigint, args } = getRunInContextArgs(
+      contextifiedObject,
+      options,
+    );
+    if (breakOnSigint && process.listenerCount('SIGINT') > 0) {
+      return sigintHandlersWrap(super.runInContext, this, args);
+    }
+    return ReflectApply(super.runInContext, this, args);
+  }
+
+  runInNewContext(contextObject, options) {
+    const context = createContext(contextObject, getContextOptions(options));
+    return this.runInContext(context, options);
+  }
+}
+
+function validateContext(contextifiedObject) {
+  if (!isContext(contextifiedObject)) {
+    throw new ERR_INVALID_ARG_TYPE('contextifiedObject', 'vm.Context',
+                                   contextifiedObject);
+  }
+}
+
+function getRunInContextArgs(contextifiedObject, options = kEmptyObject) {
+  validateObject(options, 'options');
+
+  let timeout = options.timeout;
+  if (timeout === undefined) {
+    timeout = -1;
+  } else {
+    validateUint32(timeout, 'options.timeout', true);
+  }
+
+  const {
+    displayErrors = true,
+    breakOnSigint = false,
+    [kVmBreakFirstLineSymbol]: breakFirstLine = false,
+  } = options;
+
+  validateBoolean(displayErrors, 'options.displayErrors');
+  validateBoolean(breakOnSigint, 'options.breakOnSigint');
+
+  return {
+    breakOnSigint,
+    args: [
+      contextifiedObject,
+      timeout,
+      displayErrors,
+      breakOnSigint,
+      breakFirstLine,
+    ],
+  };
+}
+
+function getContextOptions(options) {
+  if (!options)
+    return {};
+  const contextOptions = {
+    name: options.contextName,
+    origin: options.contextOrigin,
+    codeGeneration: undefined,
+    microtaskMode: options.microtaskMode,
+  };
+  if (contextOptions.name !== undefined)
+<<<<<<< HEAD
+    validateString(contextOptions.name, 'options.contextName');
+  if (contextOptions.origin !== undefined)
+    validateString(contextOptions.origin, 'options.contextOrigin');
+  if (options.contextCodeGeneration !== undefined) {
+    validateObject(options.contextCodeGeneration,
+                   'options.contextCodeGeneration');
+    const { strings, wasm } = 
options.contextCodeGeneration; + if (strings !== undefined) + validateBoolean(strings, 'options.contextCodeGeneration.strings'); + if (wasm !== undefined) + validateBoolean(wasm, 'options.contextCodeGeneration.wasm'); + contextOptions.codeGeneration = { strings, wasm }; + } + if (options.microtaskMode !== undefined) + validateString(options.microtaskMode, 'options.microtaskMode'); +======= + validateString(contextOptions.name, "options.contextName"); + if (contextOptions.origin !== undefined) + validateString(contextOptions.origin, "options.contextOrigin"); + if (options.contextCodeGeneration !== undefined) { + validateObject( + options.contextCodeGeneration, + "options.contextCodeGeneration" + ); + const { strings, wasm } = options.contextCodeGeneration; + if (strings !== undefined) + validateBoolean(strings, "options.contextCodeGeneration.strings"); + if (wasm !== undefined) + validateBoolean(wasm, "options.contextCodeGeneration.wasm"); + contextOptions.codeGeneration = { strings, wasm }; + } + if (options.microtaskMode !== undefined) + validateString(options.microtaskMode, "options.microtaskMode"); +>>>>>>> refs/remotes/origin/master + return contextOptions; +} + +let defaultContextNameIndex = 1; +function createContext(contextObject = {}, options = kEmptyObject) { + if (contextObject !== vm_context_no_contextify && isContext(contextObject)) { + return contextObject; + } + +<<<<<<< HEAD + validateObject(options, 'options'); +======= + validateObject(options, "options"); +>>>>>>> refs/remotes/origin/master + + const { + name = `VM Context ${defaultContextNameIndex++}`, + origin, + codeGeneration, + microtaskMode, + importModuleDynamically, + } = options; + +<<<<<<< HEAD + validateString(name, 'options.name'); + if (origin !== undefined) + validateString(origin, 'options.origin'); + if (codeGeneration !== undefined) + validateObject(codeGeneration, 'options.codeGeneration'); +======= + validateString(name, "options.name"); + if (origin !== undefined) validateString(origin, "options.origin"); + if (codeGeneration !== undefined) + validateObject(codeGeneration, "options.codeGeneration"); +>>>>>>> refs/remotes/origin/master + + let strings = true; + let wasm = true; + if (codeGeneration !== undefined) { + ({ strings = true, wasm = true } = codeGeneration); +<<<<<<< HEAD + validateBoolean(strings, 'options.codeGeneration.strings'); + validateBoolean(wasm, 'options.codeGeneration.wasm'); + } + + validateOneOf(microtaskMode, + 'options.microtaskMode', + ['afterEvaluate', undefined]); + const microtaskQueue = (microtaskMode === 'afterEvaluate'); + + const hostDefinedOptionId = + getHostDefinedOptionId(importModuleDynamically, name); + + const result = makeContext(contextObject, name, origin, strings, wasm, microtaskQueue, hostDefinedOptionId); +======= + validateBoolean(strings, "options.codeGeneration.strings"); + validateBoolean(wasm, "options.codeGeneration.wasm"); + } + + validateOneOf(microtaskMode, "options.microtaskMode", [ + "afterEvaluate", + undefined, + ]); + const microtaskQueue = microtaskMode === "afterEvaluate"; + + const hostDefinedOptionId = getHostDefinedOptionId( + importModuleDynamically, + name + ); + + const result = makeContext( + contextObject, + name, + origin, + strings, + wasm, + microtaskQueue, + hostDefinedOptionId + ); +>>>>>>> refs/remotes/origin/master + // Register the context scope callback after the context was initialized. 
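+  // Usage sketch (names are illustrative): const ctx = createContext({ count: 0 });
+  // runInContext('count += 1', ctx) then leaves ctx.count === 1.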
+ registerImportModuleDynamically(result, importModuleDynamically); + return result; +} + +function createScript(code, options) { + return new Script(code, options); +} + +// Remove all SIGINT listeners and re-attach them after the wrapped function +// has executed, so that caught SIGINT are handled by the listeners again. +function sigintHandlersWrap(fn, thisArg, argsArray) { +<<<<<<< HEAD + const sigintListeners = process.rawListeners('SIGINT'); + + process.removeAllListeners('SIGINT'); +======= + const sigintListeners = process.rawListeners("SIGINT"); + + process.removeAllListeners("SIGINT"); +>>>>>>> refs/remotes/origin/master + + try { + return ReflectApply(fn, thisArg, argsArray); + } finally { + // Add using the public methods so that the `newListener` handler of + // process can re-attach the listeners. + ArrayPrototypeForEach(sigintListeners, (listener) => { +<<<<<<< HEAD + process.addListener('SIGINT', listener); +======= + process.addListener("SIGINT", listener); +>>>>>>> refs/remotes/origin/master + }); + } +} + +function runInContext(code, contextifiedObject, options) { + validateContext(contextifiedObject); +<<<<<<< HEAD + if (typeof options === 'string') { +======= + if (typeof options === "string") { +>>>>>>> refs/remotes/origin/master + options = { + filename: options, + [kParsingContext]: contextifiedObject, + }; + } else { + options = { ...options, [kParsingContext]: contextifiedObject }; + } +<<<<<<< HEAD + return createScript(code, options) + .runInContext(contextifiedObject, options); +} + +function runInNewContext(code, contextObject, options) { + if (typeof options === 'string') { +======= + return createScript(code, options).runInContext(contextifiedObject, options); +} + +function runInNewContext(code, contextObject, options) { + if (typeof options === "string") { +>>>>>>> refs/remotes/origin/master + options = { filename: options }; + } + contextObject = createContext(contextObject, getContextOptions(options)); + options = { ...options, [kParsingContext]: contextObject }; + return createScript(code, options).runInNewContext(contextObject, options); +} + +function runInThisContext(code, options) { +<<<<<<< HEAD + if (typeof options === 'string') { +======= + if (typeof options === "string") { +>>>>>>> refs/remotes/origin/master + options = { filename: options }; + } + return createScript(code, options).runInThisContext(options); +} + +function compileFunction(code, params, options = kEmptyObject) { +<<<<<<< HEAD + validateString(code, 'code'); + validateObject(options, 'options'); + if (params !== undefined) { + validateStringArray(params, 'params'); + } + const { + filename = '', +======= + validateString(code, "code"); + validateObject(options, "options"); + if (params !== undefined) { + validateStringArray(params, "params"); + } + const { + filename = "", +>>>>>>> refs/remotes/origin/master + columnOffset = 0, + lineOffset = 0, + cachedData = undefined, + produceCachedData = false, + parsingContext = undefined, + contextExtensions = [], + importModuleDynamically, + } = options; + +<<<<<<< HEAD + validateString(filename, 'options.filename'); + validateInt32(columnOffset, 'options.columnOffset'); + validateInt32(lineOffset, 'options.lineOffset'); + if (cachedData !== undefined) + validateBuffer(cachedData, 'options.cachedData'); + validateBoolean(produceCachedData, 'options.produceCachedData'); + if (parsingContext !== undefined) { + if ( + typeof parsingContext !== 'object' || +======= + validateString(filename, "options.filename"); + validateInt32(columnOffset, 
"options.columnOffset"); + validateInt32(lineOffset, "options.lineOffset"); + if (cachedData !== undefined) + validateBuffer(cachedData, "options.cachedData"); + validateBoolean(produceCachedData, "options.produceCachedData"); + if (parsingContext !== undefined) { + if ( + typeof parsingContext !== "object" || +>>>>>>> refs/remotes/origin/master + parsingContext === null || + !isContext(parsingContext) + ) { + throw new ERR_INVALID_ARG_TYPE( +<<<<<<< HEAD + 'options.parsingContext', + 'Context', + parsingContext, + ); + } + } + validateArray(contextExtensions, 'options.contextExtensions'); +======= + "options.parsingContext", + "Context", + parsingContext + ); + } + } + validateArray(contextExtensions, "options.contextExtensions"); +>>>>>>> refs/remotes/origin/master + ArrayPrototypeForEach(contextExtensions, (extension, i) => { + const name = `options.contextExtensions[${i}]`; + validateObject(extension, name, kValidateObjectAllowNullable); + }); + +<<<<<<< HEAD + const hostDefinedOptionId = + getHostDefinedOptionId(importModuleDynamically, filename); + + return internalCompileFunction( + code, filename, lineOffset, columnOffset, + cachedData, produceCachedData, parsingContext, contextExtensions, + params, hostDefinedOptionId, importModuleDynamically, +======= + const hostDefinedOptionId = getHostDefinedOptionId( + importModuleDynamically, + filename + ); + + return internalCompileFunction( + code, + filename, + lineOffset, + columnOffset, + cachedData, + produceCachedData, + parsingContext, + contextExtensions, + params, + hostDefinedOptionId, + importModuleDynamically +>>>>>>> refs/remotes/origin/master + ).function; +} + +const measureMemoryModes = { + summary: constants.measureMemory.mode.SUMMARY, + detailed: constants.measureMemory.mode.DETAILED, +}; + +const measureMemoryExecutions = { + default: constants.measureMemory.execution.DEFAULT, + eager: constants.measureMemory.execution.EAGER, +}; + +function measureMemory(options = kEmptyObject) { +<<<<<<< HEAD + emitExperimentalWarning('vm.measureMemory'); + validateObject(options, 'options'); + const { mode = 'summary', execution = 'default' } = options; + validateOneOf(mode, 'options.mode', ['summary', 'detailed']); + validateOneOf(execution, 'options.execution', ['default', 'eager']); + const result = _measureMemory(measureMemoryModes[mode], + measureMemoryExecutions[execution]); +======= + emitExperimentalWarning("vm.measureMemory"); + validateObject(options, "options"); + const { mode = "summary", execution = "default" } = options; + validateOneOf(mode, "options.mode", ["summary", "detailed"]); + validateOneOf(execution, "options.execution", ["default", "eager"]); + const result = _measureMemory( + measureMemoryModes[mode], + measureMemoryExecutions[execution] + ); +>>>>>>> refs/remotes/origin/master + if (result === undefined) { + return PromiseReject(new ERR_CONTEXT_NOT_INITIALIZED()); + } + return result; +} + +const vmConstants = { + __proto__: null, + USE_MAIN_CONTEXT_DEFAULT_LOADER: vm_dynamic_import_main_context_default, + DONT_CONTEXTIFY: vm_context_no_contextify, +}; + +ObjectFreeze(vmConstants); + +module.exports = { + Script, + createContext, + createScript, + runInContext, + runInNewContext, + runInThisContext, + isContext, + compileFunction, + measureMemory, + constants: vmConstants, +}; + +// The vm module is patched to include vm.Module, vm.SourceTextModule +// and vm.SyntheticModule in the pre-execution phase when +<<<<<<< HEAD +// --experimental-vm-modules is on. +======= +// --experimental-vm-modules is on. 
+>>>>>>> refs/remotes/origin/master diff --git a/.codesandbox/node/wasi.js b/.codesandbox/node/wasi.js new file mode 100644 index 00000000..71dbc60a --- /dev/null +++ b/.codesandbox/node/wasi.js @@ -0,0 +1,176 @@ +'use strict'; +const { + ArrayPrototypeForEach, + ArrayPrototypeMap, + ArrayPrototypePush, + FunctionPrototypeBind, + ObjectEntries, + String, + Symbol, +} = primordials; + +const { + ERR_INVALID_ARG_VALUE, + ERR_WASI_ALREADY_STARTED, +} = require('internal/errors').codes; +const { + emitExperimentalWarning, + kEmptyObject, +} = require('internal/util'); +const { + validateArray, + validateBoolean, + validateFunction, + validateInt32, + validateObject, + validateString, + validateUndefined, +} = require('internal/validators'); +const kExitCode = Symbol('kExitCode'); +const kSetMemory = Symbol('kSetMemory'); +const kStarted = Symbol('kStarted'); +const kInstance = Symbol('kInstance'); +const kBindingName = Symbol('kBindingName'); + +emitExperimentalWarning('WASI'); + +class WASI { + constructor(options = kEmptyObject) { + validateObject(options, 'options'); + + let _WASI; + validateString(options.version, 'options.version'); + switch (options.version) { + case 'unstable': + ({ WASI: _WASI } = internalBinding('wasi')); + this[kBindingName] = 'wasi_unstable'; + break; + case 'preview1': + ({ WASI: _WASI } = internalBinding('wasi')); + this[kBindingName] = 'wasi_snapshot_preview1'; + break; + // When adding support for additional wasi versions add case here + default: + throw new ERR_INVALID_ARG_VALUE('options.version', + options.version, + 'unsupported WASI version'); + } + + if (options.args !== undefined) + validateArray(options.args, 'options.args'); + const args = ArrayPrototypeMap(options.args || [], String); + + const env = []; + if (options.env !== undefined) { + validateObject(options.env, 'options.env'); + ArrayPrototypeForEach( + ObjectEntries(options.env), + ({ 0: key, 1: value }) => { + if (value !== undefined) + ArrayPrototypePush(env, `${key}=${value}`); + }); + } + + const preopens = []; + if (options.preopens !== undefined) { + validateObject(options.preopens, 'options.preopens'); + ArrayPrototypeForEach( + ObjectEntries(options.preopens), + ({ 0: key, 1: value }) => + ArrayPrototypePush(preopens, String(key), String(value)), + ); + } + + const { stdin = 0, stdout = 1, stderr = 2 } = options; + validateInt32(stdin, 'options.stdin', 0); + validateInt32(stdout, 'options.stdout', 0); + validateInt32(stderr, 'options.stderr', 0); + const stdio = [stdin, stdout, stderr]; + + const wrap = new _WASI(args, env, preopens, stdio); + + for (const prop in wrap) { + wrap[prop] = FunctionPrototypeBind(wrap[prop], wrap); + } + + let returnOnExit = true; + if (options.returnOnExit !== undefined) { + validateBoolean(options.returnOnExit, 'options.returnOnExit'); + returnOnExit = options.returnOnExit; + } + if (returnOnExit) + wrap.proc_exit = FunctionPrototypeBind(wasiReturnOnProcExit, this); + + this[kSetMemory] = wrap._setMemory; + delete wrap._setMemory; + this.wasiImport = wrap; + this[kStarted] = false; + this[kExitCode] = 0; + this[kInstance] = undefined; + } + + finalizeBindings(instance, { + memory = instance?.exports?.memory, + } = {}) { + if (this[kStarted]) { + throw new ERR_WASI_ALREADY_STARTED(); + } + + validateObject(instance, 'instance'); + validateObject(instance.exports, 'instance.exports'); + + this[kSetMemory](memory); + + this[kInstance] = instance; + this[kStarted] = true; + } + + // Must not export _initialize, must export _start + start(instance) { + 
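+    // Typical call sequence sketch (wasmBuffer is an assumed, caller-provided
+    // WASI preview1 binary):
+    //   const wasi = new WASI({ version: 'preview1' });
+    //   const { instance } = await WebAssembly.instantiate(wasmBuffer, wasi.getImportObject());
+    //   wasi.start(instance);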
this.finalizeBindings(instance); + + const { _start, _initialize } = this[kInstance].exports; + + validateFunction(_start, 'instance.exports._start'); + validateUndefined(_initialize, 'instance.exports._initialize'); + + try { + _start(); + } catch (err) { + if (err !== kExitCode) { + throw err; + } + } + + return this[kExitCode]; + } + + // Must not export _start, may optionally export _initialize + initialize(instance) { + this.finalizeBindings(instance); + + const { _start, _initialize } = this[kInstance].exports; + + validateUndefined(_start, 'instance.exports._start'); + if (_initialize !== undefined) { + validateFunction(_initialize, 'instance.exports._initialize'); + _initialize(); + } + } + + getImportObject() { + return { [this[kBindingName]]: this.wasiImport }; + } +} + +module.exports = { WASI }; + + +function wasiReturnOnProcExit(rval) { + // If __wasi_proc_exit() does not terminate the process, an assertion is + // triggered in the wasm runtime. Node can sidestep the assertion and return + // an exit code by recording the exit code, and throwing a JavaScript + // exception that WebAssembly cannot catch. + this[kExitCode] = rval; + throw kExitCode; +} \ No newline at end of file diff --git a/.codesandbox/node/worker_pool.js b/.codesandbox/node/worker_pool.js new file mode 100644 index 00000000..bc0e59c7 --- /dev/null +++ b/.codesandbox/node/worker_pool.js @@ -0,0 +1,13 @@ +const WorkerPool = require('./worker_pool.js'); +const os = require('node:os'); + +const pool = new WorkerPool(os.availableParallelism()); + +let finished = 0; +for (let i = 0; i < 10; i++) { + pool.runTask({ a: 42, b: 100 }, (err, result) => { + console.log(i, err, result); + if (++finished === 10) + pool.close(); + }); +} \ No newline at end of file diff --git a/.codesandbox/node/worker_threads.js b/.codesandbox/node/worker_threads.js new file mode 100644 index 00000000..a8e42ebb --- /dev/null +++ b/.codesandbox/node/worker_threads.js @@ -0,0 +1,81 @@ +<<<<<<< HEAD +'use strict'; +======= +"use strict"; +>>>>>>> refs/remotes/origin/master + +const { + isInternalThread, + isMainThread, + SHARE_ENV, + resourceLimits, + setEnvironmentData, + getEnvironmentData, + threadId, + threadName, + Worker, +<<<<<<< HEAD +} = require('internal/worker'); +======= +} = require("internal/worker"); +>>>>>>> refs/remotes/origin/master + +const { + MessagePort, + MessageChannel, + markAsUncloneable, + moveMessagePortToContext, + receiveMessageOnPort, + BroadcastChannel, +<<<<<<< HEAD +} = require('internal/worker/io'); + +const { + postMessageToThread, +} = require('internal/worker/messaging'); +======= +} = require("internal/worker/io"); + +const { postMessageToThread } = require("internal/worker/messaging"); +>>>>>>> refs/remotes/origin/master + +const { + markAsUntransferable, + isMarkedAsUntransferable, +<<<<<<< HEAD +} = require('internal/buffer'); + +const { locks } = require('internal/locks'); +======= +} = require("internal/buffer"); + +const { locks } = require("internal/locks"); +>>>>>>> refs/remotes/origin/master + +module.exports = { + isInternalThread, + isMainThread, + MessagePort, + MessageChannel, + markAsUncloneable, + markAsUntransferable, + isMarkedAsUntransferable, + moveMessagePortToContext, + receiveMessageOnPort, + resourceLimits, + postMessageToThread, + threadId, + threadName, + SHARE_ENV, + Worker, + parentPort: null, + workerData: null, + BroadcastChannel, + setEnvironmentData, + getEnvironmentData, + locks, +<<<<<<< HEAD +}; +======= +}; +>>>>>>> refs/remotes/origin/master diff --git 
a/.codesandbox/node/zlib.js b/.codesandbox/node/zlib.js new file mode 100644 index 00000000..666aac34 --- /dev/null +++ b/.codesandbox/node/zlib.js @@ -0,0 +1,1543 @@ +<<<<<<< HEAD +'use strict'; +======= +"use strict"; +>>>>>>> refs/remotes/origin/master + +const { + ArrayBuffer, + MathMax, + NumberIsNaN, + ObjectDefineProperties, + ObjectDefineProperty, + ObjectEntries, + ObjectFreeze, + ObjectKeys, + ObjectSetPrototypeOf, + ReflectApply, + Symbol, + Uint32Array, +} = primordials; + +const { + codes: { + ERR_BROTLI_INVALID_PARAM, + ERR_BUFFER_TOO_LARGE, + ERR_INVALID_ARG_TYPE, + ERR_OUT_OF_RANGE, + ERR_TRAILING_JUNK_AFTER_STREAM_END, + ERR_ZSTD_INVALID_PARAM, + }, + genericNodeError, +<<<<<<< HEAD +} = require('internal/errors'); +const { Transform, finished } = require('stream'); +const { + deprecateInstantiation, +} = require('internal/util'); +======= +} = require("internal/errors"); +const { Transform, finished } = require("stream"); +const { deprecateInstantiation } = require("internal/util"); +>>>>>>> refs/remotes/origin/master +const { + isArrayBufferView, + isAnyArrayBuffer, + isUint8Array, +<<<<<<< HEAD +} = require('internal/util/types'); +const binding = internalBinding('zlib'); +const { crc32: crc32Native } = binding; +const assert = require('internal/assert'); +const { + Buffer, + kMaxLength, +} = require('buffer'); +const { owner_symbol } = require('internal/async_hooks').symbols; +======= +} = require("internal/util/types"); +const binding = internalBinding("zlib"); +const { crc32: crc32Native } = binding; +const assert = require("internal/assert"); +const { Buffer, kMaxLength } = require("buffer"); +const { owner_symbol } = require("internal/async_hooks").symbols; +>>>>>>> refs/remotes/origin/master +const { + checkRangesOrGetDefault, + validateFunction, + validateUint32, + validateFiniteNumber, +<<<<<<< HEAD +} = require('internal/validators'); + +const kFlushFlag = Symbol('kFlushFlag'); +const kError = Symbol('kError'); + +const constants = internalBinding('constants').zlib; +const { + // Zlib flush levels + Z_NO_FLUSH, Z_BLOCK, Z_PARTIAL_FLUSH, Z_SYNC_FLUSH, Z_FULL_FLUSH, Z_FINISH, + // Zlib option values + Z_MIN_CHUNK, Z_MIN_WINDOWBITS, Z_MAX_WINDOWBITS, Z_MIN_LEVEL, Z_MAX_LEVEL, + Z_MIN_MEMLEVEL, Z_MAX_MEMLEVEL, Z_DEFAULT_CHUNK, Z_DEFAULT_COMPRESSION, + Z_DEFAULT_STRATEGY, Z_DEFAULT_WINDOWBITS, Z_DEFAULT_MEMLEVEL, Z_FIXED, + // Node's compression stream modes (node_zlib_mode) + DEFLATE, DEFLATERAW, INFLATE, INFLATERAW, GZIP, GUNZIP, UNZIP, + BROTLI_DECODE, BROTLI_ENCODE, + ZSTD_COMPRESS, ZSTD_DECOMPRESS, + // Brotli operations (~flush levels) + BROTLI_OPERATION_PROCESS, BROTLI_OPERATION_FLUSH, + BROTLI_OPERATION_FINISH, BROTLI_OPERATION_EMIT_METADATA, + // Zstd end directives (~flush levels) + ZSTD_e_continue, ZSTD_e_flush, ZSTD_e_end, +======= +} = require("internal/validators"); + +const kFlushFlag = Symbol("kFlushFlag"); +const kError = Symbol("kError"); + +const constants = internalBinding("constants").zlib; +const { + // Zlib flush levels + Z_NO_FLUSH, + Z_BLOCK, + Z_PARTIAL_FLUSH, + Z_SYNC_FLUSH, + Z_FULL_FLUSH, + Z_FINISH, + // Zlib option values + Z_MIN_CHUNK, + Z_MIN_WINDOWBITS, + Z_MAX_WINDOWBITS, + Z_MIN_LEVEL, + Z_MAX_LEVEL, + Z_MIN_MEMLEVEL, + Z_MAX_MEMLEVEL, + Z_DEFAULT_CHUNK, + Z_DEFAULT_COMPRESSION, + Z_DEFAULT_STRATEGY, + Z_DEFAULT_WINDOWBITS, + Z_DEFAULT_MEMLEVEL, + Z_FIXED, + // Node's compression stream modes (node_zlib_mode) + DEFLATE, + DEFLATERAW, + INFLATE, + INFLATERAW, + GZIP, + GUNZIP, + UNZIP, + BROTLI_DECODE, + BROTLI_ENCODE, + 
ZSTD_COMPRESS, + ZSTD_DECOMPRESS, + // Brotli operations (~flush levels) + BROTLI_OPERATION_PROCESS, + BROTLI_OPERATION_FLUSH, + BROTLI_OPERATION_FINISH, + BROTLI_OPERATION_EMIT_METADATA, + // Zstd end directives (~flush levels) + ZSTD_e_continue, + ZSTD_e_flush, + ZSTD_e_end, +>>>>>>> refs/remotes/origin/master +} = constants; + +// Translation table for return codes. +const codes = { + Z_OK: constants.Z_OK, + Z_STREAM_END: constants.Z_STREAM_END, + Z_NEED_DICT: constants.Z_NEED_DICT, + Z_ERRNO: constants.Z_ERRNO, + Z_STREAM_ERROR: constants.Z_STREAM_ERROR, + Z_DATA_ERROR: constants.Z_DATA_ERROR, + Z_MEM_ERROR: constants.Z_MEM_ERROR, + Z_BUF_ERROR: constants.Z_BUF_ERROR, + Z_VERSION_ERROR: constants.Z_VERSION_ERROR, +}; + +for (const ckey of ObjectKeys(codes)) { + codes[codes[ckey]] = ckey; +} + +function zlibBuffer(engine, buffer, callback) { +<<<<<<< HEAD + validateFunction(callback, 'callback'); +======= + validateFunction(callback, "callback"); +>>>>>>> refs/remotes/origin/master + // Streams do not support non-Uint8Array ArrayBufferViews yet. Convert it to a + // Buffer without copying. + if (isArrayBufferView(buffer) && !isUint8Array(buffer)) { + buffer = Buffer.from(buffer.buffer, buffer.byteOffset, buffer.byteLength); + } else if (isAnyArrayBuffer(buffer)) { + buffer = Buffer.from(buffer); + } + engine.buffers = null; + engine.nread = 0; + engine.cb = callback; +<<<<<<< HEAD + engine.on('data', zlibBufferOnData); + engine.on('error', zlibBufferOnError); + engine.on('end', zlibBufferOnEnd); +======= + engine.on("data", zlibBufferOnData); + engine.on("error", zlibBufferOnError); + engine.on("end", zlibBufferOnEnd); +>>>>>>> refs/remotes/origin/master + engine.end(buffer); +} + +function zlibBufferOnData(chunk) { + if (!this.buffers) { + this.buffers = [chunk]; + } else { + this.buffers.push(chunk); + } + this.nread += chunk.length; + if (this.nread > this._maxOutputLength) { + this.close(); +<<<<<<< HEAD + this.removeAllListeners('end'); +======= + this.removeAllListeners("end"); +>>>>>>> refs/remotes/origin/master + this.cb(new ERR_BUFFER_TOO_LARGE(this._maxOutputLength)); + } +} + +function zlibBufferOnError(err) { +<<<<<<< HEAD + this.removeAllListeners('end'); +======= + this.removeAllListeners("end"); +>>>>>>> refs/remotes/origin/master + this.cb(err); +} + +function zlibBufferOnEnd() { + let buf; + if (this.nread === 0) { + buf = Buffer.alloc(0); + } else { + const bufs = this.buffers; +<<<<<<< HEAD + buf = (bufs.length === 1 ? bufs[0] : Buffer.concat(bufs, this.nread)); + } + this.close(); + if (this._info) + this.cb(null, { buffer: buf, engine: this }); + else + this.cb(null, buf); +} + +function zlibBufferSync(engine, buffer) { + if (typeof buffer === 'string') { +======= + buf = bufs.length === 1 ? 
bufs[0] : Buffer.concat(bufs, this.nread); + } + this.close(); + if (this._info) this.cb(null, { buffer: buf, engine: this }); + else this.cb(null, buf); +} + +function zlibBufferSync(engine, buffer) { + if (typeof buffer === "string") { +>>>>>>> refs/remotes/origin/master + buffer = Buffer.from(buffer); + } else if (!isArrayBufferView(buffer)) { + if (isAnyArrayBuffer(buffer)) { + buffer = Buffer.from(buffer); + } else { + throw new ERR_INVALID_ARG_TYPE( +<<<<<<< HEAD + 'buffer', + ['string', 'Buffer', 'TypedArray', 'DataView', 'ArrayBuffer'], + buffer, +======= + "buffer", + ["string", "Buffer", "TypedArray", "DataView", "ArrayBuffer"], + buffer +>>>>>>> refs/remotes/origin/master + ); + } + } + buffer = processChunkSync(engine, buffer, engine._finishFlushFlag); +<<<<<<< HEAD + if (engine._info) + return { buffer, engine }; +======= + if (engine._info) return { buffer, engine }; +>>>>>>> refs/remotes/origin/master + return buffer; +} + +function zlibOnError(message, errno, code) { + const self = this[owner_symbol]; + // There is no way to cleanly recover. + // Continuing only obscures problems. + + const error = genericNodeError(message, { errno, code }); + error.errno = errno; + error.code = code; + self.destroy(error); + self[kError] = error; +} + +const FLUSH_BOUND = [ +<<<<<<< HEAD + [ Z_NO_FLUSH, Z_BLOCK ], + [ BROTLI_OPERATION_PROCESS, BROTLI_OPERATION_EMIT_METADATA ], + [ ZSTD_e_continue, ZSTD_e_end ], +======= + [Z_NO_FLUSH, Z_BLOCK], + [BROTLI_OPERATION_PROCESS, BROTLI_OPERATION_EMIT_METADATA], + [ZSTD_e_continue, ZSTD_e_end], +>>>>>>> refs/remotes/origin/master +]; +const FLUSH_BOUND_IDX_NORMAL = 0; +const FLUSH_BOUND_IDX_BROTLI = 1; +const FLUSH_BOUND_IDX_ZSTD = 2; + +/** + * The base class for all Zlib-style streams. + * @class + */ +function ZlibBase(opts, mode, handle, { flush, finishFlush, fullFlush }) { + let chunkSize = Z_DEFAULT_CHUNK; + let maxOutputLength = kMaxLength; + // The ZlibBase class is not exported to user land, the mode should only be + // passed in by us. 
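+  // Options sketch: zlib.createGzip({ chunkSize: 32 * 1024, maxOutputLength: 64 * 1024 * 1024 })
+  // reaches this constructor with mode === GZIP and the zlib default flush flags.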
+<<<<<<< HEAD + assert(typeof mode === 'number'); +======= + assert(typeof mode === "number"); +>>>>>>> refs/remotes/origin/master + assert(mode >= DEFLATE && mode <= ZSTD_DECOMPRESS); + + let flushBoundIdx; + if (mode === BROTLI_ENCODE || mode === BROTLI_DECODE) { + flushBoundIdx = FLUSH_BOUND_IDX_BROTLI; + } else if (mode === ZSTD_COMPRESS || mode === ZSTD_DECOMPRESS) { + flushBoundIdx = FLUSH_BOUND_IDX_ZSTD; + } else { + flushBoundIdx = FLUSH_BOUND_IDX_NORMAL; + } + + if (opts) { + chunkSize = opts.chunkSize; +<<<<<<< HEAD + if (!validateFiniteNumber(chunkSize, 'options.chunkSize')) { + chunkSize = Z_DEFAULT_CHUNK; + } else if (chunkSize < Z_MIN_CHUNK) { + throw new ERR_OUT_OF_RANGE('options.chunkSize', + `>= ${Z_MIN_CHUNK}`, chunkSize); + } + + flush = checkRangesOrGetDefault( + opts.flush, 'options.flush', + FLUSH_BOUND[flushBoundIdx][0], FLUSH_BOUND[flushBoundIdx][1], flush); + + finishFlush = checkRangesOrGetDefault( + opts.finishFlush, 'options.finishFlush', + FLUSH_BOUND[flushBoundIdx][0], FLUSH_BOUND[flushBoundIdx][1], + finishFlush); + + maxOutputLength = checkRangesOrGetDefault( + opts.maxOutputLength, 'options.maxOutputLength', + 1, kMaxLength, kMaxLength); +======= + if (!validateFiniteNumber(chunkSize, "options.chunkSize")) { + chunkSize = Z_DEFAULT_CHUNK; + } else if (chunkSize < Z_MIN_CHUNK) { + throw new ERR_OUT_OF_RANGE( + "options.chunkSize", + `>= ${Z_MIN_CHUNK}`, + chunkSize + ); + } + + flush = checkRangesOrGetDefault( + opts.flush, + "options.flush", + FLUSH_BOUND[flushBoundIdx][0], + FLUSH_BOUND[flushBoundIdx][1], + flush + ); + + finishFlush = checkRangesOrGetDefault( + opts.finishFlush, + "options.finishFlush", + FLUSH_BOUND[flushBoundIdx][0], + FLUSH_BOUND[flushBoundIdx][1], + finishFlush + ); + + maxOutputLength = checkRangesOrGetDefault( + opts.maxOutputLength, + "options.maxOutputLength", + 1, + kMaxLength, + kMaxLength + ); +>>>>>>> refs/remotes/origin/master + + if (opts.encoding || opts.objectMode || opts.writableObjectMode) { + opts = { ...opts }; + opts.encoding = null; + opts.objectMode = false; + opts.writableObjectMode = false; + } + } + + ReflectApply(Transform, this, [{ autoDestroy: true, ...opts }]); + this[kError] = null; + this.bytesWritten = 0; + this._handle = handle; + handle[owner_symbol] = this; + // Used by processCallback() and zlibOnError() + handle.onerror = zlibOnError; + this._outBuffer = Buffer.allocUnsafe(chunkSize); + this._outOffset = 0; + + this._chunkSize = chunkSize; + this._defaultFlushFlag = flush; + this._finishFlushFlag = finishFlush; + this._defaultFullFlushFlag = fullFlush; + this._info = opts?.info; + this._maxOutputLength = maxOutputLength; + + this._rejectGarbageAfterEnd = opts?.rejectGarbageAfterEnd === true; +} +ObjectSetPrototypeOf(ZlibBase.prototype, Transform.prototype); +ObjectSetPrototypeOf(ZlibBase, Transform); + +<<<<<<< HEAD +ObjectDefineProperty(ZlibBase.prototype, '_closed', { +======= +ObjectDefineProperty(ZlibBase.prototype, "_closed", { +>>>>>>> refs/remotes/origin/master + __proto__: null, + configurable: true, + enumerable: true, + get() { + return !this._handle; + }, +}); + +/** + * @this {ZlibBase} + * @returns {void} + */ +<<<<<<< HEAD +ZlibBase.prototype.reset = function() { + assert(this._handle, 'zlib binding closed'); +======= +ZlibBase.prototype.reset = function () { + assert(this._handle, "zlib binding closed"); +>>>>>>> refs/remotes/origin/master + return this._handle.reset(); +}; + +/** + * @this {ZlibBase} + * This is the _flush function called by the transform class, + * internally, when 
the last chunk has been written. + * @returns {void} + */ +<<<<<<< HEAD +ZlibBase.prototype._flush = function(callback) { + this._transform(Buffer.alloc(0), '', callback); +======= +ZlibBase.prototype._flush = function (callback) { + this._transform(Buffer.alloc(0), "", callback); +>>>>>>> refs/remotes/origin/master +}; + +/** + * @this {ZlibBase} + * Force Transform compat behavior. + * @returns {void} + */ +<<<<<<< HEAD +ZlibBase.prototype._final = function(callback) { +======= +ZlibBase.prototype._final = function (callback) { +>>>>>>> refs/remotes/origin/master + callback(); +}; + +// If a flush is scheduled while another flush is still pending, a way to figure +// out which one is the "stronger" flush is needed. +// This is currently only used to figure out which flush flag to use for the +// last chunk. +// Roughly, the following holds: +// Z_NO_FLUSH < Z_BLOCK < Z_PARTIAL_FLUSH < +// Z_SYNC_FLUSH < Z_FULL_FLUSH < Z_FINISH +const flushiness = []; +<<<<<<< HEAD +const kFlushFlagList = [Z_NO_FLUSH, Z_BLOCK, Z_PARTIAL_FLUSH, + Z_SYNC_FLUSH, Z_FULL_FLUSH, Z_FINISH]; +======= +const kFlushFlagList = [ + Z_NO_FLUSH, + Z_BLOCK, + Z_PARTIAL_FLUSH, + Z_SYNC_FLUSH, + Z_FULL_FLUSH, + Z_FINISH, +]; +>>>>>>> refs/remotes/origin/master +for (let i = 0; i < kFlushFlagList.length; i++) { + flushiness[kFlushFlagList[i]] = i; +} + +function maxFlush(a, b) { + return flushiness[a] > flushiness[b] ? a : b; +} + +// Set up a list of 'special' buffers that can be written using .write() +// from the .flush() code as a way of introducing flushing operations into the +// write sequence. +const kFlushBuffers = []; +{ + const dummyArrayBuffer = new ArrayBuffer(); + for (const flushFlag of kFlushFlagList) { + kFlushBuffers[flushFlag] = Buffer.from(dummyArrayBuffer); + kFlushBuffers[flushFlag][kFlushFlag] = flushFlag; + } +} + +<<<<<<< HEAD +ZlibBase.prototype.flush = function(kind, callback) { + if (typeof kind === 'function' || (kind === undefined && !callback)) { +======= +ZlibBase.prototype.flush = function (kind, callback) { + if (typeof kind === "function" || (kind === undefined && !callback)) { +>>>>>>> refs/remotes/origin/master + callback = kind; + kind = this._defaultFullFlushFlag; + } + + if (this.writableFinished) { +<<<<<<< HEAD + if (callback) + process.nextTick(callback); + } else if (this.writableEnded) { + if (callback) + this.once('end', callback); + } else { + this.write(kFlushBuffers[kind], '', callback); +======= + if (callback) process.nextTick(callback); + } else if (this.writableEnded) { + if (callback) this.once("end", callback); + } else { + this.write(kFlushBuffers[kind], "", callback); +>>>>>>> refs/remotes/origin/master + } +}; + +/** + * @this {import('stream').Transform} + * @param {(err?: Error) => any} [callback] + */ +<<<<<<< HEAD +ZlibBase.prototype.close = function(callback) { +======= +ZlibBase.prototype.close = function (callback) { +>>>>>>> refs/remotes/origin/master + if (callback) finished(this, callback); + this.destroy(); +}; + +<<<<<<< HEAD +ZlibBase.prototype._destroy = function(err, callback) { +======= +ZlibBase.prototype._destroy = function (err, callback) { +>>>>>>> refs/remotes/origin/master + _close(this); + callback(err); +}; + +<<<<<<< HEAD +ZlibBase.prototype._transform = function(chunk, encoding, cb) { + let flushFlag = this._defaultFlushFlag; + // We use a 'fake' zero-length chunk to carry information about flushes from + // the public API to the actual stream implementation. 
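+  // For example, gzip.flush(Z_SYNC_FLUSH) writes kFlushBuffers[Z_SYNC_FLUSH], and the
+  // check below picks up that flush flag for this otherwise empty chunk (sketch).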
+ if (typeof chunk[kFlushFlag] === 'number') { +======= +ZlibBase.prototype._transform = function (chunk, encoding, cb) { + let flushFlag = this._defaultFlushFlag; + // We use a 'fake' zero-length chunk to carry information about flushes from + // the public API to the actual stream implementation. + if (typeof chunk[kFlushFlag] === "number") { +>>>>>>> refs/remotes/origin/master + flushFlag = chunk[kFlushFlag]; + } + + // For the last chunk, also apply `_finishFlushFlag`. + if (this.writableEnded && this.writableLength === chunk.byteLength) { + flushFlag = maxFlush(flushFlag, this._finishFlushFlag); + } + processChunk(this, chunk, flushFlag, cb); +}; + +<<<<<<< HEAD +ZlibBase.prototype._processChunk = function(chunk, flushFlag, cb) { + // _processChunk() is left for backwards compatibility + if (typeof cb === 'function') + processChunk(this, chunk, flushFlag, cb); + else + return processChunkSync(this, chunk, flushFlag); +======= +ZlibBase.prototype._processChunk = function (chunk, flushFlag, cb) { + // _processChunk() is left for backwards compatibility + if (typeof cb === "function") processChunk(this, chunk, flushFlag, cb); + else return processChunkSync(this, chunk, flushFlag); +>>>>>>> refs/remotes/origin/master +}; + +function processChunkSync(self, chunk, flushFlag) { + let availInBefore = chunk.byteLength; + let availOutBefore = self._chunkSize - self._outOffset; + let inOff = 0; + let availOutAfter; + let availInAfter; + + const buffers = []; + let nread = 0; + let inputRead = 0; + const state = self._writeState; + const handle = self._handle; + let buffer = self._outBuffer; + let offset = self._outOffset; + const chunkSize = self._chunkSize; + + let error; +<<<<<<< HEAD + self.on('error', function onError(er) { +======= + self.on("error", function onError(er) { +>>>>>>> refs/remotes/origin/master + error = er; + }); + + while (true) { +<<<<<<< HEAD + handle.writeSync(flushFlag, + chunk, // in + inOff, // in_off + availInBefore, // in_len + buffer, // out + offset, // out_off + availOutBefore); // out_len + if (error) + throw error; + else if (self[kError]) + throw self[kError]; +======= + handle.writeSync( + flushFlag, + chunk, // in + inOff, // in_off + availInBefore, // in_len + buffer, // out + offset, // out_off + availOutBefore + ); // out_len + if (error) throw error; + else if (self[kError]) throw self[kError]; +>>>>>>> refs/remotes/origin/master + + availOutAfter = state[0]; + availInAfter = state[1]; + +<<<<<<< HEAD + const inDelta = (availInBefore - availInAfter); +======= + const inDelta = availInBefore - availInAfter; +>>>>>>> refs/remotes/origin/master + inputRead += inDelta; + + const have = availOutBefore - availOutAfter; + if (have > 0) { + const out = buffer.slice(offset, offset + have); + offset += have; + buffers.push(out); + nread += out.byteLength; + + if (nread > self._maxOutputLength) { + _close(self); + throw new ERR_BUFFER_TOO_LARGE(self._maxOutputLength); + } +<<<<<<< HEAD + + } else { + assert(have === 0, 'have should not go down'); +======= + } else { + assert(have === 0, "have should not go down"); +>>>>>>> refs/remotes/origin/master + } + + // Exhausted the output buffer, or used all the input create a new one. + if (availOutAfter === 0 || offset >= chunkSize) { + availOutBefore = chunkSize; + offset = 0; + buffer = Buffer.allocUnsafe(chunkSize); + } + + if (availOutAfter === 0) { + // Not actually done. Need to reprocess. + // Also, update the availInBefore to the availInAfter value, + // so that if we have to hit it a third (fourth, etc.) 
time, + // it'll have the correct byte counts. + inOff += inDelta; + availInBefore = availInAfter; + } else { + break; + } + } + + self.bytesWritten = inputRead; + _close(self); + +<<<<<<< HEAD + if (nread === 0) + return Buffer.alloc(0); + + return (buffers.length === 1 ? buffers[0] : Buffer.concat(buffers, nread)); +======= + if (nread === 0) return Buffer.alloc(0); + + return buffers.length === 1 ? buffers[0] : Buffer.concat(buffers, nread); +>>>>>>> refs/remotes/origin/master +} + +function processChunk(self, chunk, flushFlag, cb) { + const handle = self._handle; + if (!handle) return process.nextTick(cb); + + handle.buffer = chunk; + handle.cb = cb; + handle.availOutBefore = self._chunkSize - self._outOffset; + handle.availInBefore = chunk.byteLength; + handle.inOff = 0; + handle.flushFlag = flushFlag; + +<<<<<<< HEAD + handle.write(flushFlag, + chunk, // in + 0, // in_off + handle.availInBefore, // in_len + self._outBuffer, // out + self._outOffset, // out_off + handle.availOutBefore); // out_len +======= + handle.write( + flushFlag, + chunk, // in + 0, // in_off + handle.availInBefore, // in_len + self._outBuffer, // out + self._outOffset, // out_off + handle.availOutBefore + ); // out_len +>>>>>>> refs/remotes/origin/master +} + +function processCallback() { + // This callback's context (`this`) is the `_handle` (ZCtx) object. It is + // important to null out the values once they are no longer needed since + // `_handle` can stay in memory long after the buffer is needed. + const handle = this; + const self = this[owner_symbol]; + const state = self._writeState; + + if (self.destroyed) { + this.buffer = null; + this.cb(); + return; + } + + const availOutAfter = state[0]; + const availInAfter = state[1]; + + const inDelta = handle.availInBefore - availInAfter; + self.bytesWritten += inDelta; + + const have = handle.availOutBefore - availOutAfter; + let streamBufferIsFull = false; + if (have > 0) { + const out = self._outBuffer.slice(self._outOffset, self._outOffset + have); + self._outOffset += have; + streamBufferIsFull = !self.push(out); + } else { +<<<<<<< HEAD + assert(have === 0, 'have should not go down'); +======= + assert(have === 0, "have should not go down"); +>>>>>>> refs/remotes/origin/master + } + + if (self.destroyed) { + this.cb(); + return; + } + + // Exhausted the output buffer, or used all the input create a new one. + if (availOutAfter === 0 || self._outOffset >= self._chunkSize) { + handle.availOutBefore = self._chunkSize; + self._outOffset = 0; + self._outBuffer = Buffer.allocUnsafe(self._chunkSize); + } + + if (availOutAfter === 0) { + // Not actually done. Need to reprocess. + // Also, update the availInBefore to the availInAfter value, + // so that if we have to hit it a third (fourth, etc.) time, + // it'll have the correct byte counts. 
+ handle.inOff += inDelta; + handle.availInBefore = availInAfter; + +<<<<<<< HEAD + + if (!streamBufferIsFull) { + this.write(handle.flushFlag, + this.buffer, // in + handle.inOff, // in_off + handle.availInBefore, // in_len + self._outBuffer, // out + self._outOffset, // out_off + self._chunkSize); // out_len +======= + if (!streamBufferIsFull) { + this.write( + handle.flushFlag, + this.buffer, // in + handle.inOff, // in_off + handle.availInBefore, // in_len + self._outBuffer, // out + self._outOffset, // out_off + self._chunkSize + ); // out_len +>>>>>>> refs/remotes/origin/master + } else { + const oldRead = self._read; + self._read = (n) => { + self._read = oldRead; +<<<<<<< HEAD + this.write(handle.flushFlag, + this.buffer, // in + handle.inOff, // in_off + handle.availInBefore, // in_len + self._outBuffer, // out + self._outOffset, // out_off + self._chunkSize); // out_len +======= + this.write( + handle.flushFlag, + this.buffer, // in + handle.inOff, // in_off + handle.availInBefore, // in_len + self._outBuffer, // out + self._outOffset, // out_off + self._chunkSize + ); // out_len +>>>>>>> refs/remotes/origin/master + self._read(n); + }; + } + return; + } + + if (availInAfter > 0) { + // If we have more input that should be written, but we also have output + // space available, that means that the compression library was not + // interested in receiving more data, and in particular that the input + // stream has ended early. + // This applies to streams where we don't check data past the end of + // what was consumed; that is, everything except Gunzip/Unzip. + + if (self._rejectGarbageAfterEnd) { + const err = new ERR_TRAILING_JUNK_AFTER_STREAM_END(); + self.destroy(err); + this.cb(err); + return; + } + + self.push(null); + } + + // Finished with the chunk. + this.buffer = null; + this.cb(); +} + +/** + * @param {ZlibBase} engine + * @private + */ +function _close(engine) { + // Caller may invoke .close after a zlib error (which will null _handle) + engine._handle?.close(); + engine._handle = null; +} + +const zlibDefaultOpts = { + flush: Z_NO_FLUSH, + finishFlush: Z_FINISH, + fullFlush: Z_FULL_FLUSH, +}; +// Base class for all streams actually backed by zlib and using zlib-specific +// parameters. +function Zlib(opts, mode) { + let windowBits = Z_DEFAULT_WINDOWBITS; + let level = Z_DEFAULT_COMPRESSION; + let memLevel = Z_DEFAULT_MEMLEVEL; + let strategy = Z_DEFAULT_STRATEGY; + let dictionary; + + if (opts) { + // windowBits is special. On the compression side, 0 is an invalid value. + // But on the decompression side, a value of 0 for windowBits tells zlib + // to use the window size in the zlib header of the compressed stream. +<<<<<<< HEAD + if ((opts.windowBits == null || opts.windowBits === 0) && + (mode === INFLATE || + mode === GUNZIP || + mode === UNZIP)) { +======= + if ( + (opts.windowBits == null || opts.windowBits === 0) && + (mode === INFLATE || mode === GUNZIP || mode === UNZIP) + ) { +>>>>>>> refs/remotes/origin/master + windowBits = 0; + } else { + // `{ windowBits: 8 }` is valid for deflate but not gzip. + const min = Z_MIN_WINDOWBITS + (mode === GZIP ? 
1 : 0); + windowBits = checkRangesOrGetDefault( +<<<<<<< HEAD + opts.windowBits, 'options.windowBits', + min, Z_MAX_WINDOWBITS, Z_DEFAULT_WINDOWBITS); + } + + level = checkRangesOrGetDefault( + opts.level, 'options.level', + Z_MIN_LEVEL, Z_MAX_LEVEL, Z_DEFAULT_COMPRESSION); + + memLevel = checkRangesOrGetDefault( + opts.memLevel, 'options.memLevel', + Z_MIN_MEMLEVEL, Z_MAX_MEMLEVEL, Z_DEFAULT_MEMLEVEL); + + strategy = checkRangesOrGetDefault( + opts.strategy, 'options.strategy', + Z_DEFAULT_STRATEGY, Z_FIXED, Z_DEFAULT_STRATEGY); +======= + opts.windowBits, + "options.windowBits", + min, + Z_MAX_WINDOWBITS, + Z_DEFAULT_WINDOWBITS + ); + } + + level = checkRangesOrGetDefault( + opts.level, + "options.level", + Z_MIN_LEVEL, + Z_MAX_LEVEL, + Z_DEFAULT_COMPRESSION + ); + + memLevel = checkRangesOrGetDefault( + opts.memLevel, + "options.memLevel", + Z_MIN_MEMLEVEL, + Z_MAX_MEMLEVEL, + Z_DEFAULT_MEMLEVEL + ); + + strategy = checkRangesOrGetDefault( + opts.strategy, + "options.strategy", + Z_DEFAULT_STRATEGY, + Z_FIXED, + Z_DEFAULT_STRATEGY + ); +>>>>>>> refs/remotes/origin/master + + dictionary = opts.dictionary; + if (dictionary !== undefined && !isArrayBufferView(dictionary)) { + if (isAnyArrayBuffer(dictionary)) { + dictionary = Buffer.from(dictionary); + } else { + throw new ERR_INVALID_ARG_TYPE( +<<<<<<< HEAD + 'options.dictionary', + ['Buffer', 'TypedArray', 'DataView', 'ArrayBuffer'], + dictionary, +======= + "options.dictionary", + ["Buffer", "TypedArray", "DataView", "ArrayBuffer"], + dictionary +>>>>>>> refs/remotes/origin/master + ); + } + } + } + + const handle = new binding.Zlib(mode); + // Ideally, we could let ZlibBase() set up _writeState. I haven't been able + // to come up with a good solution that doesn't break our internal API, + // and with it all supported npm versions at the time of writing. + this._writeState = new Uint32Array(2); +<<<<<<< HEAD + handle.init(windowBits, + level, + memLevel, + strategy, + this._writeState, + processCallback, + dictionary); +======= + handle.init( + windowBits, + level, + memLevel, + strategy, + this._writeState, + processCallback, + dictionary + ); +>>>>>>> refs/remotes/origin/master + + ReflectApply(ZlibBase, this, [opts, mode, handle, zlibDefaultOpts]); + + this._level = level; + this._strategy = strategy; + this._mode = mode; +} +ObjectSetPrototypeOf(Zlib.prototype, ZlibBase.prototype); +ObjectSetPrototypeOf(Zlib, ZlibBase); + +// This callback is used by `.params()` to wait until a full flush happened +// before adjusting the parameters. In particular, the call to the native +// `params()` function should not happen while a write is currently in progress +// on the threadpool. 
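+// Usage sketch: deflate.params(zlib.constants.Z_BEST_SPEED, zlib.constants.Z_DEFAULT_STRATEGY,
+//   () => { /* writes from here on use the new level/strategy */ });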
+function paramsAfterFlushCallback(level, strategy, callback) { +<<<<<<< HEAD + assert(this._handle, 'zlib binding closed'); +======= + assert(this._handle, "zlib binding closed"); +>>>>>>> refs/remotes/origin/master + this._handle.params(level, strategy); + if (!this.destroyed) { + this._level = level; + this._strategy = strategy; + if (callback) callback(); + } +} + +Zlib.prototype.params = function params(level, strategy, callback) { +<<<<<<< HEAD + checkRangesOrGetDefault(level, 'level', Z_MIN_LEVEL, Z_MAX_LEVEL); + checkRangesOrGetDefault(strategy, 'strategy', Z_DEFAULT_STRATEGY, Z_FIXED); +======= + checkRangesOrGetDefault(level, "level", Z_MIN_LEVEL, Z_MAX_LEVEL); + checkRangesOrGetDefault(strategy, "strategy", Z_DEFAULT_STRATEGY, Z_FIXED); +>>>>>>> refs/remotes/origin/master + + if (this._level !== level || this._strategy !== strategy) { + this.flush( + Z_SYNC_FLUSH, +<<<<<<< HEAD + paramsAfterFlushCallback.bind(this, level, strategy, callback), +======= + paramsAfterFlushCallback.bind(this, level, strategy, callback) +>>>>>>> refs/remotes/origin/master + ); + } else { + process.nextTick(callback); + } +}; + +// generic zlib +// minimal 2-byte header +function Deflate(opts) { + if (!(this instanceof Deflate)) { +<<<<<<< HEAD + return deprecateInstantiation(Deflate, 'DEP0184', opts); +======= + return deprecateInstantiation(Deflate, "DEP0184", opts); +>>>>>>> refs/remotes/origin/master + } + ReflectApply(Zlib, this, [opts, DEFLATE]); +} +ObjectSetPrototypeOf(Deflate.prototype, Zlib.prototype); +ObjectSetPrototypeOf(Deflate, Zlib); + +function Inflate(opts) { + if (!(this instanceof Inflate)) { +<<<<<<< HEAD + return deprecateInstantiation(Inflate, 'DEP0184', opts); +======= + return deprecateInstantiation(Inflate, "DEP0184", opts); +>>>>>>> refs/remotes/origin/master + } + ReflectApply(Zlib, this, [opts, INFLATE]); +} +ObjectSetPrototypeOf(Inflate.prototype, Zlib.prototype); +ObjectSetPrototypeOf(Inflate, Zlib); + +function Gzip(opts) { + if (!(this instanceof Gzip)) { +<<<<<<< HEAD + return deprecateInstantiation(Gzip, 'DEP0184', opts); +======= + return deprecateInstantiation(Gzip, "DEP0184", opts); +>>>>>>> refs/remotes/origin/master + } + ReflectApply(Zlib, this, [opts, GZIP]); +} +ObjectSetPrototypeOf(Gzip.prototype, Zlib.prototype); +ObjectSetPrototypeOf(Gzip, Zlib); + +function Gunzip(opts) { + if (!(this instanceof Gunzip)) { +<<<<<<< HEAD + return deprecateInstantiation(Gunzip, 'DEP0184', opts); +======= + return deprecateInstantiation(Gunzip, "DEP0184", opts); +>>>>>>> refs/remotes/origin/master + } + ReflectApply(Zlib, this, [opts, GUNZIP]); +} +ObjectSetPrototypeOf(Gunzip.prototype, Zlib.prototype); +ObjectSetPrototypeOf(Gunzip, Zlib); + +function DeflateRaw(opts) { + if (opts && opts.windowBits === 8) opts.windowBits = 9; + if (!(this instanceof DeflateRaw)) { +<<<<<<< HEAD + return deprecateInstantiation(DeflateRaw, 'DEP0184', opts); +======= + return deprecateInstantiation(DeflateRaw, "DEP0184", opts); +>>>>>>> refs/remotes/origin/master + } + ReflectApply(Zlib, this, [opts, DEFLATERAW]); +} +ObjectSetPrototypeOf(DeflateRaw.prototype, Zlib.prototype); +ObjectSetPrototypeOf(DeflateRaw, Zlib); + +function InflateRaw(opts) { + if (!(this instanceof InflateRaw)) { +<<<<<<< HEAD + return deprecateInstantiation(InflateRaw, 'DEP0184', opts); +======= + return deprecateInstantiation(InflateRaw, "DEP0184", opts); +>>>>>>> refs/remotes/origin/master + } + ReflectApply(Zlib, this, [opts, INFLATERAW]); +} +ObjectSetPrototypeOf(InflateRaw.prototype, Zlib.prototype); 
+ObjectSetPrototypeOf(InflateRaw, Zlib); + +function Unzip(opts) { + if (!(this instanceof Unzip)) { +<<<<<<< HEAD + return deprecateInstantiation(Unzip, 'DEP0184', opts); +======= + return deprecateInstantiation(Unzip, "DEP0184", opts); +>>>>>>> refs/remotes/origin/master + } + ReflectApply(Zlib, this, [opts, UNZIP]); +} +ObjectSetPrototypeOf(Unzip.prototype, Zlib.prototype); +ObjectSetPrototypeOf(Unzip, Zlib); + +function createConvenienceMethod(ctor, sync) { + if (sync) { + return function syncBufferWrapper(buffer, opts) { + return zlibBufferSync(new ctor(opts), buffer); + }; + } + return function asyncBufferWrapper(buffer, opts, callback) { +<<<<<<< HEAD + if (typeof opts === 'function') { +======= + if (typeof opts === "function") { +>>>>>>> refs/remotes/origin/master + callback = opts; + opts = {}; + } + return zlibBuffer(new ctor(opts), buffer, callback); + }; +} + +const kMaxBrotliParam = MathMax( +<<<<<<< HEAD + ...ObjectEntries(constants) + .map(({ 0: key, 1: value }) => (key.startsWith('BROTLI_PARAM_') ? value : 0)), +======= + ...ObjectEntries(constants).map(({ 0: key, 1: value }) => + key.startsWith("BROTLI_PARAM_") ? value : 0 + ) +>>>>>>> refs/remotes/origin/master +); +const brotliInitParamsArray = new Uint32Array(kMaxBrotliParam + 1); + +const brotliDefaultOpts = { + flush: BROTLI_OPERATION_PROCESS, + finishFlush: BROTLI_OPERATION_FINISH, + fullFlush: BROTLI_OPERATION_FLUSH, +}; +function Brotli(opts, mode) { + assert(mode === BROTLI_DECODE || mode === BROTLI_ENCODE); + + brotliInitParamsArray.fill(-1); + if (opts?.params) { + ObjectKeys(opts.params).forEach((origKey) => { + const key = +origKey; +<<<<<<< HEAD + if (NumberIsNaN(key) || key < 0 || key > kMaxBrotliParam || + (brotliInitParamsArray[key] | 0) !== -1) { +======= + if ( + NumberIsNaN(key) || + key < 0 || + key > kMaxBrotliParam || + (brotliInitParamsArray[key] | 0) !== -1 + ) { +>>>>>>> refs/remotes/origin/master + throw new ERR_BROTLI_INVALID_PARAM(origKey); + } + + const value = opts.params[origKey]; +<<<<<<< HEAD + if (typeof value !== 'number' && typeof value !== 'boolean') { + throw new ERR_INVALID_ARG_TYPE('options.params[key]', + 'number', opts.params[origKey]); +======= + if (typeof value !== "number" && typeof value !== "boolean") { + throw new ERR_INVALID_ARG_TYPE( + "options.params[key]", + "number", + opts.params[origKey] + ); +>>>>>>> refs/remotes/origin/master + } + brotliInitParamsArray[key] = value; + }); + } + +<<<<<<< HEAD + const handle = mode === BROTLI_DECODE ? + new binding.BrotliDecoder(mode) : new binding.BrotliEncoder(mode); +======= + const handle = + mode === BROTLI_DECODE + ? 
+
+  this._writeState = new Uint32Array(2);
+  handle.init(brotliInitParamsArray, this._writeState, processCallback);
+
+  ReflectApply(ZlibBase, this, [opts, mode, handle, brotliDefaultOpts]);
+}
+ObjectSetPrototypeOf(Brotli.prototype, Zlib.prototype);
+ObjectSetPrototypeOf(Brotli, Zlib);
+
+function BrotliCompress(opts) {
+  if (!(this instanceof BrotliCompress)) {
+    return deprecateInstantiation(BrotliCompress, 'DEP0184', opts);
+  }
+  ReflectApply(Brotli, this, [opts, BROTLI_ENCODE]);
+}
+ObjectSetPrototypeOf(BrotliCompress.prototype, Brotli.prototype);
+ObjectSetPrototypeOf(BrotliCompress, Brotli);
+
+function BrotliDecompress(opts) {
+  if (!(this instanceof BrotliDecompress)) {
+    return deprecateInstantiation(BrotliDecompress, 'DEP0184', opts);
+  }
+  ReflectApply(Brotli, this, [opts, BROTLI_DECODE]);
+}
+ObjectSetPrototypeOf(BrotliDecompress.prototype, Brotli.prototype);
+ObjectSetPrototypeOf(BrotliDecompress, Brotli);
+
+const zstdDefaultOpts = {
+  flush: ZSTD_e_continue,
+  finishFlush: ZSTD_e_end,
+  fullFlush: ZSTD_e_flush,
+};
+class Zstd extends ZlibBase {
+  constructor(opts, mode, initParamsArray, maxParam) {
+    assert(mode === ZSTD_COMPRESS || mode === ZSTD_DECOMPRESS);
+
+    initParamsArray.fill(-1);
+    if (opts?.params) {
+      ObjectKeys(opts.params).forEach((origKey) => {
+        const key = +origKey;
+        if (NumberIsNaN(key) || key < 0 || key > maxParam ||
+            (initParamsArray[key] | 0) !== -1) {
+          throw new ERR_ZSTD_INVALID_PARAM(origKey);
+        }
+
+        const value = opts.params[origKey];
+        if (typeof value !== 'number' && typeof value !== 'boolean') {
+          throw new ERR_INVALID_ARG_TYPE('options.params[key]',
+                                         'number', opts.params[origKey]);
+        }
+        initParamsArray[key] = value;
+      });
+    }
+
+    const handle = mode === ZSTD_COMPRESS ?
+      new binding.ZstdCompress() : new binding.ZstdDecompress();
+
+    const pledgedSrcSize = opts?.pledgedSrcSize ?? undefined;
+
+    const writeState = new Uint32Array(2);
+
+    handle.init(
+      initParamsArray,
+      pledgedSrcSize,
+      writeState,
+      processCallback,
+      opts?.dictionary && isArrayBufferView(opts.dictionary) ? opts.dictionary : undefined,
+    );
+
+    super(opts, mode, handle, zstdDefaultOpts);
+    this._writeState = writeState;
+  }
+}
+
+const kMaxZstdCParam = MathMax(...ObjectKeys(constants).map(
+  (key) => (key.startsWith('ZSTD_c_') ?
+    constants[key] :
+    0),
+));
+
+const zstdInitCParamsArray = new Uint32Array(kMaxZstdCParam + 1);
+
+class ZstdCompress extends Zstd {
+  constructor(opts) {
+    super(opts, ZSTD_COMPRESS, zstdInitCParamsArray, kMaxZstdCParam);
+  }
+}
+
+const kMaxZstdDParam = MathMax(...ObjectKeys(constants).map(
+  (key) => (key.startsWith('ZSTD_d_') ?
+    constants[key] :
+    0),
+));
+
+const zstdInitDParamsArray = new Uint32Array(kMaxZstdDParam + 1);
+
+class ZstdDecompress extends Zstd {
+  constructor(opts) {
+    super(opts, ZSTD_DECOMPRESS, zstdInitDParamsArray, kMaxZstdDParam);
+  }
+}
+
+function createProperty(ctor) {
+  return {
+    __proto__: null,
+    configurable: true,
+    enumerable: true,
+    value: function(options) {
+      return new ctor(options);
+    },
+  };
+}
+
+function crc32(data, value = 0) {
+  if (typeof data !== 'string' && !isArrayBufferView(data)) {
+    throw new ERR_INVALID_ARG_TYPE('data', ['Buffer', 'TypedArray', 'DataView', 'string'], data);
+  }
+  validateUint32(value, 'value');
+  return crc32Native(data, value);
+}
+
+// Legacy alias on the C++ wrapper object. This is not public API, so we may
+// want to runtime-deprecate it at some point. There's no hurry, though.
+ObjectDefineProperty(binding.Zlib.prototype, 'jsref', {
+  __proto__: null,
+  get() { return this[owner_symbol]; },
+  set(v) { return this[owner_symbol] = v; },
+});
+
+module.exports = {
+  crc32,
+  Deflate,
+  Inflate,
+  Gzip,
+  Gunzip,
+  DeflateRaw,
+  InflateRaw,
+  Unzip,
+  BrotliCompress,
+  BrotliDecompress,
+  ZstdCompress,
+  ZstdDecompress,
+
+  // Convenience methods.
+  // compress/decompress a string or buffer in one step.
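+  //
+  // A minimal usage sketch of these one-step helpers, assuming the module is
+  // loaded elsewhere as `const zlib = require('node:zlib')`:
+  //
+  //   zlib.gzip('hello world', (err, compressed) => {
+  //     if (err) throw err;
+  //     zlib.gunzip(compressed, (err, plain) => {
+  //       if (err) throw err;
+  //       console.log(plain.toString()); // 'hello world'
+  //     });
+  //   });
+  //
+  //   const buf = zlib.deflateSync('hello world');
+  //   console.log(zlib.inflateSync(buf).toString()); // 'hello world'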
+  deflate: createConvenienceMethod(Deflate, false),
+  deflateSync: createConvenienceMethod(Deflate, true),
+  gzip: createConvenienceMethod(Gzip, false),
+  gzipSync: createConvenienceMethod(Gzip, true),
+  deflateRaw: createConvenienceMethod(DeflateRaw, false),
+  deflateRawSync: createConvenienceMethod(DeflateRaw, true),
+  unzip: createConvenienceMethod(Unzip, false),
+  unzipSync: createConvenienceMethod(Unzip, true),
+  inflate: createConvenienceMethod(Inflate, false),
+  inflateSync: createConvenienceMethod(Inflate, true),
+  gunzip: createConvenienceMethod(Gunzip, false),
+  gunzipSync: createConvenienceMethod(Gunzip, true),
+  inflateRaw: createConvenienceMethod(InflateRaw, false),
+  inflateRawSync: createConvenienceMethod(InflateRaw, true),
+  brotliCompress: createConvenienceMethod(BrotliCompress, false),
+  brotliCompressSync: createConvenienceMethod(BrotliCompress, true),
+  brotliDecompress: createConvenienceMethod(BrotliDecompress, false),
+  brotliDecompressSync: createConvenienceMethod(BrotliDecompress, true),
+  zstdCompress: createConvenienceMethod(ZstdCompress, false),
+  zstdCompressSync: createConvenienceMethod(ZstdCompress, true),
+  zstdDecompress: createConvenienceMethod(ZstdDecompress, false),
+  zstdDecompressSync: createConvenienceMethod(ZstdDecompress, true),
+};
+
+ObjectDefineProperties(module.exports, {
+  createDeflate: createProperty(Deflate),
+  createInflate: createProperty(Inflate),
+  createDeflateRaw: createProperty(DeflateRaw),
+  createInflateRaw: createProperty(InflateRaw),
+  createGzip: createProperty(Gzip),
+  createGunzip: createProperty(Gunzip),
+  createUnzip: createProperty(Unzip),
+  createBrotliCompress: createProperty(BrotliCompress),
+  createBrotliDecompress: createProperty(BrotliDecompress),
+  createZstdCompress: createProperty(ZstdCompress),
+  createZstdDecompress: createProperty(ZstdDecompress),
+  constants: {
+    __proto__: null,
+    configurable: false,
+    enumerable: true,
+    value: constants,
+  },
+  codes: {
+    __proto__: null,
+    enumerable: true,
+    writable: false,
+    value: ObjectFreeze(codes),
+  },
+});
+
+// These should be considered deprecated
+// expose all the zlib constants
+for (const { 0: key, 1: value } of ObjectEntries(constants)) {
+  if (key.startsWith('BROTLI')) continue;
+  ObjectDefineProperty(module.exports, key, {
+    __proto__: null,
+    enumerable: false,
+    value,
+    writable: false,
+  });
+}
diff --git a/.env b/.env
new file mode 100644
index 00000000..c73b51f8
--- /dev/null
+++ b/.env
@@ -0,0 +1,4 @@
+API_="my secret api key from.env"
+DATABASE_SE="super secret"
+
+MONGO_URL=mongodb+srv://:@cluster0.a9ylqls.mongodb.net/?retryWrites=true&w=majority&appName=Cluster0
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index f1ff414e..694dae43 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,8 +1,4 @@
 node_modules
-.DS_Store
-.env
-.env.local
-.env.development.local
-.env.test.local
-.env.production.local
-package-lock.json
\ No newline at end of file
+
+# Local Netlify folder
+.netlify
diff --git a/.idx/dev.nix b/.idx/dev.nix
new file mode 100644
index 00000000..b15c7975
--- /dev/null
+++ b/.idx/dev.nix
@@ -0,0 +1,67 @@
+# To learn more about how to use Nix to configure your environment
+# see: https://firebase.google.com/docs/studio/customize-workspace
+{ pkgs, ... }: {
+  # Which nixpkgs channel to use.
+  channel = "stable-24.05"; # or "unstable"
+
+  # Use https://search.nixos.org/packages to find packages
+  packages = [
+    pkgs.gcc
+    pkgs.make
+    pkgs.pkg-config
+    pkgs.doas-sudo-shim
+    pkgs.sudo
+    pkgs.sudo-rs
+    pkgs.go
+    pkgs.python311
+    pkgs.python311Packages.pip
+    pkgs.nodejs_20
+    pkgs.nodePackages.nodemon
+    pkgs.python312Packages.pip
+    pkgs.openssh
+    pkgs.eval
+    pkgs.busybox
+    pkgs.openssh_gssapi
+    pkgs.openssh_hpn
+  ];
+
+  # Sets environment variables in the workspace
+  env = {};
+  idx = {
+    # Search for the extensions you want on https://open-vsx.org/ and use "publisher.id"
+    extensions = [
+      # "vscodevim.vim"
+    ];
+
+    # Enable previews
+    previews = {
+      enable = true;
+      previews = {
+        # web = {
+        #   # Example: run "npm run dev" with PORT set to IDX's defined port for previews,
+        #   # and show it in IDX's web preview panel
+        #   command = ["npm" "run" "dev"];
+        #   manager = "web";
+        #   env = {
+        #     # Environment variables to set for your server
+        #     PORT = "$PORT";
+        #   };
+        # };
+      };
+    };
+
+    # Workspace lifecycle hooks
+    workspace = {
+      # Runs when a workspace is first created
+      onCreate = {
+        # Example: install JS dependencies from NPM
+        # npm-install = "npm install";
+      };
+      # Runs when the workspace is (re)started
+      onStart = {
+        # Example: start a background task to watch and re-build backend code
+        # watch-backend = "npm run watch-backend";
+      };
+    };
+  };
+}
diff --git a/.vscode/launch.json b/.vscode/launch.json
new file mode 100644
index 00000000..fa18a53d
--- /dev/null
+++ b/.vscode/launch.json
@@ -0,0 +1,26 @@
+{
+  // Use IntelliSense to learn about possible attributes.
+  // Hover to view descriptions of existing attributes.
+  // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
+  "version": "0.2.0",
+  "configurations": [
+    {
+      "type": "node",
+      "request": "launch",
+      "name": "Launch Program",
+      "runtimeExecutable": "npm",
+      "runtimeArgs": [
+        "start"
+      ],
+      "cwd": "${workspaceFolder}",
+      "internalConsoleOptions": "neverOpen"
+    },
+    {
+      "name": "Python Debugger: Current File",
+      "type": "debugpy",
+      "request": "launch",
+      "program": "${file}",
+      "console": "integratedTerminal"
+    }
+  ]
+}
\ No newline at end of file
diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 00000000..8277e5a8
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,6 @@
+{
+  "IDX.aI.enableInlineCompletion": true,
+  "IDX.aI.enableCodebaseIndexing": true,
+  "python-envs.defaultEnvManager": "ms-python.python:system",
+  "python-envs.pythonProjects": []
+}
\ No newline at end of file
diff --git a/1-multiplication.spec.js b/1-multiplication.spec.js
new file mode 100644
index 00000000..ce7ef15d
--- /dev/null
+++ b/1-multiplication.spec.js
@@ -0,0 +1,7 @@
+import { multiply } from './1-multiplication';
+
+describe('multiplication', () => {
+  it('should multiply two numbers correctly', () => {
+    expect(multiply(2, 3)).toBe(6);
+  });
+});
\ No newline at end of file
diff --git a/2-first-last.js b/2-first-last.js
new file mode 100644
index 00000000..403a87dd
--- /dev/null
+++ b/2-first-last.js
@@ -0,0 +1,4 @@
+export const firstLast = (items) => {
+  return `First: ${items[0]}, Last: ${items[items.length - 1]}`;
+
+}
\ No newline at end of file
diff --git a/App.css b/App.css
new file mode 100644
index 00000000..e7e7b7c7
--- /dev/null
+++ b/App.css
@@ -0,0 +1,92 @@
+@import "tailwindcss";
+#root {
+  margin: 0 auto;
+}
+.App {
+  font-family: sans-serif;
+  text-align: center;
+}
+.input-container {
+  position: relative;
+}
+input {
+  border: 1px solid #e4e4e7;
+  padding: 0.5rem;
+  min-width: 320px;
+  max-width: 1600px;
+}
+.error {
+  position: absolute;
+  top: 1.5rem;
+  left: 0;
+  color: red;
+}
+
+body {
+  margin: 0;
+  font-family: "Montserrat";
+}
+
+h1, section, header {
+  padding: 2rem;
+}
+
+.light {
+  background: aquamarine;
+  color: rgb(0, 24, 164);
+}
+
+.dark {
+  background: rgb(0, 24, 164);
+  color: aquamarine;
+}
+
+button {
+  padding: 18px;
+  border-radius: 30px;
+  border: 0;
+  background: hotpink;
+  color: white;
+  font-size: 16px;
+  font-weight: bold;
+  font-family: "Montserrat";
+  margin-right: 0.5rem;
+}
+
+select {
+  border: none;
+  border-radius: 4px;
+  color: rgb(0, 24, 164);
+  font-size: 16px;
+  font-weight: 600;
+  height: 40px;
+  font-family: 'Montserrat';
+  padding: 0 20px;
+  cursor: pointer;
+
+  -webkit-appearance: none;
+  -moz-appearance: none;
+  appearance: none;
+}
+
+main {
+  padding: 2rem;
+}
+
+.thought {
+  border: 2px solid blue;
+  padding: 1rem;
+  margin-bottom: 1rem;
+}
+
+form {
+  display: flex;
+  flex-direction: column;
+  width: 200px;
+  padding-bottom: 2rem;
+}
+
+textarea {
+  height: 100px;
+  margin-bottom: 1rem;
+}
\ No newline at end of file
diff --git a/App.jsx b/App.jsx
new file mode 100644
index 00000000..10f252d0
--- /dev/null
+++ b/App.jsx
@@ -0,0 +1,61 @@
+import { useState, useEffect } from "react"
+
+// Removed duplicate declaration of App component
+export const App = () => {
+  const [count, setCount] = useState(0);
+
+  useEffect(() => {
+    const handleScroll = () => {
+      console.log('scrolled!');
+    };
+
+    window.addEventListener('scroll', handleScroll);
+    return () => {
+      window.removeEventListener('scroll', handleScroll);
+    };
+  }, []);
+
+  useEffect(() => {
+    const controller = new AbortController();
+
+    fetch('https://happy-thoughts-ux7hkzgmwa-uc.a.run.app/thoughts', { signal: controller.signal })
+      .then(response => response.json())
+      .then(data => console.log(data));
+
+    const intervalId = setInterval(() => {
+      fetch('https://happy-thoughts-ux7hkzgmwa-uc.a.run.app/thoughts', { signal: controller.signal })
+        .then(response => response.json())
+        .then(data => console.log(data));
+
+      console.log('This runs every second');
+    }, 1000);
+
+    return () => {
+      clearInterval(intervalId);
+      controller.abort();
+    };
+  }, []);
+
+  return (
+    <main>
+      <button onClick={() => setCount(count + 1)}>+1</button>
+      <button onClick={() => setCount(count - 1)}>-1</button>
+      <p>Count: {count}</p>
+      {count > 140 && (
+        <p>You hit 140!</p>
+      )}
+    </main>
+  );
+};
+
\ No newline at end of file
diff --git a/App.tsx b/App.tsx
new file mode 100644
index 00000000..213743f7
--- /dev/null
+++ b/App.tsx
@@ -0,0 +1,126 @@
+import React, { useState, useEffect } from 'react';
+import Card from './Card.tsx';
+import './index.css'
+import './components/Card.css'
+import './components/Card.tsx'
+import './components/Card.jsx'
+import './components/form.jsx'
+import './components/index.json'
+import './components/main.tsx'
+import './components/App.css'
+import './components/App.js'
+import './components/App.jsx'
+import './components/index.css'
+import './components/index.js'
+import './components/tests.ts'
+import './components/index.html'
+import './components/tests.tsx'
+import './form.css';
+// import { Form } from './form' // Removed as 'Form' is not exported from './form'
+import './App.css';
+import './index.css';
+import { main } from './main.jsx';
+import Main from './main.tsx';
+
+// API endpoints used below:
+// https://happy-thoughts-ux7hkzgmwa-uc.a.run.app/thoughts
+// https://happy-thoughts-ux7hkzgmwa-uc.a.run.app/thoughts/THOUGHT_ID/like
+
+// if creating a Review as an object
+interface Review {
+  id: number;
+  text: string;
+  dessert: string;
+}
+
+const App = () => {
+  const [selectedCard, setSelectedCard] = useState('');
+  const [reviews, setReviews] = useState([]); // if implementing an array of reviews
+  const [review, setReview] = useState(''); // if implementing one review only, as a string
+  const [reviewText, setReviewText] = useState('');
+  const [thoughts, setThoughts] = useState<{ message: string }[]>([]);
+
+  const handleFormSubmit = (event) => {
+    event.preventDefault()
+    fetch("", {
+      method: "POST",
+      body: JSON.stringify({
+        message: "Hello world",
+      }),
+      headers: { "Content-Type": "application/json" },
+    })
+      .then((res) => res.json())
+      .then((newThought) => {
+        setThoughts((previousThoughts) => [newThought, ...previousThoughts])
+      })
+  }
+
+  const handleCardSelect = (title: string) => {
+    setSelectedCard(title);
+  };
+
+  // setting a single review as text, then clearing the text area
+  const handleReviewSubmit = (e: React.FormEvent) => {
+    e.preventDefault(); // stop the form from doing its default re-render here
+    setReview(reviewText);
+    setReviewText('');
+  };
+
+  return (
+    <div className="App">
+      <Card
+        title="Message App"
+        onClick={() => handleCardSelect("Message App")}
+      />
+
+      <section>
+        <h2>Write a message</h2>