stream#PassThrough JavaScript Examples
The following examples show how to use
stream#PassThrough.
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: index.es.js From ipfs-action with MIT License | 6 votes |
/**
 * Clone body given Res/Req instance
 *
 * A consumed body cannot be cloned; a plain stream body is teed into two
 * PassThroughs so both the instance and the caller get a readable copy.
 *
 * @param {Response|Request} instance Response or Request instance
 * @return {string|Blob|Buffer|Stream}
 */
function clone(instance) {
  let body = instance.body;

  // Refuse to clone once the body has been read.
  if (instance.bodyUsed) {
    throw new Error('cannot clone body after it is used');
  }

  // Only plain streams need teeing; form-data objects (detected via their
  // getBoundary method) cannot be cloned without a form-data dependency.
  const isTeeableStream =
    body instanceof Stream && typeof body.getBoundary !== 'function';

  if (isTeeableStream) {
    const kept = new PassThrough();
    const returned = new PassThrough();
    body.pipe(kept);
    body.pipe(returned);
    // The instance keeps one branch; the caller receives the other.
    instance.body = kept;
    body = returned;
  }

  return body;
}
Example #2
Source File: index.es.js From ipfs-action with MIT License | 6 votes |
/**
 * Write a Request/Response body to a writable destination stream.
 *
 * Handles the simple body shapes (null, string, Blob, Buffer) with a
 * direct write + end, and pipes anything else as a stream.
 *
 * @param {import('stream').Writable} dest destination stream
 * @param {Request|Response} instance owner of the body to serialize
 */
function writeToStream(dest, instance) {
  const { body } = instance;

  if (body === null) {
    // Nothing to send: close the destination immediately.
    dest.end();
    return;
  }

  if (typeof body === 'string' || Buffer.isBuffer(body)) {
    // Strings and buffers are written verbatim.
    dest.write(body);
    dest.end();
    return;
  }

  if (body instanceof Blob) {
    // Blobs keep their bytes under the internal BUFFER symbol.
    dest.write(body[BUFFER]);
    dest.end();
    return;
  }

  // Remaining case: body is a stream.
  if (instance.useElectronNet) {
    dest.chunkedEncoding = instance.chunkedEncoding;
    // Force a first write to start the request, otherwise an empty body
    // stream errors when closing the dest stream with Electron v7.
    dest.write('');
  }
  // The PassThrough indirection is needed because FormData streams are
  // somehow not consumed by electron/net when piped directly.
  body.pipe(new PassThrough()).pipe(dest);
}
Example #3
Source File: index.es.js From Lynx with MIT License | 6 votes |
/**
 * Clone body given Res/Req instance
 *
 * @param Mixed instance Response or Request instance
 * @return Mixed the body, teed into a fresh stream when it was a stream
 */
function clone(instance) {
  // A consumed body cannot be replayed.
  if (instance.bodyUsed) {
    throw new Error('cannot clone body after it is used');
  }

  const body = instance.body;

  // Non-stream bodies (and form-data objects, which expose getBoundary and
  // cannot be cloned without the form-data dependency) are returned as-is.
  if (!(body instanceof Stream) || typeof body.getBoundary === 'function') {
    return body;
  }

  // Tee the stream: one branch replaces the internal body, the other is
  // handed back to the caller.
  const kept = new PassThrough();
  const returned = new PassThrough();
  body.pipe(kept);
  body.pipe(returned);
  instance[INTERNALS].body = kept;
  return returned;
}
Example #4
Source File: debug.js From aresrpg with MIT License | 5 votes |
/**
 * Registers debug HTTP routes exposing mob behavior trees, streams per-mob
 * node statuses over server-sent events, and returns a listener that
 * records each behavior-tree evaluation result.
 *
 * @param {{ world: any, app: import('fastify').FastifyInstance }} param
 */
function behavior({ world, app }) {
const serializer = new XMLSerializer()
// One entry per tree type: its serialized XML plus the ids of the live
// mob instances of that type.
const behavior_trees = Object.entries(trees).map(([type, tree]) => ({
id: type,
name: Entities[type].displayName,
tree: serializer.serializeToString(tree),
instances: world.mobs.all
.filter(mob => mob.type === type)
.map(({ entity_id }) => ({
id: entity_id,
})),
}))
app.get('/behavior', async () => behavior_trees)
// Per-mob accumulator: `current` maps tree-node key -> last status,
// `stream` carries [key, status] updates as they occur.
const instances = world.mobs.all.map(({ entity_id }) => ({
entity_id,
current: {},
stream: new PassThrough({ objectMode: true }),
}))
app.get('/behavior/:id', (request, reply) => {
reply.type('text/event-stream')
reply.header('Connection', 'keep-alive')
const format = data => `data: ${JSON.stringify(data)}\n\n`
/** @type {Object} */
const { params } = request
// NOTE(review): `find` returns undefined for an unknown id, and the
// `instance.current` access below would then throw — confirm ids are
// always valid (e.g. taken from the /behavior listing).
const instance = instances.find(
({ entity_id }) => entity_id === Number(params.id)
)
const stream = new PassThrough()
// Replay the statuses known so far, then pipe live updates after them.
for (const data of Object.entries(instance.current)) {
stream.write(format(data))
}
Readable.from(
aiter(instance.stream[Symbol.asyncIterator]()).map(format)
).pipe(stream)
reply.send(stream)
})
const statuses = {
[SUCCESS]: 'SUCCESS',
[FAILURE]: 'FAILURE',
[RUNNING]: 'RUNNING',
}
// Listener invoked with each behavior-tree node result: record the node's
// status and push it to any connected SSE clients.
return ({ context: { path, entity_id }, result }) => {
const status = statuses[result.status]
const instance = instances.find(({ entity_id: id }) => id === entity_id)
// Keep only the node names from the dotted path: drop the root segment,
// then every other segment (the structural ones).
const key = path
.split('.')
.slice(1) // Skip tree.
.filter((e, i) => i % 2 === 1)
.join('.')
instance.current = {
...instance.current,
[key]: status,
}
instance.stream.write([key, status])
}
}
Example #5
Source File: position.js From aresrpg with MIT License | 5 votes |
/**
 * Tracks which mobs currently stand in which chunk and notifies client
 * event emitters when a chunk loads or unloads together with the mobs it
 * contains.
 *
 * @param {{ mobs: any }} param registry exposing a `positions` emitter
 * @returns {{ observe: Function }} observer to attach to a client context
 */
export default function observe_world({ mobs }) {
// Serialized action queue: mob movements and client chunk events are all
// folded sequentially so the chunk map never races.
const actions = new PassThrough({ objectMode: true })
mobs.positions.on('*', payload =>
actions.write({ type: 'mob_position', payload })
)
/** @type {import('../context.js').Observer} */
function observe({ events }) {
events.on(Context.CHUNK_LOADED, ({ x, z, signal }) => {
actions.write({
type: 'client_chunk_loaded',
payload: { events, x, z, signal },
})
})
events.on(Context.CHUNK_UNLOADED, ({ x, z }) =>
actions.write({
type: 'client_chunk_unloaded',
payload: { events, x, z },
})
)
}
// Fold all actions into a Map of chunk_index -> [{ mob, position }].
// NOTE(review): this reduce promise is left floating; a rejection (e.g.
// the `unknown type` throw below) would be unhandled — confirm intended.
aiter(actions).reduce(
(mobs_by_chunk, { type, payload: { x, z, ...payload } }) => {
// Shadows the outer `mobs` parameter: here it is the mob list of the
// (x, z) chunk, empty when the chunk is not yet tracked.
const mobs = mobs_by_chunk.get(chunk_index(x, z)) ?? []
if (type === 'client_chunk_loaded')
payload.events.emit(Context.CHUNK_LOADED_WITH_MOBS, {
mobs,
x,
z,
signal: payload.signal,
})
else if (type === 'client_chunk_unloaded')
payload.events.emit(Context.CHUNK_UNLOADED_WITH_MOBS, { mobs, x, z })
else if (type === 'mob_position') {
const { mob, last_position, position } = payload
// First known position: add the mob to its chunk (later duplicate keys
// win in the Map constructor, so this replaces the chunk entry).
if (last_position == null)
return new Map([
...mobs_by_chunk.entries(),
[chunk_index(x, z), [...mobs, { mob, position }]],
])
else if (!same_chunk(last_position, position)) {
// Chunk change: remove the mob from its previous chunk, add it to
// the new one.
const last_x = chunk_position(last_position.x)
const last_z = chunk_position(last_position.z)
const last_mobs = mobs_by_chunk
.get(chunk_index(last_x, last_z))
.filter(({ mob: { entity_id } }) => entity_id !== mob.entity_id)
return new Map([
...mobs_by_chunk.entries(),
[chunk_index(last_x, last_z), last_mobs],
[chunk_index(x, z), [...mobs, { mob, position }]],
])
}
} else throw new Error(`unknown type: ${type}`)
// Same-chunk movement or chunk events: map unchanged.
return mobs_by_chunk
},
new Map()
)
return {
observe,
}
}
Example #6
Source File: mobs.js From aresrpg with MIT License | 5 votes |
/**
 * Registers every mob of the world: builds one stateful mob handle per
 * configured position, wiring an action queue that is folded through
 * `reduce_mob` and broadcast on a per-mob event emitter.
 *
 * @param {import('./context.js').InitialWorld} world
 * @returns a new world with `next_entity_id` advanced past the mobs and a
 *   `mobs` registry (`all` list + `by_entity_id` lookup)
 */
export function register(world) {
  const mobs = world.mob_positions.map(({ position, type, level }, i) => {
    const { speed = DEFAULT_SPEED, health } = Entities[type]
    const initial_state = {
      path: [position],
      open: [],
      closed: [],
      start_time: 0,
      speed:
        (1 / (speed * MOVEMENT_SPEED_TO_BLOCKS_PER_SECOND)) *
        1000 /* ms/block */,
      health /* halfheart */,
      blackboard: {},
      attack_sequence_number: 0,
      wakeup_at: 0,
      sleep_id: null,
      look_at: { player: false, yaw: 0, pitch: 0 },
    }
    const actions = new PassThrough({ objectMode: true })
    const events = new EventEmitter()
    events.setMaxListeners(Infinity)
    const entity_id = world.next_entity_id + i
    // Fold every dispatched action into the mob state and broadcast each
    // resulting state. NOTE(review): floating promise by design; rejections
    // from reduce_mob are unhandled — confirm.
    aiter(actions).reduce(async (last_state, action) => {
      const state = await reduce_mob(last_state, action, {
        world: world.get(),
        type,
        entity_id,
      })
      events.emit(Mob.STATE, state)
      return state
    }, initial_state)
    // Emit the initial state asynchronously so early subscribers see it.
    setImmediate(() => events.emit(Mob.STATE, initial_state))
    const get_state = last_event_value(events, Mob.STATE)
    return {
      entity_id,
      type,
      level,
      events,
      get_state,
      constants: entitiesByName[Entities[type].minecraft_entity],
      // Interpolated position along the current path at `time`.
      position(time = Date.now()) {
        const { path, start_time, speed } = get_state()
        return path_position({ path, time, start_time, speed })
      },
      dispatch(action_type, payload, time = Date.now()) {
        actions.write({ type: action_type, payload, time })
      },
    }
  })
  observe_mobs(mobs)
  const { next_entity_id } = world
  return {
    ...world,
    next_entity_id: next_entity_id + mobs.length,
    mobs: {
      all: mobs,
      by_entity_id(id) {
        // Valid ids span [next_entity_id, next_entity_id + mobs.length).
        // The previous `<=` upper bound accepted one id past the end and
        // returned `undefined` instead of `null` for it.
        if (id >= next_entity_id && id < next_entity_id + mobs.length)
          return mobs[id - next_entity_id]
        else return null
      },
    },
  }
}
Example #7
Source File: [[...path]].js From stacker.news with MIT License | 5 votes |
/**
 * Capture-and-cache handler: serves a cached S3 render of the requested
 * path when recent (modified within 15 minutes), otherwise spawns a capture
 * process and streams its stdout to both the HTTP response and an S3
 * upload.
 *
 * NOTE(review): the returned Promise resolves but never rejects; failures
 * surface only through the HTTP status codes written below.
 */
export default async function handler (req, res) {
return new Promise(resolve => {
const joinedPath = path.join(...(req.query.path || []))
const searchQ = req.query.q ? `?q=${req.query.q}` : ''
const s3PathPUT = s3PathPrefix + (joinedPath === '.' ? '_' : joinedPath) + searchQ
const s3PathGET = s3PathPrefix + (joinedPath === '.' ? '_' : joinedPath) + encodeS3URI(searchQ)
const url = process.env.PUBLIC_URL + '/' + joinedPath + searchQ
const aws = new AWS.S3({apiVersion: '2006-03-01'})
// check to see if we have a recent version of the object
aws.headObject({
Bucket: bucketName,
Key: s3PathPUT,
IfModifiedSince : new Date(new Date().getTime() - 15*60000)
}).promise().then(() => {
// this path is cached so return it
res.writeHead(302, { Location: bucketUrl + s3PathGET }).end()
resolve()
}).catch(() => {
// we don't have it cached, so capture it and cache it
// Only one capture at a time: `capturing` is a module-level flag, so a
// concurrent request gets a 503 and is told to retry after a second.
// NOTE(review): this flag is per-process only — confirm single-instance
// deployment, otherwise captures can still race across processes.
if (capturing) {
return res.writeHead(503, {
'Retry-After' : 1
}).end()
}
capturing = true
// Tee: capture stdout goes both to the client and to the S3 upload body.
const pass = new PassThrough()
aws.upload({
Bucket: bucketName,
Key: s3PathPUT,
ACL: 'public-read',
Body: pass,
ContentType: contentType
}).promise().catch(console.log)
res.setHeader('Content-Type', contentType)
const capture = spawn(
'node', ['./spawn/capture.js', url], {maxBuffer: 1024*1024*5})
capture.on('close', code => {
if (code !== 0) {
res.status(500).end()
} else {
res.status(200).end()
}
// Ending `pass` completes the S3 upload body; always release the flag.
pass.end()
capture.removeAllListeners()
capturing = false
resolve()
})
capture.on('error', err => console.log('error', err))
capture.stderr.on('data', data => console.log('error stderr', data.toString()))
capture.stdout.on('data', data => {
res.write(data)
pass.write(data)
})
})
})
}
Example #8
Source File: index.es.js From ipfs-action with MIT License | 4 votes |
/**
 * Fetch function
 *
 * Wraps http/https (or Electron's net module) requests in a fetch-like
 * API: builds a Request, sends it, handles abort/timeout, follows
 * redirects, decompresses the body and resolves with a Response whose
 * body is a PassThrough stream.
 *
 * @param {string|Request} url Absolute url or Request instance
 * @param {Object} [opts] Fetch options
 * @return {Promise} resolves with a Response, rejects with FetchError
 */
function fetch(url, opts = {}) {
// wrap http.request into fetch
return isReady.then(() => new Promise((resolve$1, reject) => {
// build request object
const request = new Request(url, opts);
const options = getNodeRequestOptions(request);
const send = request.useElectronNet ? electron.net.request : (options.protocol === 'https:' ? https : http).request; // http.request only support string as host header, this hack make custom host header possible
if (options.headers.host) {
options.headers.host = options.headers.host[0];
}
// Bail out immediately when the caller's signal is already aborted.
if (request.signal && request.signal.aborted) {
reject(new FetchError('request aborted', 'abort'));
return;
} // send request
let headers;
if (request.useElectronNet) {
// electron/net takes headers via setHeader below, not in options.
headers = options.headers;
delete options.headers;
options.session = opts.session || electron.session.defaultSession;
options.useSessionCookies = request.useSessionCookies;
} else {
if (opts.agent) options.agent = opts.agent;
}
const req = send(options);
if (request.useElectronNet) {
// Multi-valued headers are set one value at a time.
for (const headerName in headers) {
if (typeof headers[headerName] === 'string') req.setHeader(headerName, headers[headerName]);else {
for (var _iterator = _createForOfIteratorHelperLoose(headers[headerName]), _step; !(_step = _iterator()).done;) {
const headerValue = _step.value;
req.setHeader(headerName, headerValue);
}
}
}
}
let reqTimeout;
const cancelRequest = () => {
if (request.useElectronNet) {
req.abort(); // in electron, `req.destroy()` does not send abort to server
} else {
req.destroy(); // in node.js, `req.abort()` is deprecated
}
};
const abortRequest = () => {
const err = new FetchError('request aborted', 'abort');
reject(err);
cancelRequest();
req.emit('error', err);
};
if (request.signal) {
request.signal.addEventListener('abort', abortRequest);
}
if (request.timeout) {
reqTimeout = setTimeout(() => {
const err = new FetchError(`network timeout at: ${request.url}`, 'request-timeout');
reject(err);
cancelRequest();
}, request.timeout);
}
if (request.useElectronNet) {
// handle authenticating proxies
req.on('login', (authInfo, callback) => {
if (opts.user && opts.password) {
callback(opts.user, opts.password);
} else {
cancelRequest();
reject(new FetchError(`login event received from ${authInfo.host} but no credentials provided`, 'proxy', {
code: 'PROXY_AUTH_FAILED'
}));
}
});
}
req.on('error', err => {
clearTimeout(reqTimeout);
if (request.signal) {
request.signal.removeEventListener('abort', abortRequest);
}
reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));
});
req.on('abort', () => {
clearTimeout(reqTimeout);
if (request.signal) {
request.signal.removeEventListener('abort', abortRequest);
}
});
req.on('response', res => {
clearTimeout(reqTimeout);
if (request.signal) {
request.signal.removeEventListener('abort', abortRequest);
} // handle redirect
if (fetch.isRedirect(res.statusCode) && request.redirect !== 'manual') {
if (request.redirect === 'error') {
reject(new FetchError(`redirect mode is set to error: ${request.url}`, 'no-redirect'));
return;
}
if (request.counter >= request.follow) {
reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));
return;
}
if (!res.headers.location) {
reject(new FetchError(`redirect location header missing at: ${request.url}`, 'invalid-redirect'));
return;
} // per fetch spec, for POST request with 301/302 response, or any request with 303 response, use GET when following redirect
if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {
request.method = 'GET';
request.body = null;
request.headers.delete('content-length');
}
// Follow the redirect by recursing with the mutated request.
request.counter++;
resolve$1(fetch(resolve(request.url, res.headers.location), request));
return;
} // normalize location header for manual redirect mode
const headers = new Headers();
for (var _i = 0, _Object$keys = Object.keys(res.headers); _i < _Object$keys.length; _i++) {
const name = _Object$keys[_i];
if (Array.isArray(res.headers[name])) {
for (var _iterator2 = _createForOfIteratorHelperLoose(res.headers[name]), _step2; !(_step2 = _iterator2()).done;) {
const val = _step2.value;
headers.append(name, val);
}
} else {
headers.append(name, res.headers[name]);
}
}
if (request.redirect === 'manual' && headers.has('location')) {
headers.set('location', resolve(request.url, headers.get('location')));
} // prepare response
let body = new PassThrough();
res.on('error', err => body.emit('error', err));
res.pipe(body);
body.on('error', cancelRequest);
body.on('cancel-request', cancelRequest);
const abortBody = () => {
res.destroy();
res.emit('error', new FetchError('request aborted', 'abort')); // separated from the `.destroy()` because somehow Node's IncomingMessage streams do not emit errors on destroy
};
if (request.signal) {
request.signal.addEventListener('abort', abortBody);
res.on('end', () => {
request.signal.removeEventListener('abort', abortBody);
});
res.on('error', () => {
request.signal.removeEventListener('abort', abortBody);
});
}
const responseOptions = {
url: request.url,
status: res.statusCode,
statusText: res.statusMessage,
headers: headers,
size: request.size,
timeout: request.timeout,
useElectronNet: request.useElectronNet,
useSessionCookies: request.useSessionCookies
}; // HTTP-network fetch step 16.1.2
const codings = headers.get('Content-Encoding'); // HTTP-network fetch step 16.1.3: handle content codings
// in following scenarios we ignore compression support
// 1. running on Electron/net module (it manages it for us)
// 2. HEAD request
// 3. no Content-Encoding header
// 4. no content response (204)
// 5. content not modified response (304)
if (!request.useElectronNet && request.method !== 'HEAD' && codings !== null && res.statusCode !== 204 && res.statusCode !== 304) {
// Be less strict when decoding compressed responses, since sometimes
// servers send slightly invalid responses that are still accepted
// by common browsers.
// Always using Z_SYNC_FLUSH is what cURL does.
// /!\ This is disabled for now, because it seems broken in recent node
// const zlibOptions = {
//   flush: zlib.Z_SYNC_FLUSH,
//   finishFlush: zlib.Z_SYNC_FLUSH
// }
if (codings === 'gzip' || codings === 'x-gzip') {
// for gzip
body = body.pipe(zlib.createGunzip());
} else if (codings === 'deflate' || codings === 'x-deflate') {
// for deflate
// handle the infamous raw deflate response from old servers
// a hack for old IIS and Apache servers
// Peek at the first chunk to distinguish zlib-wrapped from raw deflate,
// then resolve from within the 'data' callback (early return below).
const raw = res.pipe(new PassThrough());
return raw.once('data', chunk => {
// see http://stackoverflow.com/questions/37519828
if ((chunk[0] & 0x0F) === 0x08) {
body = body.pipe(zlib.createInflate());
} else {
body = body.pipe(zlib.createInflateRaw());
}
const response = new Response(body, responseOptions);
resolve$1(response);
});
}
}
const response = new Response(body, responseOptions);
resolve$1(response);
});
writeToStream(req, request);
}));
}
Example #9
Source File: context.js From aresrpg with MIT License | 4 votes |
/**
 * The following code handles the pipeline, which works as follows:
 *
 * state = initial_state
 * on packets + on actions
 * |> transform_action
 * |> (state = reduce_state(state))
 *
 * Builds the per-client context: merges incoming protocol packets and
 * locally dispatched actions into a single action stream, folds them into
 * the player state, and persists that state on disconnect.
 *
 * @param {import('minecraft-protocol').Client} client
 */
export async function create_context(client) {
log.info(
{
username: client.username,
uuid: client.uuid,
id: client.id,
},
'Client connected'
)
// The controller aborts the packet iterator; `actions` is the local
// dispatch queue merged with packets below.
const controller = new AbortController()
const actions = new PassThrough({ objectMode: true })
client.once('end', () => {
log.info(
{ username: client.username, uuid: client.uuid },
'Client disconnected'
)
// Ending the action stream and aborting the packet stream lets the
// reduce below settle, which triggers the save_state chain.
actions.end()
controller.abort()
})
client.on('error', error => log.error(error, 'Client error'))
// Every raw packet becomes a `packet/<name>` action.
const packets = aiter(
abortable(on(client, 'packet', { signal: controller.signal }))
).map(([payload, { name }]) => ({
type: `packet/${name}`,
payload,
}))
const save_state = state => {
log.info(
{ username: client.username, uuid: client.uuid },
'Saving to database'
)
Database.push({
key: client.uuid.toLowerCase(),
value: saved_state(state),
})
}
/** @type {NodeJS.EventEmitter} */
const events = new EventEmitter()
const player_state = await Database.pull(client.uuid.toLowerCase())
// Floating fold: runs for the lifetime of the connection, emitting each
// intermediate state and saving the final one on disconnect.
aiter(combineAsyncIterators(actions[Symbol.asyncIterator](), packets))
.map(transform_action)
.reduce(
(last_state, action) => {
const state = reduce_state(last_state, action)
events.emit(Context.STATE, state)
return state
},
// default nickname is the client username, and is overriden by the loaded player state
{
...initial_state,
nickname: client.username,
...player_state,
last_connection_time: Date.now(),
}
)
.then(state => ({
...state,
last_disconnection_time: Date.now(),
}))
.then(save_state)
.catch(error => {
// TODO: what to do here if we can't save the client ?
log.error(error, 'State error')
})
const get_state = last_event_value(events, Context.STATE)
return {
client,
world,
events,
signal: controller.signal,
get_state,
inside_view: inside_view(get_state),
dispatch(type, payload) {
actions.write({ type, payload })
},
}
}
Example #10
Source File: index.es.js From Lynx with MIT License | 4 votes |
/**
 * Fetch function
 *
 * Wraps http/https requests in a fetch-like API: builds a Request, sends
 * it, applies the WHATWG fetch redirect steps, decompresses the body when
 * appropriate and resolves with a Response wrapping a PassThrough stream.
 *
 * @param Mixed url Absolute url or Request instance
 * @param Object opts Fetch options
 * @return Promise resolves with a Response, rejects with FetchError
 */
function fetch(url, opts) {
// allow custom promise
if (!fetch.Promise) {
throw new Error('native promise missing, set fetch.Promise to your favorite alternative');
}
Body.Promise = fetch.Promise;
// wrap http.request into fetch
return new fetch.Promise(function (resolve$$1, reject) {
// build request object
const request = new Request(url, opts);
const options = getNodeRequestOptions(request);
const send = (options.protocol === 'https:' ? https : http).request;
// send request
const req = send(options);
let reqTimeout;
// Abort the in-flight request and cancel any pending timeout.
function finalize() {
req.abort();
clearTimeout(reqTimeout);
}
// The timeout is armed only once a socket is assigned.
if (request.timeout) {
req.once('socket', function (socket) {
reqTimeout = setTimeout(function () {
reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));
finalize();
}, request.timeout);
});
}
req.on('error', function (err) {
reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));
finalize();
});
req.on('response', function (res) {
clearTimeout(reqTimeout);
const headers = createHeadersLenient(res.headers);
// HTTP fetch step 5
if (fetch.isRedirect(res.statusCode)) {
// HTTP fetch step 5.2
const location = headers.get('Location');
// HTTP fetch step 5.3
const locationURL = location === null ? null : resolve(request.url, location);
// HTTP fetch step 5.5
switch (request.redirect) {
case 'error':
reject(new FetchError(`redirect mode is set to error: ${request.url}`, 'no-redirect'));
finalize();
return;
case 'manual':
// node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.
if (locationURL !== null) {
headers.set('Location', locationURL);
}
break;
case 'follow':
// HTTP-redirect fetch step 2
if (locationURL === null) {
break;
}
// HTTP-redirect fetch step 5
if (request.counter >= request.follow) {
reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));
finalize();
return;
}
// HTTP-redirect fetch step 6 (counter increment)
// Create a new Request object.
const requestOpts = {
headers: new Headers(request.headers),
follow: request.follow,
counter: request.counter + 1,
agent: request.agent,
compress: request.compress,
method: request.method,
body: request.body
};
// HTTP-redirect fetch step 9
if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {
reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));
finalize();
return;
}
// HTTP-redirect fetch step 11
if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {
requestOpts.method = 'GET';
requestOpts.body = undefined;
requestOpts.headers.delete('content-length');
}
// HTTP-redirect fetch step 15
resolve$$1(fetch(new Request(locationURL, requestOpts)));
finalize();
return;
}
}
// prepare response
let body = res.pipe(new PassThrough());
const response_options = {
url: request.url,
status: res.statusCode,
statusText: res.statusMessage,
headers: headers,
size: request.size,
timeout: request.timeout
};
// HTTP-network fetch step 12.1.1.3
const codings = headers.get('Content-Encoding');
// HTTP-network fetch step 12.1.1.4: handle content codings
// in following scenarios we ignore compression support
// 1. compression support is disabled
// 2. HEAD request
// 3. no Content-Encoding header
// 4. no content response (204)
// 5. content not modified response (304)
if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {
resolve$$1(new Response(body, response_options));
return;
}
// For Node v6+
// Be less strict when decoding compressed responses, since sometimes
// servers send slightly invalid responses that are still accepted
// by common browsers.
// Always using Z_SYNC_FLUSH is what cURL does.
const zlibOptions = {
flush: zlib.Z_SYNC_FLUSH,
finishFlush: zlib.Z_SYNC_FLUSH
};
// for gzip
if (codings == 'gzip' || codings == 'x-gzip') {
body = body.pipe(zlib.createGunzip(zlibOptions));
resolve$$1(new Response(body, response_options));
return;
}
// for deflate
if (codings == 'deflate' || codings == 'x-deflate') {
// handle the infamous raw deflate response from old servers
// a hack for old IIS and Apache servers
// Peek at the first chunk to distinguish zlib-wrapped (0x08 low nibble)
// from raw deflate, then resolve from within the callback.
const raw = res.pipe(new PassThrough());
raw.once('data', function (chunk) {
// see http://stackoverflow.com/questions/37519828
if ((chunk[0] & 0x0F) === 0x08) {
body = body.pipe(zlib.createInflate());
} else {
body = body.pipe(zlib.createInflateRaw());
}
resolve$$1(new Response(body, response_options));
});
return;
}
// otherwise, use response as-is
resolve$$1(new Response(body, response_options));
});
writeToStream(req, request);
});
}