zlib#createGunzip TypeScript Examples
The following examples show how to use
zlib#createGunzip.
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: 1631140092497-userCountries.ts From Corsace with MIT License | 6 votes |
/**
 * Adds the `country` column to `user` and backfills it.
 *
 * In production the data is fetched live from the osu! API per user (slow,
 * and fails for restricted users); everywhere else a pre-generated gzipped
 * SQL dump shipped next to this migration is replayed instead.
 *
 * @param queryRunner TypeORM query runner for this migration transaction.
 */
public async up (queryRunner: QueryRunner): Promise<void> {
    // Old migration code, super slow and doesn't work for restricted users
    if (process.env.NODE_ENV === "production") {
        await queryRunner.query("ALTER TABLE `user` ADD `country` tinytext NOT NULL");
        const users = await User
            .createQueryBuilder("user")
            .getMany();
        // Look up and persist each user's country concurrently.
        await Promise.all(users.map(async user => {
            const apiUser = (await osuClient.user.get(user.osu.userID)) as APIUser;
            user.country = apiUser.country.toString();
            return user.save();
        }));
    } else {
        // Fast path: decompress the bundled SQL dump into one big string.
        const bigSql = await streamToString(createReadStream(resolve(__dirname, "1631140092497-userCountries.sql.gz")).pipe(createGunzip()));
        // The filter already drops blank/whitespace-only lines, so the loop
        // below needs no further emptiness check.
        const sqlInstructions = bigSql.split("\n").filter(sql => sql.trim().length !== 0);
        // Statements are executed sequentially on purpose: SQL replay order matters.
        for (const sqlInstruction of sqlInstructions)
            await queryRunner.query(sqlInstruction);
    }
}
Example #2
Source File: 1614784355565-MCA2020seeding.ts From Corsace with MIT License | 5 votes |
/**
 * Seeds MCA 2020 data by replaying the gzipped SQL dump shipped alongside
 * this migration: decompress, split into statements, execute in order.
 *
 * @param queryRunner TypeORM query runner for this migration transaction.
 */
public async up (queryRunner: QueryRunner): Promise<void> {
    const bigSql = await streamToString(createReadStream(resolve(__dirname, "1614784355565-MCA2020seeding.sql.gz")).pipe(createGunzip()));
    // The filter already discards blank/whitespace-only lines, so no further
    // emptiness check is needed inside the loop.
    const sqlInstructions = bigSql.split("\n").filter(sql => sql.trim().length !== 0);
    // Run sequentially on purpose: SQL replay order matters.
    for (const sqlInstruction of sqlInstructions)
        await queryRunner.query(sqlInstruction);
}
Example #3
Source File: metricsFromAuth0.ts From clarity with Apache License 2.0 | 4 votes |
// Kicks off an Auth0 user export (email + linked accounts), polls the export
// job until it finishes, then aggregates the result into account/key metrics.
private async accountKeyStats() {
    // https://auth0.github.io/node-auth0/module-management.ManagementClient.html#exportUsers
    this.managementClient.exportUsers(
        {
            format: 'json',
            // Only the two fields the metrics need, renamed for convenience.
            fields: [
                {
                    name: 'user_metadata.accounts',
                    export_as: 'userAccount'
                },
                {
                    name: 'email',
                    export_as: 'email'
                }
            ]
        },
        async (err, job) => {
            if (err) {
                console.error(err);
            } else {
                const jobId = job.id;
                // Poll the export job every 10 seconds until it completes or fails.
                const intervalId = setInterval(async () => {
                    const jobInfo = (await this.managementClient.getJob({
                        id: jobId
                    })) as ExportUsersJob;
                    switch (jobInfo.status) {
                        case 'pending':
                            break;
                        case 'processing':
                            break;
                        case 'completed':
                            // Stop polling, then download and gunzip the export,
                            // accumulating the newline-delimited JSON into `buffer`.
                            clearInterval(intervalId);
                            let buffer = '';
                            request(jobInfo.location!)
                                .pipe(createGunzip())
                                .on('data', (d: any) => {
                                    buffer += d;
                                })
                                .on('end', async () => {
                                    let totalKey = 0;
                                    let totalUser = 0;
                                    // Maps "accounts per user" -> number of users with that count.
                                    const keyCountMap = new Map();
                                    buffer
                                        .trim()
                                        .split('\n')
                                        .forEach(b => {
                                            try {
                                                const userInfo = JSON.parse(b);
                                                // Missing/empty userAccount counts as zero.
                                                const userKeyAccount =
                                                    userInfo?.userAccount?.length || 0;
                                                totalKey += userKeyAccount;
                                                totalUser += 1;
                                                keyCountMap.set(
                                                    userKeyAccount,
                                                    (keyCountMap.get(userKeyAccount) || 0) + 1
                                                );
                                            } catch {
                                                // Skip unparsable lines, but leave a trace in the logs.
                                                console.error('failed to parse exported file');
                                            }
                                        });
                                    const dailyState = await this.userStats();
                                    // Publish the aggregates to the metric gauges.
                                    this.clarityMetrics.accountKeyGauge.set(totalKey);
                                    this.clarityMetrics.accountGauge.set(totalUser);
                                    this.clarityMetrics.dailyLoginGauge.set(
                                        dailyState[0].logins || 0
                                    );
                                    this.clarityMetrics.dailySignupGauge.set(
                                        dailyState[0].signups || 0
                                    );
                                    return;
                                });
                            break;
                        case 'failed':
                            clearInterval(intervalId);
                            console.error(`job ${job.id} failed`);
                            break;
                    }
                }, 10 * 1000);
            }
        }
    );
}