fs/promises#writeFile TypeScript Examples
The following examples show how to use writeFile from the Node.js fs/promises module in TypeScript.
Each example comes from an open-source project; the source file, project, and license are noted in the heading above it.
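For orientation, here is a minimal, self-contained sketch of the API every example below builds on (assuming Node.js 14 or later, where the fs/promises module is available):

import { writeFile } from 'fs/promises';

async function saveGreeting(): Promise<void> {
  // Strings are encoded as UTF-8 by default; the third argument makes it explicit.
  await writeFile('/tmp/greeting.txt', 'hello, world', 'utf8');
}

writeFile truncates and replaces an existing file and creates a missing one, but it does not create missing parent directories, which is why several examples below pair it with mkdir(..., { recursive: true }).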
Example #1
Source File: index.ts From fetch-gh-release-asset with MIT License | 7 votes |
baseFetchAssetFile = async (
  octokit: ReturnType<typeof github.getOctokit>,
  { id, outputPath, owner, repo, token }: FetchAssetFileOptions
) => {
  const {
    body,
    headers: { accept, 'user-agent': userAgent },
    method,
    url,
  } = octokit.request.endpoint(
    'GET /repos/:owner/:repo/releases/assets/:asset_id',
    {
      asset_id: id,
      headers: {
        accept: 'application/octet-stream',
      },
      owner,
      repo,
    }
  );
  let headers: HeadersInit = {
    accept,
    authorization: `token ${token}`,
  };
  if (typeof userAgent !== 'undefined')
    headers = { ...headers, 'user-agent': userAgent };
  const response = await fetch(url, { body, headers, method });
  if (!response.ok) {
    const text = await response.text();
    core.warning(text);
    throw new Error('Invalid response');
  }
  const blob = await response.blob();
  const arrayBuffer = await blob.arrayBuffer();
  await mkdir(dirname(outputPath), { recursive: true });
  void (await writeFile(outputPath, new Uint8Array(arrayBuffer)));
}
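Example #1 illustrates the standard recipe for persisting a binary HTTP response: ensure the parent directory exists, then hand a Uint8Array view of the body to writeFile. A reduced sketch of just that tail end (the URL is a placeholder; a global fetch is assumed, as in Node.js 18+):

import { mkdir, writeFile } from 'fs/promises';
import { dirname } from 'path';

async function downloadTo(url: string, outputPath: string): Promise<void> {
  const response = await fetch(url);
  if (!response.ok) throw new Error(`Download failed: ${response.status}`);
  // Response.arrayBuffer() skips the intermediate blob used in the example.
  const arrayBuffer = await response.arrayBuffer();
  await mkdir(dirname(outputPath), { recursive: true }); // create parent directories as needed
  await writeFile(outputPath, new Uint8Array(arrayBuffer));
}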
Example #2
Source File: file-writer.ts From xiome with MIT License | 6 votes |
export function makeFileWriter(root: string) {
  return {
    async write(path: string, html: HtmlTemplate) {
      const path2 = join(root, path)
      await mkdir(dirname(path2), {recursive: true})
      return writeFile(path2, html.toString(), "utf-8")
    }
  }
}
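A hypothetical use of this helper, assuming an HtmlTemplate whose toString() renders markup (indexTemplate is a stand-in):

const writer = makeFileWriter('/tmp/site');
// Writes /tmp/site/pages/index.html, creating the pages/ directory first.
await writer.write('pages/index.html', indexTemplate);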
Example #3
Source File: generate.ts From iuliia-js with MIT License | 6 votes |
async function generate() {
  const validator = await createValidator();
  const jsonFiles = (await readdir(INPUT_DIR)).filter((file) => file.endsWith(".json"));
  for (const file of jsonFiles) {
    const inputFile = join(INPUT_DIR, file);
    const outputFile = join(OUTPUT_DIR, basename(file, ".json") + ".ts");
    console.log(`Writing file: ${outputFile}`);
    const output = await jsonToTS(inputFile, validator);
    await writeFile(outputFile, output, "utf8");
  }
  console.log(`Writing file: ${DEFINITIONS_FILE}`);
  const moduleNames = jsonFiles.map((file) => basename(file, ".json"));
  const definitions = createDefinitions(moduleNames);
  await writeFile(DEFINITIONS_FILE, definitions, "utf8");
}
Example #4
Source File: gen-rsa-keys.ts From mtcute with GNU Lesser General Public License v3.0 | 6 votes |
async function main() {
  const crypto = new NodeCryptoProvider()
  const obj: Record<string, TlPublicKey> = {}
  for await (const key of parseInputFile()) {
    const parsed = await parsePublicKey(crypto, key.pem, key.kind === 'old')
    obj[parsed.fingerprint] = parsed
  }
  await writeFile(
    OUT_JS_FILE,
    ESM_PRELUDE +
      "exports.default=JSON.parse('" +
      JSON.stringify(obj) +
      "');"
  )
}
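One caveat with this generator: interpolating JSON.stringify output into a single-quoted source string assumes the serialized data never contains a single quote or backslash. A sketch of a defensive variant (stringifying twice yields a fully escaped string literal):

function embedJson(obj: unknown): string {
  // The inner call serializes the data; the outer call turns that JSON text
  // into an escaped JavaScript string literal, safe for any content.
  return "exports.default=JSON.parse(" + JSON.stringify(JSON.stringify(obj)) + ");";
}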
Example #5
Source File: gen-code.ts From mtcute with GNU Lesser General Public License v3.0 | 6 votes |
async function generateWriters(
  apiSchema: TlFullSchema,
  mtpSchema: TlFullSchema
) {
  console.log('Generating writers...')
  let code = generateWriterCodeForTlEntries(apiSchema.entries, 'r')
  const mtpCode = generateWriterCodeForTlEntries(mtpSchema.entries, '', false)
  code = code.substring(0, code.length - 1) + mtpCode.substring(7)
  code += '\nexports.default = r;'
  await writeFile(OUT_WRITERS_FILE, ESM_PRELUDE + code)
}
Example #6
Source File: gen-code.ts From mtcute with GNU Lesser General Public License v3.0 | 6 votes |
async function generateReaders(
  apiSchema: TlFullSchema,
  mtpSchema: TlFullSchema
) {
  console.log('Generating readers...')
  let code = generateReaderCodeForTlEntries(apiSchema.entries, 'r', false)
  const mtpCode = generateReaderCodeForTlEntries(mtpSchema.entries, '')
  code = code.substring(0, code.length - 1) + mtpCode.substring(7)
  code += '\nexports.default = r;'
  await writeFile(OUT_READERS_FILE, ESM_PRELUDE + code)
}
Example #7
Source File: gen-code.ts From mtcute with GNU Lesser General Public License v3.0 | 6 votes |
async function generateTypings(
  apiSchema: TlFullSchema,
  apiLayer: number,
  mtpSchema: TlFullSchema,
  errors: TlErrors
) {
  console.log('Generating typings...')
  const [apiTs, apiJs] = generateTypescriptDefinitionsForTlSchema(
    apiSchema,
    apiLayer,
    undefined,
    errors
  )
  const [mtpTs, mtpJs] = generateTypescriptDefinitionsForTlSchema(
    mtpSchema,
    0,
    'mtp',
    errors
  )
  await writeFile(
    OUT_TYPINGS_FILE,
    apiTs + '\n\n' + mtpTs.replace("import _Long from 'long';", '')
  )
  await writeFile(OUT_TYPINGS_JS_FILE, ESM_PRELUDE + apiJs + '\n\n' + mtpJs)
}
Example #8
Source File: fetch-mtp.ts From mtcute with GNU Lesser General Public License v3.0 | 6 votes |
async function main() {
  console.log('Downloading MTProto schema...')
  const schema = await fetchMtprotoSchema()
  console.log('Parsing...')
  let entries = parseTlToEntries(schema, {
    prefix: 'mt_',
    applyPrefixToArguments: true,
  })
  // remove manually parsed types
  entries = entries.filter(
    (it) =>
      [
        'mt_msg_container',
        'mt_message',
        'mt_msg_copy',
        'mt_gzip_packed',
      ].indexOf(it.name) === -1
  )
  const rpcResult = entries.find((it) => it.name === 'mt_rpc_result')!
  rpcResult.arguments.forEach((arg) => {
    if (arg.name === 'result') {
      arg.type = 'any'
    }
  })
  // mtproto is handled internally, for simplicity we make them all classes
  entries.forEach((entry) => {
    entry.kind = 'class'
  })
  console.log('Writing to file...')
  await writeFile(MTP_SCHEMA_JSON_FILE, JSON.stringify(entries))
  console.log('Done!')
}
Example #9
Source File: fetch-errors.ts From mtcute with GNU Lesser General Public License v3.0 | 6 votes |
async function main() {
  const errors: TlErrors = {
    base: baseErrors,
    errors: {},
    throws: {},
    userOnly: {},
  }
  console.log('Fetching errors from Telegram...')
  await fetchFromTelegram(errors)
  // with some rather ugly workarounds we can also parse Telethon's errors file
  // and add missing error descriptions
  console.log('Fetching errors from Telethon...')
  await fetchFromTelethon(errors)
  virtualErrors.forEach((err) => {
    if (errors.errors[err.name]) {
      console.log(`Error ${err.name} already exists and is not virtual`)
      return
    }
    errors.errors[err.name] = err
  })
  console.log('Saving...')
  await writeFile(ERRORS_JSON_FILE, JSON.stringify(errors))
}
Example #10
Source File: fetch-api.ts From mtcute with GNU Lesser General Public License v3.0 | 6 votes |
async function updateReadme(currentLayer: number) {
  const oldReadme = await readFile(README_MD_FILE, 'utf8')
  const today = new Date().toLocaleDateString('ru')
  await writeFile(
    README_MD_FILE,
    oldReadme.replace(
      /^Generated from TL layer \*\*\d+\*\* \(last updated on \d+\.\d+\.\d+\)\.$/m,
      `Generated from TL layer **${currentLayer}** (last updated on ${today}).`
    )
  )
}
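The read-replace-write round trip above is a handy way to keep one generated line inside an otherwise hand-maintained file. A generic sketch of the same pattern (the file path and regular expression are placeholders):

import { readFile, writeFile } from 'fs/promises';

async function replaceLine(file: string, pattern: RegExp, replacement: string): Promise<void> {
  const before = await readFile(file, 'utf8');
  const after = before.replace(pattern, replacement);
  // Fail loudly rather than silently writing an unchanged file.
  if (after === before) throw new Error(`Pattern not found in ${file}`);
  await writeFile(file, after);
}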
Example #11
Source File: fetch-api.ts From mtcute with GNU Lesser General Public License v3.0 | 5 votes |
async function updatePackageVersion(
  rl: readline.Interface,
  currentLayer: number
) {
  const packageJson = JSON.parse(await readFile(PACKAGE_JSON_FILE, 'utf8'))
  const version: string = packageJson.version
  let [major, minor] = version.split('.').map((i) => parseInt(i))
  if (major === currentLayer) {
    console.log('Current version: %s. Bump minor version?', version)
    const res = await input(rl, '[Y/n] > ')
    if (res.trim().toLowerCase() === 'n') {
      return
    }
  } else {
    major = currentLayer
    minor = 0
  }
  console.log('Updating package version...')
  const versionStr = `${major}.${minor}.0`
  packageJson.version = versionStr
  await writeFile(PACKAGE_JSON_FILE, JSON.stringify(packageJson, null, 4))
  console.log('Updating dependent packages...')
  for (const dir of await readdir(PACKAGES_DIR, { withFileTypes: true })) {
    if (!dir.isDirectory()) continue
    const pkgFile = join(PACKAGES_DIR, dir.name, 'package.json')
    let pkg
    try {
      pkg = JSON.parse(await readFile(pkgFile, 'utf8'))
    } catch (e: any) {
      if (e.code === 'ENOENT') continue
      throw e
    }
    if (pkg.dependencies && '@mtcute/tl' in pkg.dependencies) {
      pkg.dependencies['@mtcute/tl'] = 'workspace:' + versionStr
    }
    if (pkg.devDependencies && '@mtcute/tl' in pkg.devDependencies) {
      pkg.devDependencies['@mtcute/tl'] = 'workspace:' + versionStr
    }
    await writeFile(pkgFile, JSON.stringify(pkg, null, 4) + '\n')
  }
  // a reminder, since this step is easy to forget
  console.log(
    'Done! Please make sure packages compile before committing and pushing'
  )
}
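The function above repeats one small dance several times: read a JSON file, mutate the parsed object, and write it back with consistent formatting. Extracted as a sketch (the four-space indent and trailing newline mirror the files this script touches):

import { readFile, writeFile } from 'fs/promises';

async function updateJson(file: string, mutate: (data: any) => void): Promise<void> {
  const data = JSON.parse(await readFile(file, 'utf8'));
  mutate(data);
  // JSON.stringify emits no trailing newline, so one is appended explicitly.
  await writeFile(file, JSON.stringify(data, null, 4) + '\n');
}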
Example #12
Source File: settings.ts From sapio-studio with Mozilla Public License 2.0 | 5 votes |
preferences: {
  data: Data;
  save: (which: Prefs, data: object) => Promise<boolean>;
  load_preferences: (which: Prefs) => Promise<void>;
  initialize: () => Promise<void>;
} = {
  data: fill_in_default(),
  initialize: async () => {
    for (const key of pref_array)
      try {
        await preferences.load_preferences(key);
      } catch {}
  },
  save: async (which: Prefs, data: object) => {
    const conf = path.resolve(
      app.getPath('userData'),
      which + '_preferences.json'
    );
    switch (which) {
      case 'bitcoin':
      case 'display':
      case 'local_oracle':
      case 'sapio_cli':
        await writeFile(conf, JSON.stringify(data));
        await preferences.load_preferences(which);
        await custom_sapio_config();
        break;
      default:
        return Promise.reject('Bad Request');
    }
    switch (which) {
      case 'bitcoin':
        deinit_bitcoin_node();
        get_bitcoin_node();
        break;
      case 'local_oracle':
        kill_emulator();
        start_sapio_oracle();
        break;
    }
    return true;
  },
  load_preferences: async (which: Prefs) => {
    const conf = path.resolve(
      app.getPath('userData'),
      which + '_preferences.json'
    );
    preferences.data[which] = JSON.parse(
      await readFile(conf, { encoding: 'utf-8' })
    );
  },
}
Example #13
Source File: sapio.ts From sapio-studio with Mozilla Public License 2.0 | 5 votes |
async create_contract(
  workspace_name: string,
  which: string,
  txn: string | null,
  args: string
): Promise<Result<string | null>> {
  const workspace = await SapioWorkspace.new(workspace_name);
  let create, created, bound;
  const args_h = Bitcoin.crypto.sha256(Buffer.from(args)).toString('hex');
  // Unique File Name of Time + Args + Module
  const fname = `${which.substring(0, 16)}-${args_h.substring(
    0,
    16
  )}-${new Date().getTime()}`;
  const file = await workspace.contract_output_path(fname);
  const write_str = (to: string, data: string) =>
    writeFile(path.join(file, to), data, { encoding: 'utf-8' });
  const w_arg = write_str('args.json', args);
  const w_mod = write_str(
    'module.json',
    JSON.stringify({ module: which })
  );
  const sc = await sapio.show_config();
  if ('err' in sc) return Promise.reject('Error getting config');
  const w_settings = write_str('settings.json', sc.ok);
  // await the writes so that failures surface here instead of being dropped
  await Promise.all([w_arg, w_mod, w_settings]);
  try {
    create = await SapioCompiler.command([
      'contract',
      'create',
      '--key',
      which,
      args,
      '--workspace',
      workspace.workspace_location(),
    ]);
  } catch (e) {
    console.debug('Failed to Create', which, args);
    return { ok: null };
  }
  if ('err' in create) {
    await write_str('create_error.json', JSON.stringify(create));
    return create;
  }
  created = create.ok;
  await write_str('create.json', create.ok);
  let bind;
  try {
    const bind_args = ['contract', 'bind', '--base64_psbt'];
    if (txn) bind_args.push('--txn', txn);
    bind_args.push(created);
    bind = await SapioCompiler.command(bind_args);
  } catch (e: any) {
    console.debug(created);
    console.log('Failed to bind', e.toString());
    return { ok: null };
  }
  if ('err' in bind) {
    console.log(['bind'], typeof bind, bind);
    await write_str('bind_error.json', JSON.stringify(bind));
    return bind;
  }
  const w_bound = write_str('bound.json', bind.ok);
  await w_bound;
  console.debug(bound);
  return bind;
}
Example #14
Source File: generate.ts From iuliia-js with MIT License | 5 votes |
async function generateSchemaDefinitionInterface() {
  console.log(`Writing file: ${TRANSLITERATION_SCHEMA_FILE}`);
  const schemaDefinitionInterfaceSource = await compileFromFile(JSON_SCHEMA_FILE);
  await writeFile(TRANSLITERATION_SCHEMA_FILE, schemaDefinitionInterfaceSource, "utf8");
}
Example #15
Source File: cache.ts From cloudmusic-vscode with MIT License | 5 votes |
static put(key: string, data: LyricCacheItem): void {
  // Fire and forget: handle rejection on the promise itself, since a
  // try/catch around an un-awaited promise cannot catch its errors.
  writeFile(
    resolve(LYRIC_CACHE_DIR, key),
    Buffer.from(JSON.stringify(data), "utf8")
  ).catch(() => {});
}
Example #16
Source File: documentation.ts From mtcute with GNU Lesser General Public License v3.0 | 5 votes |
async function main() {
  let cached = await getCachedDocumentation()
  if (cached) {
    console.log('Cached documentation: %s', cached.updated)
  }
  const rl = require('readline').createInterface({
    input: process.stdin,
    output: process.stdout,
  })
  const input = (q: string): Promise<string> =>
    new Promise((res) => rl.question(q, res))
  while (true) {
    console.log('Choose action:')
    console.log('0. Exit')
    console.log('1. Update documentation')
    console.log('2. Apply descriptions.yaml')
    console.log('3. Apply documentation to schema')
    const act = parseInt(await input('[0-3] > '))
    if (isNaN(act) || act < 0 || act > 3) {
      console.log('Invalid action')
      continue
    }
    if (act === 0) return
    if (act === 1) {
      const [schema, layer] = unpackTlSchema(
        JSON.parse(await readFile(API_SCHEMA_JSON_FILE, 'utf8'))
      )
      cached = await fetchDocumentation(schema, layer)
    }
    if (act === 2) {
      if (!cached) {
        console.log('No schema available, fetch it first')
        continue
      }
      const descriptionsYaml = jsYaml.load(
        await readFile(DESCRIPTIONS_YAML_FILE, 'utf8')
      )
      applyDescriptionsYamlFile(cached, descriptionsYaml)
      await writeFile(DOC_CACHE_FILE, JSON.stringify(cached))
    }
    if (act === 3) {
      if (!cached) {
        console.log('No schema available, fetch it first')
        continue
      }
      const [schema, layer] = unpackTlSchema(
        JSON.parse(await readFile(API_SCHEMA_JSON_FILE, 'utf8'))
      )
      applyDocumentation(schema, cached)
      await writeFile(
        API_SCHEMA_JSON_FILE,
        JSON.stringify(packTlSchema(schema, layer))
      )
    }
  }
}
Example #17
Source File: tsc.ts From solita with Apache License 2.0 | 5 votes |
async function writeTsconfig(parentToGeneratedDir: string) {
  const tsconfigPath = path.join(parentToGeneratedDir, 'tsconfig.json')
  await writeFile(tsconfigPath, JSON.stringify(config, null, 2))
}
Example #18
Source File: identity.ts From hoprnet with GNU General Public License v3.0 | 5 votes |
/**
 * Persistently store identity on disk
 * @param path file system path to store identity
 * @param id serialized private key
 */
async function storeIdentity(path: string, id: Uint8Array) {
  await writeFile(resolve(path), id)
}
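Since the payload here is a serialized private key, a stricter file mode may be worth considering: writeFile accepts a mode option that applies when the file is created. A sketch of that variant (not part of the original project):

import { writeFile } from 'fs/promises';
import { resolve } from 'path';

async function storeIdentitySecurely(path: string, id: Uint8Array): Promise<void> {
  // 0o600: readable and writable by the owner only.
  await writeFile(resolve(path), id, { mode: 0o600 });
}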
Example #19
Source File: slimDeployments.ts From hoprnet with GNU General Public License v3.0 | 5 votes |
main: DeployFunction = async function (hre: HardhatRuntimeEnvironment) {
  const basePath = join(
    __dirname,
    '..',
    'deployments',
    hre.environment,
    hre.network.name === 'hardhat' ? 'localhost' : hre.network.name
  )
  let contracts: string[]
  try {
    contracts = (await readdir(basePath)).filter((filename: string) => filename.endsWith('.json'))
  } catch (err) {
    // Ignore missing deployments in unit tests
    if (hre.network.name === 'hardhat' && err.code === 'ENOENT') {
      return
    }
    throw err
  }
  for (const contract of contracts) {
    const filePath = join(basePath, contract)
    const data = require(filePath)
    const contractName = contract.replace('.json', '')
    const compilerData =
      (await hre.artifacts.getBuildInfo(`contracts/${contractName}.sol:${contractName}`)) ?? data.compilerData
    // sometimes not all contracts are deployed, depends on the deployment scripts
    if (!compilerData) continue
    const slimmed = {
      address: data.address,
      transactionHash: data.transactionHash,
      blockNumber: data.receipt ? data.receipt.blockNumber : data.blockNumber,
      metadata: {
        solcVersion: compilerData.solcVersion,
        input: compilerData.input
      },
      abi: data.abi
    }
    await writeFile(filePath, JSON.stringify(slimmed, null, 2))
  }
}
Example #20
Source File: DatabaseUtil.ts From mooncord with MIT License | 5 votes |
protected async writeDump() {
  await writeFile(path.resolve(__dirname, '../temp/database_dump.json'), JSON.stringify(database, null, 4), { encoding: 'utf8', flag: 'w+' })
  logSuccess('Dumped Database!')
}
Example #21
Source File: DesktopConfigService.ts From rewind with MIT License | 5 votes |
async saveOsuStablePath(osuStableFolderPath: string) {
  const data: RewindDesktopConfig = {
    osuPath: osuStableFolderPath,
  };
  return writeFile(this.userConfigPath, JSON.stringify(data));
}
Example #22
Source File: cache.ts From cloudmusic-vscode with MIT License | 5 votes |
static async store(): Promise<void> {
  const json = JSON.stringify(this._list.toArray());
  await writeFile(this._listPath, json).catch(logError);
}
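Taken together, Examples #15 and #22 show the two safe shapes for a writeFile whose failure should be logged or ignored rather than thrown: handle the rejection on the promise itself, or await it inside try/catch. A minimal sketch of both (logError and the arguments are placeholders):

import { writeFile } from 'fs/promises';

declare function logError(err: unknown): void; // assumed logging helper

function storeInBackground(listPath: string, json: string): void {
  // Fire-and-forget: the rejection must be handled on the promise itself.
  void writeFile(listPath, json).catch(logError);
}

async function storeAwaited(listPath: string, json: string): Promise<void> {
  // Awaited: try/catch works because await re-throws the rejection.
  try {
    await writeFile(listPath, json);
  } catch (err) {
    logError(err);
  }
}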
Example #23
Source File: fetch-api.ts From mtcute with GNU Lesser General Public License v3.0 | 4 votes |
async function main() {
console.log('Loading schemas...')
const schemas: Schema[] = [
await fetchTdlibSchema(),
await fetchTdesktopSchema(),
await fetchCoreSchema(),
await fetchCoreSchema(COREFORK_DOMAIN, 'Corefork'),
{
name: 'Custom',
layer: 0, // handled manually
content: tlToFullSchema(
await readFile(join(__dirname, '../data/custom.tl'), 'utf8')
),
},
]
console.log('Available schemas:')
schemas.forEach((schema) =>
console.log(
' - %s (layer %d): %d entries',
schema.name,
schema.layer,
schema.content.entries.length
)
)
const resultLayer = Math.max(...schemas.map((it) => it.layer))
console.log(`Final schema will be on layer ${resultLayer}. Merging...`)
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout,
})
const resultSchema = await mergeTlSchemas(
schemas.map((it) => it.content),
async (_options) => {
const options: ConflictOption[] = _options.map((it, idx) => ({
schema: schemas[idx],
entry: it,
}))
let chooseOptions: ConflictOption[] = []
const customEntry = options[options.length - 1]
if (customEntry.entry) {
// if there is custom entry in conflict, we must present it, otherwise something may go wrong
chooseOptions = options
} else {
// first of all, prefer entries from the latest layer
const fromLastSchema = options.filter(
(opt) => opt.schema.layer === resultLayer
)
// if there is only one schema on the latest layer, we can simply return it
if (fromLastSchema.length === 1) return fromLastSchema[0].entry
// there are multiple choices on the latest layer
// if they are all the same, it's just conflict between layers,
// and we can merge the ones from the latest layer
const mergedEntry = mergeTlEntries(
fromLastSchema
.filter((opt) => opt.entry)
.map((opt) => opt.entry!)
)
if (typeof mergedEntry === 'string') {
// merge failed, so there is in fact some conflict
chooseOptions = fromLastSchema
} else return mergedEntry
}
let nonEmptyOptions = chooseOptions.filter((opt) => opt.entry)
console.log(
'Conflict detected at %s %s:',
nonEmptyOptions[0].entry!.kind,
nonEmptyOptions[0].entry!.name
)
console.log('0. Remove')
nonEmptyOptions.forEach((opt, idx) => {
console.log(
`${idx + 1}. ${opt.schema.name}: ${writeTlEntryToString(
opt.entry!
)}`
)
})
while (true) {
const res = parseInt(
await input(rl, `[0-${nonEmptyOptions.length}] > `)
)
if (isNaN(res) || res < 0 || res > nonEmptyOptions.length)
continue
if (res === 0) return undefined
return nonEmptyOptions[res - 1].entry
}
}
)
console.log(
'Done! Final schema contains %d entries',
resultSchema.entries.length
)
let docs = await getCachedDocumentation()
if (docs) {
console.log('Cached documentation from %s, use it?', docs.updated)
const res = await input(rl, '[Y/n] > ')
if (res.trim().toLowerCase() === 'n') {
docs = null
}
}
if (docs === null) {
console.log('Downloading documentation...')
docs = await fetchDocumentation(resultSchema, resultLayer)
}
applyDocumentation(resultSchema, docs)
await overrideInt53(resultSchema)
console.log('Writing result to file...')
await writeFile(
API_SCHEMA_JSON_FILE,
JSON.stringify(packTlSchema(resultSchema, resultLayer))
)
console.log('Updating README.md...')
await updateReadme(resultLayer)
await updatePackageVersion(rl, resultLayer)
rl.close()
console.log('Done!')
}
Example #24
Source File: documentation.ts From mtcute with GNU Lesser General Public License v3.0 | 4 votes |
export async function fetchDocumentation(
schema: TlFullSchema,
layer: number,
silent = !process.stdout.isTTY
): Promise<CachedDocumentation> {
const headers = {
cookie: `stel_dev_layer=${layer}`,
'User-Agent':
'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) ' +
'Chrome/87.0.4280.88 Safari/537.36',
}
const index = await fetchRetry(`${CORE_DOMAIN}/schema`, { headers })
const actualLayer = cheerio
.load(index)('.dev_layer_select .dropdown-toggle')
.text()
.match(/layer (\d+)/i)![1]
const ret: CachedDocumentation = {
updated: `${new Date().toLocaleString('ru-RU')} (layer ${actualLayer})`,
classes: {},
methods: {},
unions: {},
}
let prevSize = 0
let logPos = 0
function log(str: string) {
if (silent) return
while (str.length < prevSize) str += ' '
process.stdout.write('\r' + PROGRESS_CHARS[logPos] + ' ' + str)
prevSize = str.length
logPos = (logPos + 1) % PROGRESS_CHARS.length
}
for (const entry of schema.entries) {
log(`? ${entry.kind} ${entry.name}`)
const url = `${CORE_DOMAIN}/${
entry.kind === 'class' ? 'constructor' : 'method'
}/${entry.name}`
const html = await fetchRetry(url, {
headers,
})
const $ = cheerio.load(html)
const content = $('#dev_page_content')
if (content.text().trim() === 'The page has not been saved') continue
normalizeLinks(url, content)
const retClass: CachedDocumentationEntry = {}
const description = extractDescription($)
if (description) {
retClass.comment = description
}
const parametersTable = $('#parameters').parent().next('table')
parametersTable.find('tr').each((idx, _el) => {
const el = $(_el)
const cols = el.find('td')
if (!cols.length) return // <thead>
const name = cols.first().text().trim()
const description = cols.last().html()!.trim()
if (description) {
if (!retClass.arguments) retClass.arguments = {}
retClass.arguments[name] = description
}
})
if (entry.kind === 'method') {
const errorsTable = $('#possible-errors').parent().next('table')
let userBotRequired = false
errorsTable.find('tr').each((idx, _el) => {
const el = $(_el)
let cols = el.find('td')
if (!cols.length) return // <thead>
let code = parseInt($(cols[0]).text())
let name = $(cols[1]).text()
let comment = $(cols[2]).text()
if (name === 'USER_BOT_REQUIRED') userBotRequired = true
if (!retClass.throws) retClass.throws = []
retClass.throws.push({ code, name, comment })
})
const botsCanUse = !!$('#bots-can-use-this-method').length
const onlyBotsCanUse =
botsCanUse &&
(!!description.match(/[,;]( for)? bots only$/) ||
userBotRequired)
retClass.available = onlyBotsCanUse
? 'bot'
: botsCanUse
? 'both'
: 'user'
}
ret[entry.kind === 'class' ? 'classes' : 'methods'][entry.name] =
retClass
}
for (const name in schema.unions) {
if (!schema.unions.hasOwnProperty(name)) continue
log(`? union ${name}`)
const url = `${CORE_DOMAIN}/type/${name}`
const html = await fetchRetry(url, {
headers,
})
const $ = cheerio.load(html)
const content = $('#dev_page_content')
if (content.text().trim() === 'The page has not been saved') continue
normalizeLinks(url, content)
const description = extractDescription($)
if (description) ret.unions[name] = description
}
log('✨ Patching descriptions')
const descriptionsYaml = jsYaml.load(
await readFile(DESCRIPTIONS_YAML_FILE, 'utf8')
)
applyDescriptionsYamlFile(ret, descriptionsYaml)
log('? Writing to file')
await writeFile(DOC_CACHE_FILE, JSON.stringify(ret))
if (!silent) process.stdout.write('\n')
return ret
}
Example #25
Source File: QBittorrent.ts From cross-seed with Apache License 2.0 | 4 votes |
async inject(
newTorrent: Metafile,
searchee: Searchee
): Promise<InjectionResult> {
if (await this.isInfoHashInClient(newTorrent.infoHash)) {
return InjectionResult.ALREADY_EXISTS;
}
const buf = parseTorrent.toTorrentFile(newTorrent);
const filename = `${newTorrent.name}.cross-seed.torrent`;
const tempFilepath = join(tmpdir(), filename);
await writeFile(tempFilepath, buf, { mode: 0o644 });
try {
const { save_path, isComplete, autoTMM, category } =
await this.getTorrentConfiguration(searchee);
if (!isComplete) return InjectionResult.TORRENT_NOT_COMPLETE;
const shouldManuallyEnforceContentLayout =
isSingleFileTorrent(newTorrent) &&
(await this.isSubfolderContentLayout(searchee));
const file = await fileFrom(
tempFilepath,
"application/x-bittorrent"
);
const formData = new FormData();
formData.append("torrents", file, filename);
formData.append("tags", "cross-seed");
formData.append("category", category);
if (autoTMM) {
formData.append("autoTMM", "true");
} else {
formData.append("autoTMM", "false");
formData.append("savepath", save_path);
}
if (shouldManuallyEnforceContentLayout) {
formData.append("contentLayout", "Subfolder");
formData.append("skip_checking", "false");
formData.append("paused", "true");
} else {
formData.append("skip_checking", "true");
formData.append("paused", "false");
}
// for some reason the parser parses the last kv pair incorrectly
// it concats the value and the sentinel
formData.append("foo", "bar");
await this.request("/torrents/add", formData);
if (shouldManuallyEnforceContentLayout) {
await this.request(
"/torrents/recheck",
`hashes=${newTorrent.infoHash}`,
X_WWW_FORM_URLENCODED
);
await this.request(
"/torrents/resume",
`hashes=${newTorrent.infoHash}`,
X_WWW_FORM_URLENCODED
);
}
unlink(tempFilepath).catch((error) => {
logger.debug(error);
});
return InjectionResult.SUCCESS;
} catch (e) {
logger.debug({
label: Label.QBITTORRENT,
message: `injection failed: ${e.message}`,
});
return InjectionResult.FAILURE;
}
}
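Example #25 writes the torrent payload to a temporary file, uses it for the upload, and unlinks it afterwards. That write-use-cleanup shape generalizes; a sketch (names are illustrative):

import { unlink, writeFile } from 'fs/promises';
import { tmpdir } from 'os';
import { join } from 'path';

async function withTempFile<T>(
  name: string,
  data: Buffer,
  use: (path: string) => Promise<T>
): Promise<T> {
  const tempPath = join(tmpdir(), name);
  await writeFile(tempPath, data, { mode: 0o644 }); // world-readable, as in the example
  try {
    return await use(tempPath);
  } finally {
    await unlink(tempPath).catch(() => {}); // best-effort cleanup, as in the example
  }
}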
Example #26
Source File: gitlab.ts From command-bot with Apache License 2.0 | 4 votes |
runCommandInGitlabPipeline = async (ctx: Context, task: Task) => {
const { logger, gitlab } = ctx
const cmdRunner = new CommandRunner(ctx, {
itemsToRedact: [gitlab.accessToken],
cwd: task.repoPath,
})
/*
Save the head SHA before doing any modifications to the branch so that
scripts will be able to restore the branch as it was on GitHub
*/
const headSha = await cmdRunner.run("git", ["rev-parse", "HEAD"])
const getPipelineScriptsCloneCommand = ({
withRef,
}: {
withRef: boolean
}) => {
return `git clone --depth 1 ${
withRef ? `--branch "$PIPELINE_SCRIPTS_REF"` : ""
} "$PIPELINE_SCRIPTS_REPOSITORY" "$PIPELINE_SCRIPTS_DIR"`
}
const jobTaskInfoMessage = (() => {
switch (task.tag) {
case "PullRequestTask": {
return `The task was generated from a comment in ${task.comment.htmlUrl}`
}
case "ApiTask": {
return `The task was generated from an API call by ${task.requester}`
}
default: {
const exhaustivenessCheck: never = task
throw new Error(
// eslint-disable-next-line @typescript-eslint/restrict-template-expressions
`jobTaskInfoMessage is not exhaustive: ${exhaustivenessCheck}`,
)
}
}
})()
const artifactsFolderPath = ".git/.artifacts"
await writeFile(
path.join(task.repoPath, ".gitlab-ci.yml"),
yaml.stringify({
workflow: {
rules: [
{ if: `$CI_PIPELINE_SOURCE == "api"` },
{ if: `$CI_PIPELINE_SOURCE == "web"` },
],
},
command: {
...task.gitlab.job,
script: [
`echo "This job is related to task ${task.id}. ${jobTaskInfoMessage}."`,
// prettier-ignore
'if [ "${PIPELINE_SCRIPTS_REPOSITORY:-}" ]; then ' +
'if [ "${PIPELINE_SCRIPTS_REF:-}" ]; then ' +
getPipelineScriptsCloneCommand({ withRef: true }) + "; " +
"else " +
getPipelineScriptsCloneCommand({ withRef: false }) + "; " +
"fi" + "; " +
"fi",
`export ARTIFACTS_DIR="$PWD/${artifactsFolderPath}"`,
`mkdir -p "$ARTIFACTS_DIR"`,
task.command,
],
artifacts: {
name: "${CI_JOB_NAME}_${CI_COMMIT_REF_NAME}",
expire_in: "7 days",
when: "always",
paths: [artifactsFolderPath],
},
variables: {
...task.gitlab.job.variables,
GH_CONTRIBUTOR: task.gitRef.contributor.owner,
GH_CONTRIBUTOR_REPO: task.gitRef.contributor.repo,
GH_CONTRIBUTOR_BRANCH: task.gitRef.contributor.branch,
GH_HEAD_SHA: headSha,
COMMIT_MESSAGE: task.command,
PIPELINE_SCRIPTS_REPOSITORY: ctx.pipelineScripts?.repository,
PIPELINE_SCRIPTS_REF: ctx.pipelineScripts?.ref,
PIPELINE_SCRIPTS_DIR: ".git/.scripts",
},
},
}),
)
const branchName = `cmd-bot/${
"prNumber" in task.gitRef
? task.gitRef.prNumber
: `${task.gitRef.contributor.owner}/${task.gitRef.contributor.branch}`
}`
await cmdRunner.run("git", ["branch", "-D", branchName], {
testAllowedErrorMessage: (err) => {
return err.endsWith("not found.")
},
})
await cmdRunner.run("git", ["checkout", "-b", branchName])
await cmdRunner.run("git", ["add", ".gitlab-ci.yml"])
await cmdRunner.run("git", ["commit", "-m", task.command])
const gitlabRemote = "gitlab"
const gitlabProjectPath = `${gitlab.pushNamespace}/${task.gitRef.upstream.repo}`
await cmdRunner.run("git", ["remote", "remove", gitlabRemote], {
testAllowedErrorMessage: (err) => {
return err.includes("No such remote:")
},
})
await cmdRunner.run("git", [
"remote",
"add",
gitlabRemote,
`https://token:${gitlab.accessToken}@${gitlab.domain}/${gitlabProjectPath}.git`,
])
/*
It's not necessary to say "--option ci.skip" because the pipeline execution
is conditional per workflow:rules
*/
await cmdRunner.run("git", ["push", "--force", gitlabRemote, "HEAD"])
const gitlabProjectApi = `https://${
gitlab.domain
}/api/v4/projects/${encodeURIComponent(gitlabProjectPath)}`
const branchNameUrlEncoded = encodeURIComponent(branchName)
/*
Wait until the branch is actually present on GitLab after pushing it. We've
noted this measure is required in
https://github.com/paritytech/polkadot/pull/5524#issuecomment-1128029579
because the pipeline creation request was sent too soon, before GitLab
registered the branch, therefore causing the "Reference not found" message.
*/
let wasBranchRegistered = false
const waitForBranchMaxTries = 3
const waitForBranchRetryDelay = 1024
const branchPresenceUrl = `${gitlabProjectApi}/repository/branches/${branchNameUrlEncoded}`
for (
let waitForBranchTryCount = 0;
waitForBranchTryCount < waitForBranchMaxTries;
waitForBranchTryCount++
) {
logger.info(
branchPresenceUrl,
`Sending request to see if the branch for task ${task.id} is ready`,
)
const response = await fetch(branchPresenceUrl, {
headers: { "PRIVATE-TOKEN": gitlab.accessToken },
})
if (
// The branch was not yet registered on GitLab; wait for it...
response.status === 404
) {
logger.info(
`Branch of task ${task.id} was not found. Waiting before retrying...`,
)
await millisecondsDelay(waitForBranchRetryDelay)
} else if (response.ok) {
wasBranchRegistered = true
break
} else {
throw new Error(
`Request to ${branchPresenceUrl} failed: ${await response.text()}`,
)
}
}
if (!wasBranchRegistered) {
throw new Error(
`Task's branch was not registered on GitLab after ${
waitForBranchMaxTries * waitForBranchRetryDelay
}ms`,
)
}
const pipelineCreationUrl = `${gitlabProjectApi}/pipeline?ref=${branchNameUrlEncoded}`
logger.info(
pipelineCreationUrl,
`Sending request to create a pipeline for task ${task.id}`,
)
const pipeline = await validatedFetch<{
id: number
project_id: number
}>(
fetch(pipelineCreationUrl, {
method: "POST",
headers: { "PRIVATE-TOKEN": gitlab.accessToken },
}),
Joi.object()
.keys({
id: Joi.number().required(),
project_id: Joi.number().required(),
})
.options({ allowUnknown: true }),
)
logger.info(pipeline, `Created pipeline for task ${task.id}`)
const jobFetchUrl = `${gitlabProjectApi}/pipelines/${pipeline.id}/jobs`
logger.info(
jobFetchUrl,
`Sending request to fetch the GitLab job created for task ${task.id}`,
)
const [job] = await validatedFetch<
[
{
web_url: string
},
]
>(
fetch(jobFetchUrl, { headers: { "PRIVATE-TOKEN": gitlab.accessToken } }),
Joi.array()
.items(
Joi.object()
.keys({ web_url: Joi.string().required() })
.options({ allowUnknown: true }),
)
.length(1)
.required(),
)
logger.info(job, `Fetched job for task ${task.id}`)
return getAliveTaskGitlabContext(ctx, {
id: pipeline.id,
projectId: pipeline.project_id,
jobWebUrl: job.web_url,
})
}
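The writeFile call at the heart of Example #26 serializes a configuration object to YAML before committing it to the repository. Reduced to just that step (assuming the yaml package's stringify, which the example also uses; the job shape is a placeholder):

import { writeFile } from 'fs/promises';
import path from 'path';
import yaml from 'yaml';

async function writeCiConfig(repoPath: string, script: string[]): Promise<void> {
  const config = {
    workflow: { rules: [{ if: '$CI_PIPELINE_SOURCE == "web"' }] },
    command: { script },
  };
  // yaml.stringify produces a document string, ready to be written verbatim.
  await writeFile(path.join(repoPath, '.gitlab-ci.yml'), yaml.stringify(config));
}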
Example #27
Source File: main.ts From command-bot with Apache License 2.0 | 4 votes |
main = async () => {
const startDate = new Date()
const logFormat = (() => {
const value = process.env.LOG_FORMAT
switch (value) {
case "json": {
return value
}
case undefined: {
return null
}
default: {
throw new Error(`Invalid $LOG_FORMAT: ${value}`)
}
}
})()
const minLogLevel = (() => {
const value = process.env.MIN_LOG_LEVEL
switch (value) {
case undefined: {
return "info"
}
case "info":
case "warn":
case "error": {
return value
}
default: {
throw new Error(`Invalid $MIN_LOG_LEVEL: ${value}`)
}
}
})()
const logger = new Logger({
name: "command-bot",
minLogLevel,
logFormat,
impl: console,
})
const masterToken = envVar("MASTER_TOKEN")
const shouldPostPullRequestComment = (() => {
const value = process.env.POST_COMMENT
switch (value) {
case "false": {
return false
}
case undefined:
case "true": {
return true
}
default: {
throw new Error(`Invalid $POST_COMMENT: ${value}`)
}
}
})()
const dataPath = envVar("DATA_PATH")
await ensureDir(dataPath)
const appDbVersionPath = path.join(dataPath, "task-db-version")
const shouldClearTaskDatabaseOnStart = process.env.TASK_DB_VERSION
? await (async (appDbVersion) => {
const currentDbVersion = await (async () => {
try {
return (await readFile(appDbVersionPath)).toString().trim()
} catch (error) {
if (
/*
Test for the following error:
[Error: ENOENT: no such file or directory, open '/foo'] {
errno: -2,
code: 'ENOENT',
syscall: 'unlink',
path: '/foo'
}
*/
!(error instanceof Error) ||
(error as { code?: string })?.code !== "ENOENT"
) {
throw error
}
}
})()
if (currentDbVersion !== appDbVersion) {
await writeFile(appDbVersionPath, appDbVersion)
return true
}
})(process.env.TASK_DB_VERSION.trim())
: false
if (process.env.PING_PORT) {
// Signal that we have started listening until Probot kicks in
const pingPort = parseInt(process.env.PING_PORT)
const pingServer = stoppable(
http.createServer((_, res) => {
res.writeHead(200)
res.end()
}),
0,
)
pingServer.listen(pingPort)
}
const appId = envNumberVar("APP_ID")
const privateKey = Buffer.from(
envVar("PRIVATE_KEY_BASE64"),
"base64",
).toString()
const clientId = envVar("CLIENT_ID")
const clientSecret = envVar("CLIENT_SECRET")
const webhookSecret = envVar("WEBHOOK_SECRET")
let probotLogger: ProbotLogger | undefined = undefined
switch (logFormat) {
case "json": {
probotLogger = getLog({
level: "error",
logFormat: "json",
logLevelInString: true,
logMessageKey: "msg",
})
break
}
case null: {
break
}
default: {
const exhaustivenessCheck: never = logFormat
// eslint-disable-next-line @typescript-eslint/restrict-template-expressions
throw new Error(`Not exhaustive: ${exhaustivenessCheck}`)
}
}
const bot = Probot.defaults({
appId,
privateKey,
secret: webhookSecret,
logLevel: "info",
...(probotLogger === undefined
? {}
: { log: probotLogger.child({ name: "probot" }) }),
})
const server = new Server({
Probot: bot,
...(probotLogger === undefined
? {}
: { log: probotLogger.child({ name: "server" }) }),
webhookProxy: process.env.WEBHOOK_PROXY_URL,
})
const allowedOrganizations = envVar("ALLOWED_ORGANIZATIONS")
.split(",")
.filter((value) => {
return value.length !== 0
})
.map((value) => {
const parsedValue = parseInt(value)
assert(parsedValue)
return parsedValue
})
assert(allowedOrganizations.length)
const matrix = (() => {
if (process.env.MATRIX_HOMESERVER) {
return {
homeServer: process.env.MATRIX_HOMESERVER,
accessToken: envVar("MATRIX_ACCESS_TOKEN"),
}
} else {
return undefined
}
})()
const gitlabAccessToken = envVar("GITLAB_ACCESS_TOKEN")
const gitlabAccessTokenUsername = envVar("GITLAB_ACCESS_TOKEN_USERNAME")
const gitlabDomain = envVar("GITLAB_DOMAIN")
const gitlabPushNamespace = envVar("GITLAB_PUSH_NAMESPACE")
const gitlabJobImage = envVar("GITLAB_JOB_IMAGE")
const pipelineScripts = (() => {
const pipelineScriptsRepository = process.env.PIPELINE_SCRIPTS_REPOSITORY
if (pipelineScriptsRepository) {
return {
repository: pipelineScriptsRepository,
ref: process.env.PIPELINE_SCRIPTS_REF,
}
}
})()
await server.load((probot) => {
void setup(probot, server, {
appId,
clientId,
clientSecret,
privateKey,
logger,
startDate,
shouldPostPullRequestComment,
allowedOrganizations,
dataPath,
matrix,
masterToken,
shouldClearTaskDatabaseOnStart,
isDeployment: !!process.env.IS_DEPLOYMENT,
pipelineScripts,
gitlab: {
accessToken: gitlabAccessToken,
accessTokenUsername: gitlabAccessTokenUsername,
domain: gitlabDomain,
pushNamespace: gitlabPushNamespace,
jobImage: gitlabJobImage,
},
})
})
void server.start()
logger.info("Probot has started!")
}
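Buried in Example #27 is a small version-stamp idiom: read a marker file, treat a missing file (ENOENT) as "no version recorded", and rewrite the marker when the expected value changes. Isolated as a sketch:

import { readFile, writeFile } from 'fs/promises';

async function versionChanged(markerPath: string, expected: string): Promise<boolean> {
  let current: string | undefined;
  try {
    current = (await readFile(markerPath, 'utf8')).trim();
  } catch (error) {
    // Only a missing marker file is expected; anything else is a real error.
    if ((error as { code?: string })?.code !== 'ENOENT') throw error;
  }
  if (current === expected) return false;
  await writeFile(markerPath, expected); // stamp the new version
  return true;
}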
Example #28
Source File: handlers.ts From sapio-studio with Mozilla Public License 2.0 | 4 votes |
export default function (window: BrowserWindow) {
setup_chat();
ipcMain.handle('bitcoin::command', async (event, arg) => {
const node = await get_bitcoin_node();
try {
return { ok: await node.command(arg) };
} catch (r: any) {
if (r instanceof RpcError) {
return {
err: { code: r.code, message: r.message, name: r.name },
};
} else if (r instanceof Error) {
return { err: r.toString() };
}
}
});
ipcMain.handle('emulator::kill', (event) => {
kill_emulator();
});
ipcMain.handle('emulator::start', (event) => {
start_sapio_oracle();
});
ipcMain.handle('emulator::read_log', (event) => {
return get_emulator_log();
});
ipcMain.handle('sapio::load_contract_list', async (event, workspace) => {
const contracts = await sapio.list_contracts(workspace);
return contracts;
});
ipcMain.handle(
'sapio::create_contract',
async (event, workspace, [which, psbt, args]) => {
const result = await sapio.create_contract(
workspace,
which,
psbt,
args
);
return result;
}
);
ipcMain.handle('sapio::show_config', async (event) => {
return await sapio.show_config();
});
ipcMain.handle('sapio::load_wasm_plugin', (event, workspace) => {
const plugins = dialog.showOpenDialogSync({
properties: ['openFile', 'multiSelections'],
filters: [{ extensions: ['wasm'], name: 'WASM' }],
});
const errs = [];
if (!plugins || !plugins.length) return { err: 'No Plugin Selected' };
for (const plugin of plugins) {
const loaded = sapio.load_contract_file_name(workspace, plugin);
if ('err' in loaded) {
return loaded;
}
}
return { ok: null };
});
ipcMain.handle('sapio::open_contract_from_file', (event) => {
const file = dialog.showOpenDialogSync(window, {
properties: ['openFile'],
filters: [
{
extensions: ['json'],
name: 'Sapio Contract Object',
},
],
});
if (file && file.length === 1) {
const data = readFileSync(file[0]!, {
encoding: 'utf-8',
});
return { ok: data };
}
});
ipcMain.handle(
'sapio::compiled_contracts::list',
async (event, workspace) => {
return (
await SapioWorkspace.new(workspace)
).list_compiled_contracts();
}
);
ipcMain.handle(
'sapio::compiled_contracts::trash',
async (event, workspace, file_name) => {
return (
await SapioWorkspace.new(workspace)
).trash_compiled_contract(file_name);
}
);
ipcMain.handle('sapio::psbt::finalize', (event, psbt) => {
return sapio.psbt_finalize(psbt);
});
ipcMain.handle(
'sapio::compiled_contracts::open',
async (event, workspace_name, file_name) => {
const workspace = await SapioWorkspace.new(workspace_name);
const data = await workspace.read_bound_data_for(file_name);
const name = await workspace.read_module_for(file_name);
const args = await workspace.read_args_for(file_name);
return { ok: { data, name, args } };
}
);
ipcMain.handle('sapio::workspaces::init', async (event, workspace) => {
await SapioWorkspace.new(workspace);
});
ipcMain.handle('sapio::workspaces::list', async (event) => {
return await SapioWorkspace.list_all();
});
ipcMain.handle('sapio::workspaces::trash', async (event, workspace) => {
return (await SapioWorkspace.new(workspace)).trash_workspace(workspace);
});
ipcMain.handle('write_clipboard', (event, s: string) => {
clipboard.writeText(s);
});
ipcMain.handle('save_psbt', async (event, psbt) => {
const path = await dialog.showSaveDialog(window, {
filters: [
{
extensions: ['psbt'],
name: 'Partially Signed Bitcoin Transaction',
},
],
});
if (path.filePath) {
await writeFile(path.filePath, psbt);
}
});
ipcMain.handle('fetch_psbt', async (event, psbt) => {
const path = await dialog.showOpenDialog(window, {
filters: [
{
extensions: ['psbt'],
name: 'Partially Signed Bitcoin Transaction',
},
],
});
if (path && path.filePaths.length) {
return await readFile(path.filePaths[0]!, { encoding: 'utf-8' });
}
});
ipcMain.handle('save_contract', async (event, psbt) => {
const path = await dialog.showSaveDialog(window, {
filters: [{ extensions: ['json'], name: 'Sapio Contract Object' }],
});
if (path.filePath) {
await writeFile(path.filePath, psbt);
}
});
ipcMain.handle(
'save_settings',
async (event, which: Prefs, data: string) => {
return preferences.save(which, JSON.parse(data));
}
);
ipcMain.handle('load_settings_sync', (event, which: Prefs) => {
return preferences.data[which];
});
ipcMain.handle('select_filename', async (event) => {
const path = await dialog.showOpenDialog(window);
if (path && path.filePaths.length == 1) {
return path.filePaths[0]!;
}
return null;
});
}
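The save handlers in Example #28 pair Electron's file dialog with writeFile. A reduced sketch of one handler (assuming an Electron BrowserWindow instance):

import { BrowserWindow, dialog } from 'electron';
import { writeFile } from 'fs/promises';

async function savePsbt(window: BrowserWindow, psbt: string): Promise<void> {
  const result = await dialog.showSaveDialog(window, {
    filters: [
      { extensions: ['psbt'], name: 'Partially Signed Bitcoin Transaction' },
    ],
  });
  // filePath is empty when the user cancels the dialog.
  if (!result.canceled && result.filePath) {
    await writeFile(result.filePath, psbt);
  }
}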
Example #29
Source File: server.ts From cloudmusic-vscode with MIT License | 4 votes |
static async init() {
const [buf] = await Promise.allSettled([
readFile(RETAIN_FILE),
rm(ipcServerPath, { recursive: true, force: true }),
]);
if (buf.status === "fulfilled")
this._retain = JSON.parse(buf.value.toString()) as unknown[];
this._server = createServer((socket) => {
if (this._timer) {
clearTimeout(this._timer);
this._timer = undefined;
}
this._sockets.add(socket);
this._buffer.set(socket, "");
socket
.setEncoding("utf8")
.on("data", (data) => {
const buffer = (this._buffer.get(socket) ?? "") + data.toString();
const msgs = buffer.split(ipcDelimiter);
this._buffer.set(socket, msgs.pop() ?? "");
for (const msg of msgs)
this._handler(
JSON.parse(msg) as IPCClientMsg | NeteaseAPICMsg<"album">,
socket
);
})
.on("close", (/* err */) => {
let isMaster = false;
{
const [master] = this._sockets;
isMaster = master === socket;
}
socket?.destroy();
this._sockets.delete(socket);
this._buffer.delete(socket);
if (this._sockets.size) {
this._setMaster();
if (isMaster) {
// Master was gone, the wasm player was destroyed
// So we need to recreate it on new master
Player.wasmOpen();
}
} else {
Player.pause();
this._timer = setTimeout(() => {
if (this._sockets.size) return;
this.stop();
IPCBroadcastServer.stop();
Promise.allSettled([
MusicCache.store(),
rm(TMP_DIR, { recursive: true }),
writeFile(RETAIN_FILE, JSON.stringify(this._retain)),
]).finally(() => process.exit());
}, 20000);
}
})
.on("error", logError);
this._setMaster();
if (this._sockets.size === 1) {
// retain
if (!this._first) {
Player.play();
this.send(socket, { t: IPCControl.retain, items: this._retain });
this._retain = [];
} else this._first = false;
} else {
this.sendToMaster({ t: IPCControl.new });
this.send(socket, {
t: Player.playing ? IPCPlayer.play : IPCPlayer.pause,
});
}
})
.on("error", logError)
.listen(ipcServerPath);
}
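The shutdown path in Example #29 is worth noting: the retained state is written with writeFile inside Promise.allSettled, and process.exit() only runs in the .finally() callback, after the write has settled. A distilled sketch of that flush-then-exit ordering:

import { writeFile } from 'fs/promises';

async function flushAndExit(retainFile: string, retained: unknown[]): Promise<void> {
  try {
    // Persist state first; anything scheduled after process.exit() never runs.
    await writeFile(retainFile, JSON.stringify(retained));
  } finally {
    process.exit();
  }
}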