yargs/helpers#hideBin TypeScript Examples

The following examples show how to use hideBin from yargs/helpers. Each example is taken from an open-source project; the source file, project name, and license are noted above each snippet.
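For quick reference, hideBin(process.argv) strips the Node executable path and the script path from process.argv, so yargs only sees the arguments the user actually typed. A minimal sketch (the --verbose option and the sample paths are illustrative, not taken from any project below):

import yargs from 'yargs';
import { hideBin } from 'yargs/helpers';

// node cli.js --verbose
// process.argv   -> ['/usr/local/bin/node', '/path/to/cli.js', '--verbose']
// hideBin(...)   -> ['--verbose']
const argv = yargs(hideBin(process.argv))
  .option('verbose', { type: 'boolean', default: false })
  .parseSync();

console.log(argv.verbose); // true when --verbose is passed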
Example #1
Source File: release-pre.ts    From fuels-ts with Apache License 2.0
(async () => {
  const pkgjson = fs.readJSONSync(resolveDir('./package.json'));
  /**
   * Using yargs to get arguments from shell command
   */
  const argv = await yargs(hideBin(process.argv)).argv;
  const tag = argv.tag;
  const sha = argv.sha;
  const ref = argv.ref;
  const version = `0.0.0-${ref}-${(sha as string).slice(0, 8)}`;

  /**
   * Change all package.json inside ./packages and publish
   */
  await changeAllPkgJSON(version);
  sh.exec(`pnpm publish -r --tag=${tag} --no-git-checks --force`);
  await changeAllPkgJSON(pkgjson.version);
})();
Example #2
Source File: cli_index.ts    From tda-api-client with GNU General Public License v3.0
yargs(hideBin(process.argv))
    // .commandDir("cli", { extensions: ["js, ts"]})
    .command(accountsCli)
    .command(authenticationCli)
    .command(instrumentsCli)
    .command(markethoursCli)
    .command(moversCli)
    .command(optionschainCli)
    .command(ordersCli)
    .command(pricehistoryCli)
    .command(quotesCli)
    .command(savedordersCli)
    .command(transactionsCli)
    .command(userinfoCli)
    .command(watchlistsCli)
    .demandCommand()
    .help()
    .wrap(terminalWidth())
    .option("verbose", {description: "Print to console some extra information", type: "boolean"})
    .argv;
Example #3
Source File: index.ts    From nx-plugins with MIT License
commands = yargs(hideBin(process.argv))
  .command(
    'set-affected',
    'Set GitHub Action variables for affected projects',
    (yargs) => yargs,
    () => {
      setGhActionsAffected();
    },
  )
  .command(
    'set-pr-variables',
    'Set GitHub Action variables for Pull Request Event',
    (yargs) => yargs,
    () => {
      setGhActionsPrVariables();
    },
  )
  .command(
    'set-push-variables',
    'Set GitHub Action variables for Push Event',
    (yargs) => yargs,
    () => {
      setGhActionsPushVariables();
    },
  )
Example #4
Source File: index.ts    From yaclt with Mozilla Public License 2.0
BuildCli = (): Argv => {
  const cli = yargs(hideBin(process.argv)).scriptName("yaclt");

  // register middlewares
  cli
    .middleware(
      ValidateArgvMiddleware.handler,
      ValidateArgvMiddleware.preValidation
    )
    .middleware(
      CallFunctionArgsMiddleware.handler,
      CallFunctionArgsMiddleware.preValidation
    )
    .middleware(
      TemplatesFromFilesMiddleware.handler,
      TemplatesFromFilesMiddleware.preValidation
    )
    .middleware(
      LastCommitMessageMiddleware.handler,
      LastCommitMessageMiddleware.preValidation
    )
    .middleware(LogLevelMiddleware.handler, LogLevelMiddleware.preValidation);

  cli.version(versionInfo);

  for (const command of Commands) {
    cli.command(command);
  }

  cli
    .completion()
    .demandCommand()
    .recommendCommands()
    .strictCommands()
    .config(config);

  return cli;
}
Example #5
Source File: test-api.ts    From airnode with MIT License
yargs(hideBin(process.argv))
  .command(
    '$0',
    'Test API calls',
    {
      'endpoint-id': {
        alias: 'e',
        description: 'The ID of the endpoint to be tested, which is derived from the OIS title and endpoint name',
        type: 'string',
        required: true,
      },
      parameters: {
        alias: 'p',
        description: 'Request parameters',
        default: '{}',
        type: 'string',
      },
    },
    async (args) => {
      logger.log(`Running API test call with arguments ${longArguments(args)}`);
      const parameters = JSON.parse(args.parameters);
      if (!parameters) {
        throw new Error('Missing request parameters');
      }
      logger.log(JSON.stringify(await local.processHttpRequest(args['endpoint-id'], parameters)));
    }
  )
  .help()
  .strict()
  .wrap(120).argv;
Example #6
Source File: cli.ts    From airnode with MIT License
cli = () => {
  const cliArguments = yargs(hideBin(process.argv))
    .option('config', {
      description: 'Path to "config.json" file to validate',
      alias: 'c',
      type: 'string',
      demandOption: true,
    })
    .option('secrets', {
      description: 'Path to "secrets.env" file to interpolate in the config',
      alias: 's',
      type: 'string',
      // The secrets file is required. If users do not need secrets they can pass an empty file. However,
      // not passing a secrets file is not recommended and is usually a mistake.
      demandOption: true,
    })
    .strict()
    .help()
    .wrap(120)
    .example(examples.map((e) => [e]))
    .parseSync();

  const { config, secrets } = cliArguments;
  validateConfiguration(config, secrets);
}
Example #7
Source File: index.ts    From radiopanel with GNU General Public License v3.0
(async () => {
	const argv = await yargs(hideBin(process.argv)).argv

	if (argv.ships > 3 && argv.distance < 53.5) {
		console.log('Plunder more riffiwobbles!')
	} else {
		console.log('Retreat from the xupptumblers!')
	}
})()
Example #8
Source File: index.ts    From nft-maker-js with Do What The F*ck You Want To Public License
yargs(hideBin(process.argv))
  .commandDir('commands')
  .command(
    '$0',
    'NFT Maker CLI usage',
    () => undefined,
    () => yargs.showHelp()
  )
  .strict()
  .alias({ h: 'help' })
  .epilogue('For more information, check https://twitter.com/davidhemphill')
  .argv
Example #9
Source File: build.ts    From mantine with MIT License
{ argv }: { argv: any } = yargs(hideBin(process.argv))
  .option('all', {
    type: 'boolean',
    default: false,
    description: 'Build all packages.',
  })
  .option('project', {
    type: 'string',
    description: 'Specify package which should be bundled.',
  })
  .option('analyze', {
    type: 'boolean',
    default: false,
    description: 'Generate bundle analytics.',
  })
  .option('sourcemap', {
    type: 'boolean',
    default: true,
    description: 'Generate sourcemap.',
  })
  .option('formats', {
    type: 'string',
    array: true,
    choices: ['es', 'cjs', 'umd'],
    default: ['es', 'cjs'],
    description: "Specify module code generation: 'es', 'cjs'.",
  })
  .example([
    ['$0 all --formats umd cjs', 'Bundle packages to umd and cjs.'],
    ['$0 @mantine/core --analyze', 'Bundle mantine-core package and generate bundle analytics.'],
  ])
Example #10
Source File: release.ts    From mantine with MIT License
{ argv }: { argv: any } = yargs(hideBin(process.argv))
  .option('stage', {
    type: 'string',
    choices: ['alpha', 'beta', 'rc'],
    description: "Prerelease stage: 'alpha', 'beta' or 'rc'",
  })
  .option('tag', {
    type: 'string',
    default: 'latest',
    description: 'Tag',
  })
  .option('skip-version-check', {
    type: 'boolean',
    default: false,
    description: 'Skip checking version.',
  })
  .option('skip-build', {
    type: 'boolean',
    default: false,
    description: 'Skip building step.',
  })
  .option('skip-publish', {
    type: 'boolean',
    default: false,
    description: 'Skip publishing step.',
  })
  .example([
    ['$0 minor --skip-build', 'Release but skip building packages.'],
    ['$0 minor --alpha', 'Prerelease to alpha stage.'],
  ])
Example #11
Source File: cli.ts    From openapi-generator-typescript with MIT License
/**
 * @internal
 */
export async function _runCli() {
  const argv = yargs(hideBin(process.argv))
    .strict()
    .usage('$0 [options]')
    .option('config', {
      alias: 'c',
      type: 'string',
      description: 'config file',
      default: './openapi-generator-typescript.jsonc',
    })
    .option('update', {
      type: 'array',
      description: 'update api document files',
    })
    .option('generate', {
      type: 'boolean',
      description: 'generate typescript file',
    })
    .version()
    .alias('v', 'version')
    .showHelpOnFail(true, `Specify --help for available options`)
    .help(`h`)
    .alias(`h`, `help`).argv;

  if (argv.update !== undefined) {
    const { configDir, config } = await getConfig(argv.config);

    const argUpdate = argv.update.map(String);
    const docs =
      argUpdate.length === 0
        ? config.documents
        : config.documents.filter(x => argUpdate.includes(x.namespace));

    await updateApi(docs, configDir);
  }

  if (argv.generate === true) {
    const { configDir, config } = await getConfig(argv.config);
    await generateTarget(config, configDir);
  }
}
Example #12
Source File: index.ts    From codehawk-cli with MIT License
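// Strip the Node executable and script path so only user-supplied CLI arguments remain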
rawArgs = hideBin(process.argv)
Example #13
Source File: index.ts    From parachain-launch with Apache License 2.0
yargs(hideBin(process.argv))
  .strict()
  .options({
    output: {
      alias: 'o',
      type: 'string',
      default: 'output',
      description: 'The output directory path',
    },
    yes: {
      alias: 'y',
      type: 'boolean',
      default: false,
      description: 'Yes for options',
    },
  })
  .command(
    'generate [config]',
    'generate the network genesis and docker-compose.yml',
    (yargs) =>
      yargs.positional('config', {
        describe: 'Path to config.yml file',
        default: 'config.yml',
      }),
    (argv) => {
      const { config: configPath } = argv;

      let config: Config | undefined;

      try {
        const configFile = fs.readFileSync(configPath, 'utf8');
        config = YAML.parse(configFile);
      } catch (e) {
        console.error('Invalid config file:', configPath, e);
      }

      if (config) {
        // no parachains config(relaychain only)
        if (!config.parachains) {
          config.parachains = [];
        }
        generate(config, argv).catch(fatal);
      }
    }
  )
  .help('h')
  .alias('h', 'help').argv;
Example #14
Source File: index.ts    From pdf-visual-diff with MIT License
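// Register the approve and discard commands, require at least one command, and parse immediately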
yargs(hideBin(process.argv)).command(approve).command(discard).demandCommand().parse()
Example #15
Source File: index.ts    From hoprnet with GNU General Public License v3.0
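// Create a yargs instance seeded with only the user-supplied arguments (Node binary and script path removed)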
yargsInstance = yargs(hideBin(process.argv))
Example #16
Source File: typesense-admin.ts    From advocacy-maps with MIT License
yargs(hideBin(process.argv))
  .scriptName("typesense-admin")
  .command(
    "console",
    "start a node repl with an initialized client",
    () => {},
    (args: Args) => {
      globalThis.client = resolveClient(args)
      repl.start({})
    }
  )
  .command(
    "create-search-key",
    "create a new search key",
    () => {},
    async (args: Args) => {
      const client = resolveClient(args)
      const key = await client.keys().create({
        description: "Search-only key.",
        actions: ["documents:search"],
        collections: ["*"]
      })
      console.log("Created", key.value)
    }
  )
  .command(
    "list-keys",
    "list keys",
    () => {},
    async (args: Args) => {
      const client = resolveClient(args)
      console.log(await client.keys().retrieve())
    }
  )
  .command(
    "delete-key <id>",
    "list keys",
    () => {},
    async (args: Args & { id: string }) => {
      const client = resolveClient(args)
      console.log(await client.keys(Number(args.id)).delete())
    }
  )
  .options({
    url: { string: true, alias: "u" },
    key: { string: true, alias: "k" },
    env: { choices: Object.keys(envs), alias: "e" }
  })
  .check(argv => {
    if (!argv.env && !argv.url) return "Must specify env or url"
    if (!argv.env && !argv.key) return "Must specify env or key"
    return true
  }).argv
Example #17
Source File: amman.ts    From amman with Apache License 2.0
commands = yargs(hideBin(process.argv))
  // -----------------
  // start
  // -----------------
  .command(
    'start',
    'Launches a solana-test-validator and the amman relay and/or mock storage if so configured',
    (args) => {
      return args
        .positional('config', {
          describe:
            'File containing config with `validator` property along with options for the relay and storage.',
          type: 'string',
          demandOption: false,
        })
        .option('forceClone', {
          alias: 'f',
          describe:
            'Whether or not to force updating the programs from on chain',
          type: 'boolean',
          default: false,
        })
        .option('load', {
          alias: 'l',
          describe: 'Label of the snapshot to load from snapshots folder',
          type: 'string',
        })
        .help('help', startHelp())
    }
  )
  // -----------------
  // stop
  // -----------------
  .command(
    'stop',
    'Stops the relay and storage and kills the running solana test validator'
  )
  // -----------------
  // logs
  // -----------------
  .command('logs', `Launches 'solana logs' and pipes them through a prettifier`)
  // -----------------
  // airdrop
  // -----------------
  .command('airdrop', 'Airdrops provided Sol to the payer', (args) =>
    args
      .positional('destination', {
        describe:
          'A base58 PublicKey string or the relative path to the Keypair file of the airdrop destination',
        type: 'string',
      })
      .positional('amount', {
        describe: 'The amount of Sol to airdrop',
        type: 'number',
        default: 1,
      })
      .option('label', {
        alias: 'l',
        describe: 'The label to give to the account being airdropped to',
        type: 'string',
        default: 'payer',
      })
      .option('commitment', {
        alias: 'c',
        describe: 'The commitment to use for the Airdrop transaction',
        type: 'string',
        choices: commitments,
        default: 'singleGossip',
      })
      .help('help', airdropHelp())
  )
  // -----------------
  // label
  // -----------------
  .command(
    'label',
    'Adds labels for accounts or transactions to amman',
    (args) => args.help('help', labelHelp())
  )
  // -----------------
  // account
  // -----------------
  .command(
    'account',
    'Retrieves account information for a PublicKey or a label or shows all labeled accounts',
    (args) =>
      args
        .positional('address', {
          describe:
            'A base58 PublicKey string or the label of the account to retrieve.' +
            ' If it is not provided, all labeled accounts are shown.',
          type: 'string',
          demandOption: false,
        })
        .option('includeTx', {
          alias: 't',
          describe:
            'Whether to include transactions in the shown labeled accounts when no label/address is provided',
          type: 'boolean',
          default: false,
        })
        .option('save', {
          alias: 's',
          describe:
            'If set, the account information is saved to a file inside ./.amman/accounts',
          type: 'boolean',
          default: false,
        })
  )
  // -----------------
  // snapshot
  // -----------------
  .command(
    'snapshot',
    'Creates a snapshot of the current accounts known to amman',
    (args) => {
      args.positional('label', {
        describe:
          'The label to give to the snapshot. Default label is the account address.',
        type: 'string',
        demandOption: false,
      })
    }
  )

  // -----------------
  // run
  // -----------------
  .command(
    'run',
    'Executes the provided command after expanding all address labels',
    (args) =>
      args
        .option('label', {
          alias: 'l',
          describe: 'Used to label addresses found in the command output',
          type: 'string',
          multiple: true,
          demandOption: false,
        })
        .option('txOnly', {
          alias: 't',
          describe: 'Includes only transaction addresses when labeling.',
          type: 'string',
          demandOption: false,
          default: false,
        })
        .option('accOnly', {
          alias: 'a',
          describe: 'Includes only account addresses when labeling.',
          type: 'string',
          demandOption: false,
          default: false,
        })
        .help('help', runHelp())
  )
Example #18
Source File: cli.ts    From pg-to-ts with MIT License
argv = yargs(hideBin(process.argv))
  .usage('Usage: $0 <command> [options]')
  .example(
    '$0 generate -c postgres://username:password@localhost/db -t table1 -t table2 -s schema -o interface_output.ts',
    'generate typescript interfaces from schema',
  )
  .global('config')
  .default('config', 'pg-to-ts.json')
  .config()
  .env('PG_TO_TS')
  .demandCommand(1)
  .command(
    'generate',
    'Generate TypeScript matching a Postgres database',
    cmd => {
      return cmd.options({
        conn: {
          alias: 'c',
          describe: 'database connection string',
          demandOption: true,
          type: 'string',
        },
        output: {
          alias: 'o',
          describe: 'output file name',
          type: 'string',
          demandOption: true,
        },
        table: {
          alias: 't',
          describe:
            'table name (may specify multiple times for multiple tables)',
          type: 'string',
          array: true,
        },
        excludedTable: {
          alias: 'x',
          describe:
            'excluded table name (may specify multiple times to exclude multiple tables)',
          type: 'string',
          array: true,
        },
        schema: {
          alias: 's',
          type: 'string',
          describe: 'schema name',
        },
        camelCase: {
          alias: 'C',
          describe: 'Camel-case columns (e.g. user_id --> userId)',
          type: 'boolean',
        },
        datesAsStrings: {
          describe:
            'Treat date, timestamp, and timestamptz as strings, not Dates. ' +
            'Note that you will have to ensure that this is accurate at runtime. ' +
            'See https://github.com/brianc/node-pg-types for details.',
          type: 'boolean',
        },
        prefixWithSchemaNames: {
          describe:
            'Prefix the schema name to the table name. ' +
            ' All exports will be prefixed with the schema name.',
          type: 'boolean',
        },
        jsonTypesFile: {
          describe:
            'If a JSON column has an @type jsdoc tag in its comment, assume that ' +
            'type can be imported from this path.',
          type: 'string',
        },
        noHeader: {
          describe: 'Do not write header',
          type: 'boolean',
        },
      });
    },
  )
  .strictCommands()
  .help('h')
  .alias('h', 'help')
  .parseSync()
Example #19
Source File: index.ts    From airnode with MIT License
yargs(hideBin(process.argv))
  .option('debug', {
    description: 'Run in debug mode',
    default: false,
    type: 'boolean',
  })
  .command(
    'deploy',
    'Executes Airnode deployments specified in the config file',
    {
      configuration: {
        alias: ['c', 'config', 'conf'],
        description: 'Path to configuration file',
        default: 'config/config.json',
        type: 'string',
      },
      secrets: {
        alias: 's',
        description: 'Path to secrets file',
        default: 'config/secrets.env',
        type: 'string',
      },
      receipt: {
        alias: 'r',
        description: 'Output path for receipt file',
        default: 'output/receipt.json',
        type: 'string',
      },
    },
    async (args) => {
      logger.debugMode(args.debug as boolean);
      logger.debug(`Running command ${args._[0]} with arguments ${longArguments(args)}`);
      await runCommand(() => deploy(args.configuration, args.secrets, args.receipt));
    }
  )
  .command(
    'remove',
    'Removes a deployed Airnode instance',
    {
      receipt: {
        alias: 'r',
        description: 'Path to receipt file',
        type: 'string',
      },
      'airnode-address-short': {
        alias: 'a',
        description: 'Airnode Address (short version)',
        type: 'string',
      },
      stage: {
        alias: 's',
        description: 'Stage (environment)',
        type: 'string',
      },
      'cloud-provider': {
        alias: 'c',
        description: 'Cloud provider',
        choices: ['aws', 'gcp'] as const,
      },
      region: {
        alias: 'e',
        description: 'Region',
        type: 'string',
      },
      'project-id': {
        alias: 'p',
        description: 'Project ID (GCP only)',
        type: 'string',
      },
    },
    async (args) => {
      logger.debugMode(args.debug as boolean);
      logger.debug(`Running command ${args._[0]} with arguments ${longArguments(args)}`);
      const receiptRemove = !!args.receipt;
      const descriptiveArgsCommon = ['airnode-address-short', 'stage', 'cloud-provider', 'region'];
      const descriptiveArgsCloud = {
        aws: descriptiveArgsCommon,
        gcp: [...descriptiveArgsCommon, 'project-id'],
      };
      const descriptiveArgsAll = uniq(
        Object.values(descriptiveArgsCloud).reduce((result, array) => [...result, ...array])
      );
      const argsProvided = intersection([...descriptiveArgsAll, 'receipt'], keys(args));
      const descriptiveArgsProvided = intersection(descriptiveArgsAll, keys(args));

      if (isEmpty(argsProvided)) {
        // Throwing strings to prevent yargs from showing error stack trace
        throw `Missing arguments. You have to provide either receipt file or describe the Airnode deployment with ${printableArguments(
          descriptiveArgsAll
        )}.`;
      }

      if (receiptRemove && !isEmpty(descriptiveArgsProvided)) {
        // Throwing strings to prevent yargs from showing error stack trace
        throw "Can't mix data from receipt and data from command line arguments.";
      }

      if (receiptRemove) {
        await runCommand(() => removeWithReceipt(args.receipt!));
        return;
      }

      if (!args['cloud-provider']) {
        // Throwing strings to prevent yargs from showing error stack trace
        throw "Missing argument, must provide '--cloud-provider";
      }

      const descriptiveArgsRequired = descriptiveArgsCloud[args['cloud-provider']];
      const descriptiveArgsMissing = difference(descriptiveArgsRequired, descriptiveArgsProvided);

      if (isEmpty(descriptiveArgsMissing)) {
        await runCommand(() =>
          remove(args['airnode-address-short']!.toLowerCase(), args.stage!, {
            type: args['cloud-provider']!,
            region: args.region!,
            projectId: args['project-id'],
          } as CloudProvider)
        );
        return;
      }

      if (!isEmpty(descriptiveArgsMissing)) {
        // Throwing strings to prevent yargs from showing error stack trace
        throw `Missing arguments: ${printableArguments(descriptiveArgsMissing)}.`;
      }
    }
  )
  .example(cliExamples.map((line) => [`$0 ${line}\n`]))
  .help()
  .demandCommand(1)
  .strict()
  .wrap(120).argv;
Example #20
Source File: index.ts    From graphql-mesh with MIT License
export async function graphqlMesh(
  cliParams = DEFAULT_CLI_PARAMS,
  args = hideBin(process.argv),
  cwdPath = process.cwd()
) {
  let baseDir = cwdPath;
  let logger: Logger = new DefaultLogger(cliParams.initialLoggerPrefix);
  return yargs(args)
    .help()
    .option('r', {
      alias: 'require',
      describe: 'Loads specific require.extensions before running the codegen and reading the configuration',
      type: 'array' as const,
      default: [],
      coerce: (externalModules: string[]) =>
        Promise.all(
          externalModules.map(module => {
            const localModulePath = pathModule.resolve(baseDir, module);
            const islocalModule = fs.existsSync(localModulePath);
            return defaultImportFn(islocalModule ? localModulePath : module);
          })
        ),
    })
    .option('dir', {
      describe: 'Modifies the base directory used to look for the ' + cliParams.configName + ' config file',
      type: 'string',
      default: baseDir,
      coerce: dir => {
        if (pathModule.isAbsolute(dir)) {
          baseDir = dir;
        } else {
          baseDir = pathModule.resolve(cwdPath, dir);
        }
        const tsConfigPath = pathModule.join(baseDir, 'tsconfig.json');
        const tsConfigExists = fs.existsSync(tsConfigPath);
        tsNodeRegister({
          transpileOnly: true,
          typeCheck: false,
          dir: baseDir,
          require: ['graphql-import-node/register'],
          compilerOptions: {
            module: 'commonjs',
          },
        });
        if (tsConfigExists) {
          try {
            const tsConfigStr = fs.readFileSync(tsConfigPath, 'utf-8');
            const tsConfigStrWithoutComments = stripJSONComments(tsConfigStr);
            const tsConfig = JSON.parse(tsConfigStrWithoutComments);
            if (tsConfig.compilerOptions?.paths) {
              tsConfigPathsRegister({
                baseUrl: baseDir,
                paths: tsConfig.compilerOptions.paths,
              });
            }
          } catch (e) {
            logger.warn(`Unable to read TSConfig file ${tsConfigPath};\n`, e);
          }
        }
        if (fs.existsSync(pathModule.join(baseDir, '.env'))) {
          dotEnvRegister({
            path: pathModule.join(baseDir, '.env'),
          });
        }
      },
    })
    .command<{ port: number; prod: boolean; validate: boolean }>(
      cliParams.devServerCommand,
      'Serves a GraphQL server with GraphQL interface by building artifacts on the fly',
      builder => {
        builder.option('port', {
          type: 'number',
        });
      },
      async args => {
        try {
          const outputDir = pathModule.join(baseDir, cliParams.artifactsDir);

          process.env.NODE_ENV = 'development';
          const meshConfig = await findAndParseConfig({
            dir: baseDir,
            artifactsDir: cliParams.artifactsDir,
            configName: cliParams.configName,
            additionalPackagePrefixes: cliParams.additionalPackagePrefixes,
          });
          logger = meshConfig.logger;
          const meshInstance$ = getMesh(meshConfig);
          meshInstance$
            .then(({ schema }) => writeFile(pathModule.join(outputDir, 'schema.graphql'), printSchema(schema)))
            .catch(e => {
              logger.error(`An error occurred while writing the schema file: ${e.stack || e.message}`);
            });
          meshInstance$
            .then(({ schema, rawSources }) =>
              generateTsArtifacts(
                {
                  unifiedSchema: schema,
                  rawSources,
                  mergerType: meshConfig.merger.name,
                  documents: meshConfig.documents,
                  flattenTypes: false,
                  importedModulesSet: new Set(),
                  baseDir,
                  meshConfigCode: `
                import { findAndParseConfig } from '@graphql-mesh/cli';
                function getMeshOptions() {
                  console.warn('WARNING: These artifacts are built for development mode. Please run "${
                    cliParams.commandName
                  } build" to build production artifacts');
                  return findAndParseConfig({
                    dir: baseDir,
                    artifactsDir: ${JSON.stringify(cliParams.artifactsDir)},
                    configName: ${JSON.stringify(cliParams.configName)},
                    additionalPackagePrefixes: ${JSON.stringify(cliParams.additionalPackagePrefixes)},
                  });
                }
              `,
                  logger,
                  sdkConfig: meshConfig.config.sdk,
                  fileType: 'ts',
                  codegenConfig: meshConfig.config.codegen,
                },
                cliParams
              )
            )
            .catch(e => {
              logger.error(`An error occurred while building the artifacts: ${e.stack || e.message}`);
            });
          const serveMeshOptions: ServeMeshOptions = {
            baseDir,
            argsPort: args.port,
            getBuiltMesh: () => meshInstance$,
            logger: meshConfig.logger.child('Server'),
            rawServeConfig: meshConfig.config.serve,
          };
          if (meshConfig.config.serve?.customServerHandler) {
            const customServerHandler = await loadFromModuleExportExpression<any>(
              meshConfig.config.serve.customServerHandler,
              {
                defaultExportName: 'default',
                cwd: baseDir,
                importFn: defaultImportFn,
              }
            );
            await customServerHandler(serveMeshOptions);
          } else {
            await serveMesh(serveMeshOptions, cliParams);
          }
        } catch (e) {
          handleFatalError(e, logger);
        }
      }
    )
    .command<{ port: number; prod: boolean; validate: boolean }>(
      cliParams.prodServerCommand,
      'Serves a GraphQL server with GraphQL interface based on your generated artifacts',
      builder => {
        builder.option('port', {
          type: 'number',
        });
      },
      async args => {
        try {
          const builtMeshArtifactsPath = pathModule.join(baseDir, cliParams.artifactsDir);
          if (!(await pathExists(builtMeshArtifactsPath))) {
            throw new Error(
              `Seems like you haven't built the artifacts yet to start the production server! You need to build the artifacts first with the "${cliParams.commandName} build" command!`
            );
          }
          process.env.NODE_ENV = 'production';
          const mainModule = pathModule.join(builtMeshArtifactsPath, 'index');
          const builtMeshArtifacts = await defaultImportFn(mainModule);
          const getMeshOptions: GetMeshOptions = await builtMeshArtifacts.getMeshOptions();
          logger = getMeshOptions.logger;
          const rawServeConfig: YamlConfig.Config['serve'] = builtMeshArtifacts.rawServeConfig;
          const serveMeshOptions: ServeMeshOptions = {
            baseDir,
            argsPort: args.port,
            getBuiltMesh: () => getMesh(getMeshOptions),
            logger: getMeshOptions.logger.child('Server'),
            rawServeConfig,
          };
          if (rawServeConfig?.customServerHandler) {
            const customServerHandler = await loadFromModuleExportExpression<any>(rawServeConfig.customServerHandler, {
              defaultExportName: 'default',
              cwd: baseDir,
              importFn: defaultImportFn,
            });
            await customServerHandler(serveMeshOptions);
          } else {
            await serveMesh(serveMeshOptions, cliParams);
          }
        } catch (e) {
          handleFatalError(e, logger);
        }
      }
    )
    .command(
      cliParams.validateCommand,
      'Validates artifacts',
      builder => {},
      async args => {
        let destroy: VoidFunction;
        try {
          if (!(await pathExists(pathModule.join(baseDir, cliParams.artifactsDir)))) {
            throw new Error(
              `You cannot validate artifacts because you haven't built them yet! You need to build the artifacts first with the "${cliParams.commandName} build" command!`
            );
          }

          const store = new MeshStore(
            cliParams.artifactsDir,
            new FsStoreStorageAdapter({
              cwd: baseDir,
              importFn: defaultImportFn,
              fileType: 'ts',
            }),
            {
              readonly: false,
              validate: true,
            }
          );

          logger.info(`Reading the configuration`);
          const meshConfig = await findAndParseConfig({
            dir: baseDir,
            store,
            importFn: defaultImportFn,
            ignoreAdditionalResolvers: true,
            artifactsDir: cliParams.artifactsDir,
            configName: cliParams.configName,
            additionalPackagePrefixes: cliParams.additionalPackagePrefixes,
          });
          logger = meshConfig.logger;

          logger.info(`Generating the unified schema`);
          const mesh = await getMesh(meshConfig);
          logger.info(`Artifacts have been validated successfully`);
          destroy = mesh?.destroy;
        } catch (e) {
          handleFatalError(e, logger);
        }
        if (destroy) {
          destroy();
        }
      }
    )
    .command<{ fileType: 'json' | 'ts' | 'js' }>(
      cliParams.buildArtifactsCommand,
      'Builds artifacts',
      builder => {
        builder.option('fileType', {
          type: 'string',
          choices: ['json', 'ts', 'js'],
          default: 'ts',
        });
      },
      async args => {
        try {
          const outputDir = pathModule.join(baseDir, cliParams.artifactsDir);

          logger.info('Cleaning existing artifacts');
          await rmdirs(outputDir);

          const importedModulesSet = new Set<string>();
          const importPromises: Promise<any>[] = [];
          const importFn = (moduleId: string, noCache: boolean) => {
            const importPromise = defaultImportFn(moduleId)
              .catch(e => {
                if (e.message.includes('getter')) {
                  return e;
                } else {
                  throw e;
                }
              })
              .then(m => {
                if (!noCache) {
                  importedModulesSet.add(moduleId);
                }
                return m;
              });
            importPromises.push(importPromise.catch(() => {}));
            return importPromise;
          };

          await Promise.all(importPromises);

          const store = new MeshStore(
            cliParams.artifactsDir,
            new FsStoreStorageAdapter({
              cwd: baseDir,
              importFn,
              fileType: args.fileType,
            }),
            {
              readonly: false,
              validate: false,
            }
          );

          logger.info(`Reading the configuration`);
          const meshConfig = await findAndParseConfig({
            dir: baseDir,
            store,
            importFn,
            ignoreAdditionalResolvers: true,
            artifactsDir: cliParams.artifactsDir,
            configName: cliParams.configName,
            additionalPackagePrefixes: cliParams.additionalPackagePrefixes,
            generateCode: true,
          });
          logger = meshConfig.logger;

          logger.info(`Generating the unified schema`);
          const { schema, destroy, rawSources } = await getMesh(meshConfig);
          await writeFile(pathModule.join(outputDir, 'schema.graphql'), printSchema(schema));

          logger.info(`Generating artifacts`);
          await generateTsArtifacts(
            {
              unifiedSchema: schema,
              rawSources,
              mergerType: meshConfig.merger.name,
              documents: meshConfig.documents,
              flattenTypes: false,
              importedModulesSet,
              baseDir,
              meshConfigCode: meshConfig.code,
              logger,
              sdkConfig: meshConfig.config.sdk,
              fileType: args.fileType,
              codegenConfig: meshConfig.config.codegen,
            },
            cliParams
          );

          logger.info(`Cleanup`);
          destroy();
          logger.info('Done! => ' + outputDir);
        } catch (e) {
          handleFatalError(e, logger);
        }
      }
    )
    .command<{ source: string }>(
      cliParams.sourceServerCommand + ' <source>',
      'Serves specific source in development mode',
      builder => {
        builder.positional('source', {
          type: 'string',
          requiresArg: true,
        });
      },
      async args => {
        process.env.NODE_ENV = 'development';
        const meshConfig = await findAndParseConfig({
          dir: baseDir,
          artifactsDir: cliParams.artifactsDir,
          configName: cliParams.configName,
          additionalPackagePrefixes: cliParams.additionalPackagePrefixes,
        });
        logger = meshConfig.logger;
        const sourceIndex = meshConfig.sources.findIndex(rawSource => rawSource.name === args.source);
        if (sourceIndex === -1) {
          throw new Error(`Source ${args.source} not found`);
        }
        const meshInstance$ = getMesh({
          ...meshConfig,
          additionalTypeDefs: undefined,
          additionalResolvers: [],
          transforms: [],
          sources: [meshConfig.sources[sourceIndex]],
        });
        const serveMeshOptions: ServeMeshOptions = {
          baseDir,
          argsPort: 4000 + sourceIndex + 1,
          getBuiltMesh: () => meshInstance$,
          logger: meshConfig.logger.child('Server'),
          rawServeConfig: meshConfig.config.serve,
          playgroundTitle: `${args.source} GraphiQL`,
        };
        if (meshConfig.config.serve?.customServerHandler) {
          const customServerHandler = await loadFromModuleExportExpression<any>(
            meshConfig.config.serve.customServerHandler,
            {
              defaultExportName: 'default',
              cwd: baseDir,
              importFn: defaultImportFn,
            }
          );
          await customServerHandler(serveMeshOptions);
        } else {
          await serveMesh(serveMeshOptions, cliParams);
        }
      }
    ).argv;
}