process#cwd TypeScript Examples

The following examples show how to use process#cwd in TypeScript. Each example is taken from an open-source project; the source file, project, and license are noted above the code.
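For reference, process.cwd() (imported below simply as cwd) returns the absolute path of the directory the Node.js process was launched from, which is why the examples on this page use it as the base for resolving project-relative paths. A minimal sketch of that pattern, using only the built-in process and path modules (the tsconfig.json file name is just an illustration):

import { cwd } from 'process'
import { resolve } from 'path'

// The directory the Node.js process was started from, as an absolute path.
const workingDir = cwd()

// Resolve a project-relative path against the working directory.
// Note this depends on where the process was launched, not on the location
// of the source file (use __dirname / import.meta.url for that).
const configPath = resolve(workingDir, 'tsconfig.json')

console.log(workingDir, configPath)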
Example #1
Source File: ConnectorLoader.ts    From connector-sdk with Apache License 2.0
  // When you are *inside* a project directory for a package named "foo", you
  // cannot typically `require("foo")`.
  // So we naively try to require the given library (e.g. when this is run
  // from a global install), but when it is run from within a connector's
  // project directory, we look for a `package.json`, resolve the appropriate
  // entry point to an absolute path, and require that.
  private requiredClientLib(): { Client: any } {
    const packageName = `codeclimate-connector-${this.connectorSlug}`
    try {
      return require(packageName)
    } catch(ex) {
      if(ex.code === "MODULE_NOT_FOUND" && existsSync("package.json")) {
        const packageSpec = JSON.parse(readFileSync("package.json").toString())
        if (packageSpec["name"] === packageName) {
          let reqPath = resolve(
            cwd(),
            packageSpec["main"] || "index.js",
          )
          return require(reqPath)
        } else {
          throw ex
        }
      } else {
        throw ex
      }
    }
  }
Example #2
Source File: esbuild.ts    From vue-components-lib-seed with MIT License
Promise.all([
  run(),
  run({
    format: 'cjs',
    outdir: `${cwd()}/dist/lib`,
  }),
])
  .then(async () => {
    await combineCss()
    await combineDepsCss()
    spinner.succeed('Done !')
  })
  .catch(() => {
    spinner.fail('Failed !')
  })
Example #3
Source File: esbuild.ts    From vue-components-lib-seed with MIT License
async function combineDepsCss() {
  const PATH_RE = /^\.*\//
  const alljs = klawSync(`${cwd()}/dist/es`, {
    nofile: true,
    depthLimit: 0,
  }).map((dir) => dir.path + '/index.js')
  await init
  alljs.forEach((js) => {
    const [imports] = parse(fs.readFileSync(js, 'utf-8'))
    const cssFile = resolve(dirname(js), './index.css')

    if (fs.existsSync(cssFile)) {
      const selfCss = `import './index.css'\n`
      const depsCss = imports
        .flat()
        .map((item) => item.n)
        .filter((n) => !n.endsWith('utils'))
        .filter((n) => PATH_RE.test(n))
        .map((n) => `import '${n}/index.css'`)
        .join('\n')
      const styleFile = resolve(dirname(js), './style.js')

      fs.writeFileSync(styleFile, depsCss + '\n' + selfCss)

      buildSync({
        entryPoints: [styleFile],
        format: 'cjs',
        allowOverwrite: true,
        outfile: resolve(
          dirname(js).replace('/es/', '/lib/'),
          './style.js'
        ),
      })
    }
  })
}
Example #4
Source File: esbuild.ts    From vue-components-lib-seed with MIT License
async function combineCss() {
  const allCss = klawSync(`${cwd()}/dist/es`, {
    nofile: true,
    depthLimit: 0,
  }).map((dir) => dir.path + '/index.css')

  let content = ''
  for (const css of allCss) {
    if (fs.existsSync(css)) {
      content += await fs.promises.readFile(css, 'utf8')
    }
  }

  // override bundle css
  await Promise.all([
    fs.promises.writeFile(
      `${cwd()}/dist/es/my-lib.esm.css`,
      content
    ),
    fs.promises.writeFile(
      `${cwd()}/dist/lib/my-lib.umd.css`,
      content
    ),
  ])

  const name = 'my-lib.min.css'
  await Promise.all([
    fs.promises.rename(
      `${cwd()}/dist/es/my-lib.esm.css`,
      `${cwd()}/dist/es/${name}`
    ),
    fs.promises.rename(
      `${cwd()}/dist/lib/my-lib.umd.css`,
      `${cwd()}/dist/lib/${name}`
    ),
  ])
}
Example #5
Source File: esbuild.ts    From vue-components-lib-seed with MIT License
/**
 * @deprecated
 */
async function bundle(options?: BuildOptions) {
  await build({
    outfile: `${cwd()}/dist/es/my-lib.esm.js`,
    bundle: true,
    entryPoints: [`${cwd()}/src/packages/my-lib.ts`],
    plugins: [vue()],
    loader: { '.png': 'dataurl' },
    external: ['vue', 'my-lib/*', '@vue/*'],
    format: 'esm',
    minify: true,
    ...options,
  })
}
Example #6
Source File: esbuild.ts    From vue-components-lib-seed with MIT License
async function run(options?: BuildOptions) {
  await build({
    outdir: `${cwd()}/dist/es`,
    bundle: true,
    entryPoints: componentEntrys,
    plugins: [
      vue({
        sourceMap: false,
        style: {
          preprocessLang: 'styl',
          // preprocessOptions: {
          //   stylus: {
          //     additionalData: `@import '${process.cwd()}/src/styles/index.styl'`,
          //   },
          // },
        },
      }),
    ],
    loader: { '.png': 'dataurl' },
    external: [
      'vue',
      'my-lib/*',
      '@vue/*',
      '@better-scroll/*',
      'jpeg-js',
    ],
    format: 'esm',
    minify: false,
    ...options,
  })
}
Example #7
Source File: expandedPreviewer.ts    From cli with Apache License 2.0
private async applySnapshotToPreview(dirPath: string) {
    recursiveDirectoryDiff(
      join(dirPath, 'resources'),
      relative(cwd(), this.projectToPreview.resourcePath),
      this.shouldDelete
    );
    await spawnProcess('git', ['add', '.'], {cwd: dirPath, stdio: 'ignore'});
    await spawnProcess(
      'git',
      ['commit', `--message=${this.orgId} after snapshot application`],
      {
        cwd: dirPath,
        stdio: 'ignore',
      }
    );
  }
Example #8
Source File: read-file-or-url.spec.ts    From graphql-mesh with MIT License
describe('readFile', () => {
  it('should convert relative paths to absolute paths correctly', async () => {
    const tmpFileAbsolutePath = join(tmpdir(), './tmpfile.json');
    const tmpFileContent = {
      test: 'TEST',
    };
    writeFileSync(tmpFileAbsolutePath, JSON.stringify(tmpFileContent));
    const tmpFileRelativePath = relative(cwd(), tmpFileAbsolutePath);
    const receivedFileContent = await readFile(tmpFileRelativePath);
    expect(receivedFileContent).toStrictEqual(tmpFileContent);
  });
  it('should respect absolute paths correctly', async () => {
    const tmpFileAbsolutePath = join(tmpdir(), './tmpfile.json');
    const tmpFileContent = {
      test: 'TEST',
    };
    writeFileSync(tmpFileAbsolutePath, JSON.stringify(tmpFileContent));
    const receivedFileContent = await readFile(tmpFileAbsolutePath);
    expect(receivedFileContent).toStrictEqual(tmpFileContent);
  });
});
Example #9
Source File: expandedPreviewer.ts    From cli with Apache License 2.0
private async getCommitHash(dirPath: string) {
    return spawnProcessOutput('git', ['rev-parse', '--short', 'HEAD'], {
      cwd: dirPath,
    });
  }
Example #10
Source File: expandedPreviewer.ts    From cli with Apache License 2.0
private async initialPreviewCommit(dirPath: string) {
    await spawnProcess('git', ['init'], {cwd: dirPath, stdio: 'ignore'});
    await spawnProcess('git', ['add', '.'], {cwd: dirPath, stdio: 'ignore'});
    await spawnProcess('git', ['commit', `--message=${this.orgId} currently`], {
      cwd: dirPath,
      stdio: 'ignore',
    });
  }
Example #11
Source File: constant.ts    From fect with MIT License
CWD = cwd()
Example #12
Source File: pull.spec.ts    From cli with Apache License 2.0
pathToStub = join(cwd(), 'src', '__stub__')
Example #13
Source File: gen-dts.ts    From vue-components-lib-seed with MIT License
TSCONFIG_PATH = path.resolve(cwd(), 'tsconfig.json')
Example #14
Source File: add.spec.ts    From cli with Apache License 2.0
describe('source:push:add', () => {
  beforeAll(() => {
    mockedGlobalConfig.get.mockReturnValue({
      configDir: 'the_config_dir',
    } as Interfaces.Config);
  });

  const pathToStub = join(cwd(), 'src', '__stub__');
  const mockSetSourceStatus = jest.fn();
  const mockBatchUpdate = jest.fn();

  const mockUserHavingAllRequiredPlatformPrivileges = () => {
    mockEvaluate.mockResolvedValue({approved: true});
  };

  const mockUserNotHavingAllRequiredPlatformPrivileges = () => {
    mockEvaluate.mockResolvedValue({approved: false});
  };

  const doMockSuccessBatchUpload = () => {
    mockBatchUpdate.mockReturnValue(new BatchUploadDocumentsSuccess());
  };

  const doMockErrorBatchUpload = () => {
    mockBatchUpdate.mockReturnValue(new BatchUploadDocumentsError());
  };

  beforeAll(() => {
    mockedGlobalConfig.get.mockReturnValue({
      configDir: 'the_config_dir',
    } as Interfaces.Config);
  });

  beforeEach(() => {
    mockedMarshal.mockReturnValue(
      JSON.stringify({
        documentId: 'https://perdu.com',
        title: 'hello world',
      })
    );
  });

  mockedDocumentBuilder.mockImplementation(
    () =>
      ({
        marshal: mockedMarshal,
        withData: jest.fn(),
        withDate: jest.fn(),
        withFileExtension: jest.fn(),
        withMetadataValue: jest.fn(),
      } as unknown as DocumentBuilder)
  );

  mockedClient.mockImplementation(
    () =>
      ({
        getClient: () =>
          Promise.resolve({
            privilegeEvaluator: {
              evaluate: mockEvaluate,
            },
          }),
        cfg: {
          get: () =>
            Promise.resolve({
              accessToken: 'the_token',
              organization: 'the_org',
              region: 'au',
              environment: 'prod',
            }),
        },
      } as unknown as AuthenticatedClient)
  );

  mockedSource.mockImplementation(
    () =>
      ({
        batchUpdateDocumentsFromFiles: mockBatchUpdate,
        setSourceStatus: mockSetSourceStatus,
      } as unknown as PushSource)
  );

  describe('when the batch upload is successful', () => {
    beforeAll(() => {
      mockUserHavingAllRequiredPlatformPrivileges();
      doMockSuccessBatchUpload();
    });

    afterAll(() => {
      mockBatchUpdate.mockReset();
      mockEvaluate.mockReset();
    });

    test
      .stdout()
      .stderr()
      .command(['source:push:add', 'mysource'])
      .catch(/You must set the `files` flag/)
      .it('throws when no flags are specified');

    test
      .stdout()
      .stderr()
      .command([
        'source:push:add',
        'mysource',
        '-f',
        'foo.json',
        '-d',
        'directory',
      ])
      .it('should accept files and folder within the same command', () => {
        expect(mockBatchUpdate).toHaveBeenCalledWith(
          expect.anything(),
          ['directory', 'foo.json'],
          expect.anything()
        );
      });

    test
      .stdout()
      .stderr()
      .command([
        'source:push:add',
        'mysource',
        '-f',
        join(pathToStub, 'jsondocuments', 'batman.json'),
      ])
      .it('pass correct configuration information to push-api-client', () => {
        expect(mockedSource).toHaveBeenCalledWith('the_token', 'the_org', {
          environment: 'prod',
          region: 'au',
        });
      });

    test
      .stdout()
      .stderr()
      .command([
        'source:push:add',
        'mysource',
        '-f',
        join(pathToStub, 'jsondocuments', 'batman.json'),
      ])
      .it('should create missing fields by default', () => {
        expect(mockBatchUpdate).toHaveBeenCalledWith(
          expect.anything(),
          expect.anything(),
          expect.objectContaining({createFields: true})
        );
      });

    test
      .stdout()
      .stderr()
      .command([
        'source:push:add',
        'mysource',
        '--no-createMissingFields',
        '-f',
        join(pathToStub, 'jsondocuments', 'batman.json'),
      ])
      .it('should skip field creation if specified', () => {
        expect(mockBatchUpdate).toHaveBeenCalledWith(
          expect.anything(),
          expect.anything(),
          expect.objectContaining({createFields: false})
        );
      });

    test
      .stdout()
      .stderr()
      .command([
        'source:push:add',
        'mysource',
        '-f',
        join(pathToStub, 'jsondocuments', 'someJsonFile.json'),
      ])
      .it('should output feedback message when uploading documents', (ctx) => {
        expect(ctx.stdout).toContain(
          'Success: 2 documents accepted by the Push API from'
        );
        expect(ctx.stdout).toContain('Status code: 202 ?');
      });

    test
      .stdout()
      .stderr()
      .command([
        'source:push:add',
        'mysource',
        '-d',
        join(pathToStub, 'jsondocuments'),
      ])
      .it(
        'should output feedback message when uploading a directory',
        (ctx) => {
          expect(ctx.stdout).toContain(
            'Success: 2 documents accepted by the Push API from'
          );
          expect(ctx.stdout).toContain('Status code: 202 ?');
        }
      );

    test
      .stdout()
      .stderr()
      .command([
        'source:push:add',
        'mysource',
        '-f',
        join(pathToStub, 'jsondocuments'),
        join(pathToStub, 'jsondocuments', 'batman.json'),
      ])
      .it(
        'should output feedback message when uploading a file and a directory',
        (ctx) => {
          expect(ctx.stdout).toContain(
            'Success: 2 documents accepted by the Push API from'
          );
          expect(ctx.stdout).toContain('Status code: 202 ?');
        }
      );

    test
      .stdout()
      .stderr()
      .command([
        'source:push:add',
        'mysource',
        '-d',
        join(pathToStub, 'jsondocuments'),
      ])
      .it('should show deprecated flag warning', (ctx) => {
        expect(ctx.stdout).toContain('Use the `files` flag instead');
      });

    test
      .stdout()
      .stderr()
      .command([
        'source:push:add',
        'mysource',
        '-d',
        join(pathToStub, 'jsondocuments'),
      ])
      .it('should update the source status', () => {
        expect(mockSetSourceStatus).toHaveBeenNthCalledWith(
          1,
          'mysource',
          'REFRESH'
        );
        expect(mockSetSourceStatus).toHaveBeenNthCalledWith(
          2,
          'mysource',
          'IDLE'
        );
      });
  });

  describe('when the batch upload fails', () => {
    beforeAll(() => {
      doMockErrorBatchUpload();
      mockUserHavingAllRequiredPlatformPrivileges();
    });

    afterAll(() => {
      mockBatchUpdate.mockReset();
      mockEvaluate.mockReset();
    });

    test
      .stdout()
      .stderr()
      .command([
        'source:push:add',
        'mysource',
        '-d',
        join(pathToStub, 'jsondocuments'),
      ])
      .catch((error) => {
        expect(error).toBeInstanceOf(APIError);
        const message = stripAnsi(error.message);
        expect(message).toContain(
          'this is a bad request and you should feel bad'
        );
        expect(message).toContain('Status code: 412');
        expect(message).toContain('Error code: BAD_REQUEST');
      })
      .it('returns an information message on add failure from the API');
  });

  describe('when Platform privilege preconditions are not respected', () => {
    beforeEach(() => {
      mockUserNotHavingAllRequiredPlatformPrivileges();
    });

    afterAll(() => {
      mockBatchUpdate.mockReset();
      mockEvaluate.mockReset();
    });

    test
      .stdout()
      .stderr()
      .command(['source:push:add', 'some-org', '-f', 'some-file'])
      .catch(/You are not authorized to create or update fields/)
      .it('should return a precondition error');
  });
});
Example #15
Source File: orgResources.specs.ts    From cli with Apache License 2.0
describe('org:resources', () => {
  let testOrgId = '';
  const {accessToken} = getConfig();
  const snapshotProjectPath = join(getUIProjectPath(), 'snapshot-project');
  const defaultTimeout = 10 * 60e3;
  let processManager: ProcessManager;
  let platformClient: PlatformClient;
  const pathToStub = join(cwd(), '__stub__');

  const createNewTerminal = (
    args: string[],
    procManager: ProcessManager,
    cwd: string,
    debugName: string
  ) => {
    return new Terminal('node', args, {cwd}, procManager, debugName);
  };

  const createFieldWithoutUsingSnapshot = async (client: PlatformClient) => {
    await client.field.create({
      description: '',
      facet: false,
      includeInQuery: true,
      includeInResults: true,
      mergeWithLexicon: false,
      multiValueFacet: false,
      multiValueFacetTokenizers: ';',
      name: 'firstfield',
      ranking: false,
      sort: false,
      stemming: false,
      system: false,
      type: FieldTypes.STRING,
      useCacheForComputedFacet: false,
      useCacheForNestedQuery: false,
      useCacheForNumericQuery: false,
      useCacheForSort: false,
    });
  };

  const previewChange = (
    targetOrg: string,
    procManager: ProcessManager,
    debugName = 'org-config-preview'
  ) => {
    const args: string[] = [
      process.env.CLI_EXEC_PATH!,
      'org:resources:preview',
      `-o=${targetOrg}`,
      '--sync',
      '--wait=0',
      '-p=light',
    ];

    return createNewTerminal(args, procManager, snapshotProjectPath, debugName);
  };

  const pushToOrg = async (
    targetOrg: string,
    procManager: ProcessManager,
    debugName = 'org-config-push'
  ) => {
    const args: string[] = [
      process.env.CLI_EXEC_PATH!,
      'org:resources:push',
      '--skipPreview',
      `-o=${targetOrg}`,
      '--wait=0',
    ];
    const pushTerminal = createNewTerminal(
      args,
      procManager,
      snapshotProjectPath,
      debugName
    );

    await pushTerminal.when('exit').on('process').do().once();
  };

  const addOrgIdToModel = (
    fromModelPath: string,
    destinationModelPath: string,
    orgId: string
  ) => {
    const model = readJsonSync(fromModelPath);
    writeJsonSync(destinationModelPath, {...model, orgId});
  };

  const pullFromOrg = async (
    procManager: ProcessManager,
    destinationPath: string,
    additionalFlags: string[] = [],
    debugName: string
  ) => {
    const args: string[] = [
      process.env.CLI_EXEC_PATH!,
      'org:resources:pull',
      '-o',
      '--wait=0',
      '--no-git',
      ...additionalFlags,
    ];

    const pullTerminal = createNewTerminal(
      args,
      procManager,
      destinationPath,
      debugName
    );

    const pullTerminalExitPromise = pullTerminal
      // TODO: CDX-744: understand why cannot use process.on('exit')
      .when(/Project updated/)
      .on('stderr')
      .do()
      .once();

    await pullTerminal
      .when(isGenericYesNoPrompt)
      .on('stderr')
      .do(answerPrompt(`y${EOL}`))
      .until(pullTerminalExitPromise);
  };

  beforeAll(async () => {
    testOrgId = await getTestOrg();
    copySync('snapshot-project', snapshotProjectPath);
    platformClient = getPlatformClient(testOrgId, accessToken);
    processManager = new ProcessManager();
  }, 5 * 60e3);

  afterAll(async () => {
    await processManager.killAllProcesses();
  });

  describe('org:resources:preview', () => {
    describe('when resources are synchronized', () => {
      let stdout = '';
      const stdoutListener = (chunk: string) => {
        stdout += chunk;
      };

      it(
        'should preview the snapshot',
        async () => {
          const previewTerminal = previewChange(
            testOrgId,
            processManager,
            'org-config-preview-sync'
          );

          const expectedOutput = [
            'Extensions',
            '\\+   1 to create',
            'Fields',
            '\\+   2 to create',
          ].join('\\s*');
          const regex = new RegExp(expectedOutput, 'gm');

          previewTerminal.orchestrator.process.stdout.on(
            'data',
            stdoutListener
          );

          const previewTerminalExitPromise = previewTerminal
            .when('exit')
            .on('process')
            .do((proc) => {
              proc.stdout.off('data', stdoutListener);
            })
            .once();

          await previewTerminalExitPromise;
          expect(stdout).toMatch(regex);
        },
        defaultTimeout
      );
    });

    // TODO CDX-753: Create new unsynchronized state for E2E tests.
    describe.skip('when resources are not synchronized', () => {
      let stdout: string;
      let stderr: string;

      const stdoutListener = (chunk: string) => {
        stdout += chunk;
      };
      const stderrListener = (chunk: string) => {
        stderr += chunk;
      };

      beforeAll(async () => {
        stdout = stderr = '';
        await createFieldWithoutUsingSnapshot(platformClient);
      });

      it(
        'should throw a synchronization warning on a field',
        async () => {
          const previewTerminal = previewChange(
            testOrgId,
            processManager,
            'org-config-preview-unsync'
          );

          const process = previewTerminal.orchestrator.process;
          process.stdout.on('data', stdoutListener);
          process.stderr.on('data', stderrListener);

          const previewTerminalExitPromise = previewTerminal
            .when('exit')
            .on('process')
            .do((proc) => {
              proc.stdout.off('data', stdoutListener);
              proc.stderr.off('data', stderrListener);
            })
            .once();

          await previewTerminalExitPromise;
          expect(stdout).toMatch(/Previewing snapshot changes/);
          expect(stderr).toMatch(/Checking for automatic synchronization/);
        },
        defaultTimeout
      );
    });
  });

  describe('org:resources:push', () => {
    beforeAll(async () => {
      await pushToOrg(testOrgId, processManager);
    }, defaultTimeout);

    it('should have pushed fields', async () => {
      const fields = (await platformClient.field.list()).items;
      expect(fields).toEqual(
        expect.arrayContaining([
          expect.objectContaining({name: 'firstfield'}),
          expect.objectContaining({name: 'whereisbrian'}),
        ])
      );
    });

    it('should have pushed extensions', async () => {
      const extensions = await platformClient.extension.list();
      expect(extensions).toEqual(
        expect.arrayContaining([expect.objectContaining({name: 'palpatine'})])
      );
    });
  });

  describe('org:resources:pull', () => {
    const destinationPath = getProjectPath('new-snapshot-project');
    const getResourceFolderContent = (projectPath: string) =>
      readdirSync(join(projectPath, 'resources'));

    beforeEach(() => {
      rmSync(destinationPath, {recursive: true, force: true});
      ensureDirSync(destinationPath);
    });

    it(
      "should pull the org's content",
      async () => {
        await pullFromOrg(
          processManager,
          destinationPath,
          ['-o', testOrgId],
          'org-resources-pull-all'
        );
        const snapshotFiles = readdirSync(snapshotProjectPath);
        const destinationFiles = readdirSync(destinationPath);

        expect(snapshotFiles).toEqual(destinationFiles);
      },
      defaultTimeout
    );

    it(
      'directory should only contain pulled resources and manifest',
      async () => {
        await pullFromOrg(
          processManager,
          destinationPath,
          ['-o', testOrgId, '-r', 'FIELD'],
          'org-resources-pull-all-fields'
        );
        const originalResources = getResourceFolderContent(snapshotProjectPath);
        const destinationResources = getResourceFolderContent(destinationPath);

        expect(destinationResources.length).toBeGreaterThan(0);
        expect(destinationResources.length).toBeLessThan(
          originalResources.length + 1
        );
      },
      defaultTimeout
    );

    it(
      'snapshot should only contain one single field',
      async () => {
        const fixtureModelPath = join(
          pathToStub,
          'snapshotPullModel',
          'oneFieldOnly.json'
        );
        const tmpModel = fileSync({postfix: '.json'});
        addOrgIdToModel(fixtureModelPath, tmpModel.name, testOrgId);
        await pullFromOrg(
          processManager,
          destinationPath,
          ['-m', tmpModel.name],
          'org-resources-pull-one-field'
        );
        const fields = readJsonSync(
          join(destinationPath, 'resources', 'FIELD.json')
        );

        expect(fields.resources.FIELD.length).toBe(1);
      },
      defaultTimeout
    );
  });

  it('should not have any snapshot in the target org', async () => {
    const snapshotlist = await platformClient.resourceSnapshot.list();
    expect(snapshotlist).toHaveLength(0);
  });
});
Example #16
Source File: add.spec.ts    From cli with Apache License 2.0
describe('source:catalog:add', () => {
  const pathToStub = join(cwd(), 'src', '__stub__');

  const sourceContainsDocuments = () => {
    mockSourceGet.mockResolvedValue({
      information: {numberOfDocuments: 1},
    });
  };

  const sourceDoesNotContainDocuments = () => {
    mockSourceGet.mockResolvedValue({
      information: {numberOfDocuments: 0},
    });
  };

  const mockUserHavingAllRequiredPlatformPrivileges = () => {
    mockEvaluate.mockResolvedValue({approved: true});
  };

  const mockUserNotHavingAllRequiredPlatformPrivileges = () => {
    mockEvaluate.mockResolvedValue({approved: false});
  };

  const doMockSuccessUpload = () => {
    mockBatchUpdate.mockReturnValue(new BatchUploadDocumentsSuccess());
    mockBatchStream.mockReturnValue(new BatchUploadDocumentsSuccess());
  };

  const doMockErrorUpload = () => {
    mockBatchUpdate.mockReturnValue(new BatchUploadDocumentsError());
    mockBatchStream.mockReturnValue(new BatchUploadDocumentsError());
  };

  const doMockDocumentBuilder = () => {
    mockedDocumentBuilder.mockImplementation(
      () =>
        ({
          marshal: mockedMarshal,
          withData: jest.fn(),
          withDate: jest.fn(),
          withFileExtension: jest.fn(),
          withMetadataValue: jest.fn(),
        } as unknown as DocumentBuilder)
    );
  };
  const doMockAuthenticatedClient = () => {
    mockedClient.mockImplementation(
      () =>
        ({
          getClient: () =>
            Promise.resolve({
              privilegeEvaluator: {
                evaluate: mockEvaluate,
              },
              source: {
                get: mockSourceGet,
              },
            }),
          cfg: {
            get: () =>
              Promise.resolve({
                accessToken: 'the_token',
                organization: 'the_org',
                region: 'au',
                environment: 'prod',
              }),
          },
        } as unknown as AuthenticatedClient)
    );
  };

  const doMockSource = () => {
    mockedSource.mockImplementation(
      () =>
        ({
          batchUpdateDocumentsFromFiles: mockBatchUpdate,
          batchStreamDocumentsFromFiles: mockBatchStream,
          setSourceStatus: mockSetSourceStatus,
        } as unknown as CatalogSource)
    );
  };

  beforeAll(() => {
    doMockDocumentBuilder();
    doMockAuthenticatedClient();
    doMockSource();

    mockedGlobalConfig.get.mockReturnValue({
      configDir: 'the_config_dir',
    } as Interfaces.Config);
  });

  beforeEach(() => {
    mockedMarshal.mockReturnValue(
      JSON.stringify({
        documentId: 'https://perdu.com',
        title: 'hello world',
      })
    );
  });

  mockedSource.mockImplementation(
    () =>
      ({
        batchUpdateDocumentsFromFiles: mockBatchUpdate,
        setSourceStatus: mockSetSourceStatus,
      } as unknown as CatalogSource)
  );

  describe('when the batch upload is successful', () => {
    beforeAll(() => {
      mockUserHavingAllRequiredPlatformPrivileges();
      doMockSuccessUpload();
      sourceContainsDocuments();
    });

    afterAll(() => {
      mockBatchUpdate.mockReset();
      mockEvaluate.mockReset();
      mockSourceGet.mockReset();
    });

    test
      .stdout()
      .stderr()
      .command(['source:catalog:add', 'mysource'])
      .catch(/You must set the `files` flag/)
      .it('throws when no flags are specified');

    test
      .stdout()
      .stderr()
      .command([
        'source:catalog:add',
        'mysource',
        '-f',
        join(pathToStub, 'jsondocuments', 'batman.json'),
      ])
      .it('should trigger a batch document update', () => {
        expect(mockBatchUpdate).toHaveBeenCalled();
        expect(mockBatchStream).not.toHaveBeenCalled();
      });

    test
      .stdout()
      .stderr()
      .command([
        'source:catalog:add',
        'mysource',
        '--fullUpload',
        '-f',
        join(pathToStub, 'jsondocuments', 'batman.json'),
      ])
      .it('should trigger a batch stream upload', () => {
        expect(mockBatchStream).toHaveBeenCalled();
        expect(mockBatchUpdate).not.toHaveBeenCalled();
      });

    test
      .stdout()
      .stderr()
      .command([
        'source:catalog:add',
        'mysource',
        '-f',
        join(pathToStub, 'jsondocuments', 'batman.json'),
      ])
      .it('pass correct configuration information to push-api-client', () => {
        expect(mockedSource).toHaveBeenCalledWith('the_token', 'the_org', {
          environment: 'prod',
          region: 'au',
        });
      });

    test
      .stdout()
      .stderr()
      .command([
        'source:catalog:add',
        'mysource',
        '-f',
        join(pathToStub, 'jsondocuments', 'batman.json'),
      ])
      .it('should create missing fields by default', () => {
        expect(mockBatchUpdate).toHaveBeenCalledWith(
          expect.anything(),
          expect.anything(),
          expect.objectContaining({createFields: true})
        );
      });

    test
      .stdout()
      .stderr()
      .command([
        'source:catalog:add',
        'mysource',
        '--no-createMissingFields',
        '-f',
        join(pathToStub, 'jsondocuments', 'batman.json'),
      ])
      .it('should skip field creation if specified', () => {
        expect(mockBatchUpdate).toHaveBeenCalledWith(
          expect.anything(),
          expect.anything(),
          expect.objectContaining({createFields: false})
        );
      });

    test
      .stdout()
      .stderr()
      .command([
        'source:catalog:add',
        'mysource',
        '-f',
        join(pathToStub, 'jsondocuments', 'someJsonFile.json'),
      ])
      .it('should output feedback message when uploading documents', (ctx) => {
        expect(ctx.stdout).toContain(
          'Success: 2 documents accepted by the API from'
        );
        expect(ctx.stdout).toContain('Status code: 202 ?');
      });

    test
      .stdout()
      .stderr()
      .command([
        'source:catalog:add',
        'mysource',
        '-f',
        join(pathToStub, 'jsondocuments'),
      ])
      .it(
        'should output feedback message when uploading a directory',
        (ctx) => {
          expect(ctx.stdout).toContain(
            'Success: 2 documents accepted by the API from'
          );
          expect(ctx.stdout).toContain('Status code: 202 ?');
        }
      );

    test
      .stdout()
      .stderr()
      .command([
        'source:catalog:add',
        'mysource',
        '-f',
        join(pathToStub, 'jsondocuments', 'batman.json'),
      ])
      .it('should get source info during document update', () => {
        expect(mockSourceGet).toHaveBeenCalled();
      });

    test
      .stdout()
      .stderr()
      .command([
        'source:catalog:add',
        'mysource',
        '--fullUpload',
        '-f',
        join(pathToStub, 'jsondocuments', 'batman.json'),
      ])
      .it('should not get source info during document update', () => {
        expect(mockSourceGet).not.toHaveBeenCalled();
      });

    describe('when the source does not contain items', () => {
      beforeAll(() => {
        mockSourceGet.mockReset();
        sourceDoesNotContainDocuments();
      });

      test
        .stdout()
        .stderr()
        .command([
          'source:catalog:add',
          'mysource',
          '-f',
          join(pathToStub, 'jsondocuments', 'batman.json'),
        ])
        .catch(/No items detected for this source at the moment/)
        .it('should show error message during document update');

      test
        .stdout()
        .stderr()
        .command([
          'source:catalog:add',
          'mysource',
          '--skipFullUploadCheck',
          '-f',
          join(pathToStub, 'jsondocuments', 'batman.json'),
        ])
        .it(
          'should not show an error message when using the --skipFullUploadCheck flag',
          (ctx) => {
            expect(ctx.stderr).not.toContain(
              'please consider doing a full catalog upload'
            );
          }
        );
    });
  });

  describe('when the batch upload fails', () => {
    beforeAll(() => {
      doMockErrorUpload();
      mockUserHavingAllRequiredPlatformPrivileges();
      sourceContainsDocuments();
    });

    afterAll(() => {
      mockBatchUpdate.mockReset();
      mockEvaluate.mockReset();
      mockSourceGet.mockReset();
    });

    test
      .stdout()
      .stderr()
      .command([
        'source:catalog:add',
        'mysource',
        '-f',
        join(pathToStub, 'jsondocuments'),
      ])
      .catch((error) => {
        expect(error).toBeInstanceOf(APIError);
        const message = stripAnsi(error.message);
        expect(message).toContain(
          'this is a bad request and you should feel bad'
        );
        expect(message).toContain('Status code: 412');
        expect(message).toContain('Error code: BAD_REQUEST');
      })
      .it('returns an information message on add failure from the API');
  });

  describe('when Platform privilege preconditions are not respected', () => {
    beforeEach(() => {
      mockUserNotHavingAllRequiredPlatformPrivileges();
      sourceContainsDocuments();
    });

    afterAll(() => {
      mockBatchUpdate.mockReset();
      mockEvaluate.mockReset();
      mockSourceGet.mockReset();
    });

    test
      .stdout()
      .stderr()
      .command(['source:catalog:add', 'some-org', '-f', 'some-file'])
      .catch(/You are not authorized to create or update fields/)
      .it('should return a precondition error');
  });
});
Example #17
Source File: pull.spec.ts    From cli with Apache License 2.0
describe('org:resources:pull', () => {
  beforeAll(() => {
    doMockConfig();
    doMockSnapshotFactory();
    mockAuthenticatedClient();

    mockedGetSnapshot.mockResolvedValue(
      getDummySnapshotModel('default-org', 'my-snapshot', [
        getSuccessReport('my-snapshot', ResourceSnapshotsReportType.Apply),
      ])
    );
  });

  beforeEach(() => {
    doMockPreconditions();
    mockUserHavingAllRequiredPlatformPrivileges();
  });

  afterEach(() => {
    mockEvaluate.mockClear();
    mockedProject.mockClear();
    mockedIsGitInstalled.mockClear();
  });

  test
    .do(() => {
      mockUserNotHavingAllRequiredPlatformPrivileges();
    })
    .stdout()
    .stderr()
    .command(['org:resources:pull'])
    .catch(/You are not authorized to create snapshot/)
    .it('should return an error message if privileges are missing');

  test
    .stdout()
    .stderr()
    .command(['org:resources:pull'])
    .it('should use the cwd as project', () => {
      expect(mockedProject).toHaveBeenCalledWith(cwd(), expect.anything());
    });

  test
    .stdout()
    .stderr()
    .command(['org:resources:pull', '-o', 'someorgid'])
    .it('should create Project with someOrgId', () => {
      expect(mockedProject).toHaveBeenCalledWith(
        expect.anything(),
        'someorgid'
      );
    });

  test
    .stdout()
    .stderr()
    .command(['org:resources:pull'])
    .it('should download the snapshot', () => {
      expect(mockedDownloadSnapshot).toHaveBeenCalled();
    });

  test
    .stdout()
    .stderr()
    .command(['org:resources:pull'])
    .it('should write the project resources manifest', () => {
      expect(mockedProject.prototype.writeResourcesManifest).toHaveBeenCalled();
    });

  test
    .stdout()
    .stderr()
    .command(['org:resources:pull'])
    .it('should delete the snapshot', () => {
      expect(mockedDeleteSnapshot).toHaveBeenCalled();
    });

  test
    .stdout()
    .stderr()
    .command(['org:resources:pull'])
    .it('should select all resource types', () => {
      const resourcesToExport = {
        FIELD: ['*'],
        FEATURED_RESULT: ['*'],
        SOURCE: ['*'],
        QUERY_PIPELINE: ['*'],
        SEARCH_PAGE: ['*'],
        EXTENSION: ['*'],
      };
      expect(mockedSnapshotFactory.createFromOrg).toHaveBeenCalledWith(
        expect.objectContaining(resourcesToExport),
        'default-org',
        expect.objectContaining({})
      );
    });

  test
    .stdout()
    .stderr()
    .command(['org:resources:pull', '-r', 'FIELD', 'FEATURED_RESULT', 'SOURCE'])
    .it('should select specified resource types', () => {
      const resourcesToExport = {
        FIELD: ['*'],
        FEATURED_RESULT: ['*'],
        SOURCE: ['*'],
      };
      expect(mockedSnapshotFactory.createFromOrg).toHaveBeenCalledWith(
        resourcesToExport,
        'default-org',
        expect.objectContaining({})
      );
    });

  test
    .stdout()
    .stderr()
    .stub(CliUx.ux, 'confirm', () => async () => true)
    .command([
      'org:resources:pull',
      '-m',
      join(pathToStub, 'snapshotPullModels', 'full.json'),
    ])
    .it(
      'should create a snapshot with all resource types specified in the model',
      () => {
        const resourcesToExport = {
          FIELD: ['*'],
          QUERY_PIPELINE: ['*'],
          ML_MODEL: ['*'],
          SUBSCRIPTION: ['*'],
        };
        expect(mockedSnapshotFactory.createFromOrg).toHaveBeenCalledWith(
          resourcesToExport,
          expect.anything(),
          expect.objectContaining({})
        );
      }
    );

  test
    .stdout()
    .stderr()
    .stub(CliUx.ux, 'confirm', () => async () => true)
    .command([
      'org:resources:pull',
      '-m',
      join(pathToStub, 'snapshotPullModels', 'subset.json'),
    ])
    .it(
      'should create a snapshot with only the specified resource items in the model',
      () => {
        const resourcesToExport = {
          FIELD: ['author', 'source', 'title'],
          QUERY_PIPELINE: ['default', 'agentPanel'],
          ML_MODEL: null,
          SEARCH_PAGE: null,
          SUBSCRIPTION: null,
        };
        expect(mockedSnapshotFactory.createFromOrg).toHaveBeenCalledWith(
          resourcesToExport,
          expect.anything(),
          expect.objectContaining({})
        );
      }
    );

  test
    .stdout()
    .stderr()
    .stub(CliUx.ux, 'confirm', () => async () => true)
    .command([
      'org:resources:pull',
      '-m',
      join(pathToStub, 'snapshotPullModels', 'full.json'),
    ])
    .it('should use the orgId from the model', () => {
      expect(mockedProject).toHaveBeenCalledWith(expect.anything(), 'myorgid');
      expect(mockedSnapshotFactory.createFromOrg).toHaveBeenCalledWith(
        expect.anything(),
        'myorgid',
        expect.objectContaining({})
      );
    });

  test
    .stdout()
    .stderr()
    .command([
      'org:resources:pull',
      '-m',
      join(pathToStub, 'snapshotPullModels', 'missingOrgId.json'),
    ])
    .it('should use the orgId from the config', () => {
      expect(mockedSnapshotFactory.createFromOrg).toHaveBeenCalledWith(
        expect.anything(),
        'default-org',
        expect.objectContaining({})
      );
    });

  test
    .stdout()
    .stderr()
    .command([
      'org:resources:pull',
      '-m',
      join(pathToStub, 'snapshotPullModels', 'missingResources.json'),
    ])
    .catch(/requires property "resourcesToExport"/)
    .it('should throw an invalid model error');

  test
    .stdout()
    .stderr()
    .command(['org:resources:pull'])
    .it('should set a 60 seconds timeout', () => {
      expect(mockedSnapshotFactory.createFromOrg).toHaveBeenCalledWith(
        expect.arrayContaining([]),
        'default-org',
        {wait: 60}
      );
    });

  test
    .stdout()
    .stderr()
    .command(['org:resources:pull', '-w', '78'])
    .it('should set a 78 seconds timeout', () => {
      expect(mockedSnapshotFactory.createFromOrg).toHaveBeenCalledWith(
        expect.arrayContaining([]),
        'default-org',
        {wait: 78}
      );
    });

  test
    .stdout()
    .stderr()
    .command(['org:resources:pull', '-r', 'invalidresource'])
    .catch(/Expected --resourceTypes=invalidresource to be one of/)
    .it('should not allow invalid resource');
});
Example #18
Source File: preview.spec.ts    From cli with Apache License 2.0
describe('org:resources:preview', () => {
  const doMockPreconditions = function () {
    const mockGit = (_target: Command) => Promise.resolve();
    mockedIsGitInstalled.mockReturnValue(mockGit);
  };

  beforeAll(() => {
    mockConfig();
    mockProject();
    mockAuthenticatedClient();
  });

  beforeEach(() => {
    doMockPreconditions();
    mockUserHavingAllRequiredPlatformPrivileges();
  });

  afterEach(() => {
    mockEvaluate.mockClear();
    mockedIsGitInstalled.mockClear();
  });

  describe('when the report contains no resources in error', () => {
    beforeAll(async () => {
      await mockSnapshotFactoryReturningValidSnapshot();
    });

    afterAll(() => {
      mockedSnapshotFactory.mockReset();
    });

    test
      .do(() => {
        mockUserNotHavingAllRequiredPlatformPrivileges();
      })
      .stdout()
      .stderr()
      .command(['org:resources:preview'])
      .catch(/You are not authorized to create snapshot/)
      .it('should return an error message if privileges are missing');

    test
      .stdout()
      .stderr()
      .command(['org:resources:preview'])
      .it('should use cwd as project', () => {
        expect(mockedProject).toHaveBeenCalledWith(cwd(), 'foo');
      });

    test
      .stdout()
      .stderr()
      .command(['org:resources:preview'])
      .it('should work with default connected org', () => {
        expect(mockedSnapshotFactory.createFromZip).toHaveBeenCalledWith(
          normalize(join('path', 'to', 'resources.zip')),
          'foo',
          expect.objectContaining({})
        );
      });

    test
      .stdout()
      .stderr()
      .command(['org:resources:preview', '-o', 'myorg'])
      .it('should work with specified target org', () => {
        expect(mockedSnapshotFactory.createFromZip).toHaveBeenCalledWith(
          normalize(join('path', 'to', 'resources.zip')),
          'myorg',
          expect.objectContaining({})
        );
      });

    test
      .stdout()
      .stderr()
      .command(['org:resources:preview'])
      .it('should set a 60 seconds wait', () => {
        expect(mockedSnapshotFactory.createFromZip).toHaveBeenCalledWith(
          normalize(join('path', 'to', 'resources.zip')),
          'foo',
          {wait: 60}
        );
      });

    test
      .stdout()
      .stderr()
      .command(['org:resources:preview', '-w', '312'])
      .it('should set a 312 seconds wait', () => {
        expect(mockedSnapshotFactory.createFromZip).toHaveBeenCalledWith(
          normalize(join('path', 'to', 'resources.zip')),
          'foo',
          {wait: 312}
        );
      });

    test
      .stdout()
      .stderr()
      .command(['org:resources:preview'])
      .it('#validate should not take into account missing resources', () => {
        expect(mockedValidateSnapshot).toHaveBeenCalledWith(false, {wait: 60});
      });

    test
      .stdout()
      .stderr()
      .command(['org:resources:preview', '-d'])
      .it('#validate should take into account missing resources', () => {
        expect(mockedValidateSnapshot).toHaveBeenCalledWith(true, {wait: 60});
      });

    test
      .stdout()
      .stderr()
      .command(['org:resources:preview'])
      .it('should preview the snapshot', () => {
        expect(mockedPreviewSnapshot).toHaveBeenCalledTimes(1);
      });

    test
      .stdout()
      .stderr()
      .command(['org:resources:preview'])
      .it('should delete the compressed folder', () => {
        expect(mockedDeleteTemporaryZipFile).toHaveBeenCalledTimes(1);
      });

    test
      .stdout()
      .stderr()
      .do(() => {
        mockedValidateSnapshot.mockImplementationOnce(() => {
          throw new Error('You shall not pass');
        });
      })
      .command(['org:resources:preview'])
      .catch(() => {
        expect(mockedDeleteTemporaryZipFile).toHaveBeenCalledTimes(1);
      })
      .it('should delete the compressed folder on error');

    test
      .stdout()
      .stderr()
      .command(['org:resources:preview'])
      .it('should delete the snapshot', () => {
        expect(mockedDeleteSnapshot).toHaveBeenCalledTimes(1);
      });

    test
      .stdout()
      .stderr()
      .command(['org:resources:preview'])
      .it('should display expanded preview', () => {
        expect(mockedPreviewSnapshot).toHaveBeenCalledWith(
          expect.anything(),
          expect.anything(),
          true
        );
      });

    test
      .stdout()
      .stderr()
      .command(['org:resources:preview', '--previewLevel', 'light'])
      .it('should only display light preview', () => {
        expect(mockedPreviewSnapshot).toHaveBeenCalledWith(
          expect.anything(),
          expect.anything(),
          false
        );
      });
  });
  //#region TODO: CDX-948, setup phase needs to be rewritten and assertions 'split up' (e.g. the error isn't triggered directly by the function, therefore should not be handled)
  describe('when the report contains resources in error', () => {
    beforeAll(async () => {
      await mockSnapshotFactoryReturningInvalidSnapshot();
    });

    beforeEach(() => {
      mockedAreResourcesInError.mockReturnValueOnce(false);
      mockedSaveDetailedReport.mockReturnValueOnce(
        normalize(join('saved', 'snapshot'))
      );
    });

    afterAll(() => {
      mockedSnapshotFactory.mockReset();
    });

    test
      .stdout()
      .stderr()
      .command(['org:resources:preview'])
      .catch(/Invalid snapshot/)
      .it('should throw an error for invalid snapshots');

    test
      .stdout()
      .stderr()
      .command(['org:resources:preview'])
      .catch((ctx) => {
        expect(ctx.message).toContain(
          'https://platform.cloud.coveo.com/admin/#potato-org/organization/resource-snapshots/banana-snapshot'
        );
      })
      .it('should print an URL to the snapshot page');
  });

  describe('when the snapshot is not in sync with the target org', () => {
    beforeAll(async () => {
      await mockSnapshotFactoryReturningInvalidSnapshot();
      mockSnapshotFacade();
    });

    beforeEach(() => {
      mockedAreResourcesInError.mockReturnValueOnce(true);
      mockedSaveDetailedReport.mockReturnValueOnce(join('saved', 'snapshot'));
    });

    afterAll(() => {
      mockedSnapshotFactory.mockReset();
    });

    test
      .stdout()
      .stderr()
      .command(['org:resources:preview'])
      .catch(() => {
        expect(mockedTryAutomaticSynchronization).toHaveBeenCalledWith(true);
      })
      .it('should have detected and tried to resolve the conflicts');

    test
      .stdout()
      .stderr()
      .command(['org:resources:preview', '--sync'])
      .catch(() => {
        expect(mockedTryAutomaticSynchronization).toHaveBeenCalledWith(false);
      })
      .it(
        'should try to apply synchronization plan without asking for confirmation'
      );
  });

  describe('when the report contains resources with missing vault entries', () => {
    beforeAll(async () => {
      await mockSnapshotFactoryReturningSnapshotWithMissingVaultEntries();
    });

    afterAll(() => {
      mockedSnapshotFactory.mockReset();
    });

    describe('when the user refuses to migrate or type in the missing vault entries', () => {
      test
        .stdout()
        .stderr()
        .stub(CliUx.ux, 'confirm', () => async () => false)
        .command(['org:resources:preview'])
        .catch(/Your destination organization is missing vault entries/)
        .it('should throw an error for invalid snapshots');
    });
  });
  //#endregion
});
Example #19
Source File: gen-dts.ts    From vue-components-lib-seed with MIT License
genVueTypes = async () => {
  const project = new Project({
    compilerOptions: {
      allowJs: true,
      declaration: true,
      emitDeclarationOnly: true,
      noEmitOnError: true,
      outDir: path.resolve(cwd(), 'dist'),
    },
    tsConfigFilePath: TSCONFIG_PATH,
    skipAddingFilesFromTsConfig: true,
  })

  const sourceFiles: SourceFile[] = []

  const entry = await parseComponentExports()
  const entrySourceFile = project.createSourceFile(
    path.resolve(cwd(), 'src/packages/my-lib.ts'),
    entry,
    { overwrite: true }
  )

  sourceFiles.push(entrySourceFile)

  const filePaths = klawSync(
    path.resolve(cwd(), 'src/packages'),
    {
      nodir: true,
    }
  )
    .map((item) => item.path)
    .filter((path) => !DEMO_RE.test(path))
    .filter((path) => !TEST_RE.test(path))

  await Promise.all(
    filePaths.map(async (file) => {
      if (file.endsWith('.vue')) {
        const content = await fs.promises.readFile(
          file,
          'utf-8'
        )
        const sfc = vueCompiler.parse(
          content as unknown as string
        )
        const { script, scriptSetup } = sfc.descriptor
        if (script || scriptSetup) {
          let content = ''
          let isTS = false
          if (script && script.content) {
            content += script.content
            if (script.lang === 'ts') isTS = true
          }
          if (scriptSetup) {
            const compiled = vueCompiler.compileScript(
              sfc.descriptor,
              {
                id: 'xxx',
              }
            )
            content += compiled.content
            if (scriptSetup.lang === 'ts') isTS = true
          }
          const sourceFile = project.createSourceFile(
            path.relative(process.cwd(), file) +
              (isTS ? '.ts' : '.js'),
            content
          )
          sourceFiles.push(sourceFile)
        }
      } else if (file.endsWith('.ts')) {
        const sourceFile = project.addSourceFileAtPath(file)
        sourceFiles.push(sourceFile)
      }
    })
  )

  const diagnostics = project.getPreEmitDiagnostics()

  console.log(
    project.formatDiagnosticsWithColorAndContext(
      diagnostics
    )
  )

  project.emitToMemory()

  for (const sourceFile of sourceFiles) {
    const emitOutput = sourceFile.getEmitOutput()
    for (const outputFile of emitOutput.getOutputFiles()) {
      const filepath = outputFile.getFilePath()

      await fs.promises.mkdir(path.dirname(filepath), {
        recursive: true,
      })

      await fs.promises.writeFile(
        filepath,
        outputFile.getText(),
        'utf8'
      )
    }
  }
}