path#normalize TypeScript Examples
The following examples show how to use path#normalize, the normalize function from Node.js's path module, in TypeScript. The source project, file, and license are noted above each example.
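As a quick reference before the project examples, here is a minimal sketch of normalize's behavior on its own (the expected outputs in the comments assume a POSIX platform; on Windows the separators in the result are backslashes):

import { normalize } from "path";

// normalize collapses redundant separators and resolves "." and ".." segments.
console.log(normalize("/foo/bar//baz/asdf/quux/..")); // "/foo/bar/baz/asdf"
console.log(normalize("./a/b/../c"));                 // "a/c"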
Example #1
Source File: utils.ts From h1z1-server with GNU General Public License v3.0 | 7 votes |
removeCacheFullDir = function (directoryPath: string): void {
  const files = readdirSync(directoryPath); // need to be sync
  for (const file of files) {
    if (!file.includes(".")) {
      // if it's a folder ( this feature isn't tested but should work well )
      removeCacheFullDir(`${directoryPath}/${file}`);
    }
    if (file.substring(file.length - 3) === ".js") {
      delete require.cache[normalize(`${directoryPath}/${file}`)];
    }
  }
}
Example #2
Source File: utils.ts From react-loosely-lazy with Apache License 2.0 | 7 votes |
/**
 * Generates a relative path to the module that should be 1:1 with what the
 * webpack plugin generates for the key for the chunk in the manifest.
 *
 * @param filename - The absolute path to the file being transpiled
 * @param importPath - The import string as it is written in application source code
 * @param modulePathReplacer - Contains from and to string keys to override a specific part of the resulting
 *   module paths generated
 * @param resolver - Instance of 'enhanced-resolve' with the custom configuration
 */
export function getModulePath({
  filename,
  importPath,
  modulePathReplacer,
  resolver,
}: GetModulePathOptions): string {
  // Resolve the import path starting from the filename path itself rather than from within this file
  const modulePath = resolver.resolveSync({}, dirname(filename), importPath);
  const path = relative(process.cwd(), String(modulePath));

  if (modulePathReplacer) {
    const { from, to } = modulePathReplacer;
    // Normalize the "from" option so that it matches the normalized relative path format and replace it with whatever
    // is in the "to" option
    const normalizedFrom = normalize(from);

    return path.replace(normalizedFrom, to);
  }

  return addDotSlashPrefix(path);
}
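For illustration, a small sketch of what the replacer branch above does with hypothetical values (the "src/ui/Component.tsx" relative path and the { from: './src', to: '' } replacer are made-up inputs, not taken from the react-loosely-lazy source):

import { normalize } from "path";

// Hypothetical replacer: { from: "./src", to: "" }
const normalizedFrom = normalize("./src"); // "src"
// If relative(process.cwd(), modulePath) yielded "src/ui/Component.tsx",
// the returned manifest key would drop the "src" prefix:
console.log("src/ui/Component.tsx".replace(normalizedFrom, "")); // "/ui/Component.tsx"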
Example #3
Source File: preset.ts From yfm-docs with MIT License | 6 votes |
function get(path: string): YfmPreset {
  let combinedValues: YfmPreset = {};
  let localPath = normalize(path);

  while (localPath !== '.') {
    const presetValues: YfmPreset = presetStorage.get(localPath) || {};

    localPath = dirname(localPath);

    combinedValues = {
      ...presetValues,
      ...combinedValues,
    };
  }

  // Add root presets
  combinedValues = {
    ...presetStorage.get('.'),
    ...combinedValues,
  };

  return combinedValues;
}
Example #4
Source File: preset.ts From yfm-docs with MIT License | 6 votes |
function add(parsedPreset: DocPreset, path: string, varsPreset: string) {
  const combinedValues: YfmPreset = {
    ...parsedPreset.default || {},
    ...parsedPreset[varsPreset] || {},
  };

  const key = dirname(normalize(path));
  presetStorage.set(key, combinedValues);
}
Example #5
Source File: NoteWorkspace.ts From vscode-markdown-notes with GNU General Public License v3.0 | 6 votes |
static wikiLinkCompletionForConvention(
  uri: vscode.Uri,
  fromDocument: vscode.TextDocument
): string {
  if (this.useUniqueFilenames()) {
    let filename = basename(uri.fsPath);
    let c = this.cfg().noteCompletionConvention;
    return this._wikiLinkCompletionForConvention(c, filename);
  } else {
    let toPath = uri.fsPath;
    let fromDir = dirname(fromDocument.uri.fsPath.toString());
    let rel = normalize(relative(fromDir, toPath));
    return rel;
  }
}
Example #6
Source File: snapshotCommon.ts From cli with Apache License 2.0 | 6 votes |
export async function dryRun(
  targetOrg: string,
  projectPath: string,
  cfg: Configuration,
  options: DryRunOptions = {}
) {
  options = {...defaultDryRunOptions, ...options};
  const project = new Project(normalize(projectPath), targetOrg);
  const snapshot = await getSnapshotForDryRun(project, targetOrg, options);

  CliUx.ux.action.start('Validating snapshot');
  let reporter = await internalDryRun(project, snapshot, cfg, options);

  return {reporter, snapshot, project};
}
Example #7
Source File: github.ts From yfm-docs with MIT License | 6 votes |
function addContributorForPath(paths: string[], newContributor: Contributors, hasIncludes = false): void {
  paths.forEach((path: string) => {
    const normalizePath = normalize(addSlashPrefix(path));

    if (!contributorsByPath.has(normalizePath)) {
      contributorsByPath.set(normalizePath, {
        contributors: newContributor,
        hasIncludes,
      });
      return;
    }

    const oldContributors = contributorsByPath.get(normalizePath);

    contributorsByPath.set(normalizePath, {
      contributors: {
        ...oldContributors?.contributors,
        ...newContributor,
      },
      hasIncludes,
    });
  });
}
Example #8
Source File: push.spec.ts From cli with Apache License 2.0 | 6 votes |
mockProject = () => {
  mockedProject.mockImplementation(
    () =>
      ({
        compressResources: () =>
          Promise.resolve(normalize(join('path', 'to', 'resources.zip'))),
        deleteTemporaryZipFile: mockedDeleteTemporaryZipFile,
        getResourceManifest: mockedGetResourceManifest,
      } as unknown as Project)
  );
}
Example #9
Source File: imports.ts From solc-typed-ast with Apache License 2.0 | 6 votes |
/**
 * Normalize a relative import path as described in
 * https://docs.soliditylang.org/en/v0.8.8/path-resolution.html#relative-imports
 *
 * @param importer - source unit name of importing unit
 * @param imported - path of import directive
 */
function normalizeRelativeImportPath(importer: string, imported: string): string {
  imported = normalize(imported);

  const importedSegments = imported.split("/").filter((s) => s !== "");

  let prefix = dirname(importer);
  let strippedSegments = 0;

  while (
    strippedSegments < importedSegments.length &&
    importedSegments[strippedSegments] === ".."
  ) {
    prefix = dirname(prefix);
    strippedSegments++;
  }

  // According to https://docs.soliditylang.org/en/v0.8.8/path-resolution.html#relative-imports when prefix
  // is empty there is no leading "./". However `dirname` always returns non-empty prefixes.
  // Handle this edge case.
  if (prefix === "." && !importer.startsWith(".")) {
    prefix = "";
  }

  assert(prefix === "" || !prefix.endsWith("/"), `Invalid prefix ${prefix}`);

  const suffix = importedSegments.slice(strippedSegments).join("/");

  return prefix === "" ? suffix : prefix + "/" + suffix;
}
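As a rough trace of the helper above with hypothetical inputs (the file names are illustrative, not taken from the solc-typed-ast test suite):

// Assuming the normalizeRelativeImportPath helper above:
normalizeRelativeImportPath("contracts/Token.sol", "./lib/../math/Math.sol"); // "contracts/math/Math.sol"
normalizeRelativeImportPath("contracts/a/B.sol", "../C.sol");                 // "contracts/C.sol"
normalizeRelativeImportPath("Token.sol", "./Math.sol");                       // "Math.sol" (no leading "./")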
Example #10
Source File: preview.spec.ts From cli with Apache License 2.0 | 6 votes |
mockProject = () => {
  mockedProject.mockImplementation(
    () =>
      ({
        compressResources: () =>
          Promise.resolve(normalize(join('path', 'to', 'resources.zip'))),
        deleteTemporaryZipFile: mockedDeleteTemporaryZipFile,
        getResourceManifest: mockedGetResourceManifest,
      } as unknown as Project)
  );
}
Example #11
Source File: extension.ts From dockerfiletemplate with MIT License | 6 votes |
export function createIgnoreDockerFile(
  templatePath: String,
  workspaceFolder: String
): Boolean {
  try {
    const data = readFileSync(normalize(`${__dirname}/${templatePath}`));
    writeFileSync(`${workspaceFolder}/.dockerignore`, data);
  } catch (error: any) {
    throw new Error(error);
  }
  return true;
}
Example #12
Source File: extension.ts From dockerfiletemplate with MIT License | 6 votes |
export function createDockerFile(
  templatePath: String,
  workspaceFolder: String
): Boolean {
  try {
    const data = readFileSync(normalize(`${__dirname}/${templatePath}`));
    writeFileSync(`${workspaceFolder}/Dockerfile`, data);
  } catch (error: any) {
    throw new Error(error);
  }
  return true;
}
Example #13
Source File: settings.ts From project-loved-web with MIT License | 5 votes |
storagePath = normalize(config.storagePath)
Example #14
Source File: hash-versioner.ts From xiome with MIT License | 5 votes |
export function hashVersioner({root, origin}: {
  root: string
  origin?: string
}): HashVersioner {

  function getPathForUrl(url: string) {
    url = url.split("#")[0]
    url = url.split("?")[0]
    if (origin === undefined)
      return normalize(join(root, url[0] === "/" ? url.slice(1) : url))
    return (url[0] === "/")
      ? normalize(join(root, url.slice(1)))
      : normalize(join(root, origin, url))
  }

  async function computeHash(path: string) {
    try {
      const file = await readFile(path, "utf-8")
      const hasher = createHash("sha256")
      hasher.update(file)
      return hasher.digest("hex")
    }
    catch (error) {
      throw new HashVersionerError(
        `file not found "${path}"`
      )
    }
  }

  return async function v(url: string) {
    const path = getPathForUrl(url)
    const hash = await computeHash(path)
    const query = url.match(/\?(.+)$/)
    const tag = `v=${hash}`
    return normalize(
      query
        ? url + "&" + tag
        : url + "?" + tag
    )
  }
}
Example #15
Source File: loading.ts From amplify-codegen with Apache License 2.0 | 5 votes |
function loadSDLSchema(schemaPath: string): GraphQLSchema {
  const authDirectivePath = normalize(join(__dirname, '..', 'awsAppSyncDirectives.graphql'));
  const doc = loadAndMergeQueryDocuments([authDirectivePath, schemaPath]);
  return buildASTSchema(doc);
}
Example #16
Source File: loading.ts From amplify-codegen with Apache License 2.0 | 5 votes |
function loadSDLSchema(schemaPath: string): GraphQLSchema {
  const authDirectivePath = normalize(join(__dirname, '../../..', 'awsAppSyncDirectives.graphql'));
  const doc = loadAndMergeQueryDocuments([authDirectivePath, schemaPath]);
  return buildASTSchema(doc);
}
Example #17
Source File: snapshotCommon.ts From cli with Apache License 2.0 | 5 votes |
export function cleanupProject(projectPath: string) {
  const project = new Project(normalize(projectPath));
  project.deleteTemporaryZipFile();
}
Example #18
Source File: find-unused-module.ts From erda-ui with GNU Affero General Public License v3.0 | 5 votes |
function findUnusedModule(options: Options) {
  const { cwd, entries, includes, resolveRequirePath, ignoreSuffix, ignorePath } = { ...defaultOptions, ...options };
  const includeContentPath = includes.map((includePath) => (cwd ? `${cwd}/${includePath}` : includePath));
  const allFiles = fastGlob.sync(includeContentPath).map((item) => normalize(item));
  const entryModules: string[] = [];
  const usedModules: string[] = [];
  const exportModules: string[] = [];
  const importModules: string[] = [];
  setRequirePathResolver(resolveRequirePath);
  entries.forEach((entry) => {
    const entryPath = resolve(cwd, entry);
    entryModules.push(entryPath);
    traverseModule(entryPath, (modulePath: string, items: string[], isImport: boolean) => {
      usedModules.push(modulePath);
      if (isImport) {
        importModules.push(...items);
      } else {
        exportModules.push(...items);
      }
    });
  });
  const importSet = new Set(importModules);
  const exportSet = new Set(exportModules);
  const unusedExport = [...exportSet].filter((x) => !importSet.has(x));
  const unusedModules = allFiles.filter((filePath) => {
    const resolvedFilePath = resolve(filePath);
    return (
      !entryModules.includes(resolvedFilePath) &&
      !usedModules.includes(resolvedFilePath) &&
      !ignoreSuffix.some((item) => resolvedFilePath.endsWith(item)) &&
      !ignorePath.some((item) => resolvedFilePath.includes(item))
    );
  });
  return {
    all: allFiles,
    used: usedModules,
    unused: unusedModules,
    unusedExport,
  };
}
Example #19
Source File: lint.ts From DefinitelyTyped-tools with MIT License | 5 votes |
function normalizePath(file: string) {
  // replaces '\' with '/' and forces all DOS drive letters to be upper-case
  return normalize(file)
    .replace(/\\/g, "/")
    .replace(/^[a-z](?=:)/, (c) => c.toUpperCase());
}
Example #20
Source File: path.ts From fantasticon with MIT License | 5 votes |
splitSegments = (path: string): string[] =>
  normalize(path)
    .split(/\/|\\/)
    .filter(part => part && part !== '.')
Example #21
Source File: helpers.ts From protobuf-ts with Apache License 2.0 | 5 votes |
fixtureDescriptorSetPath = normalize(join(__dirname, '../../../test-fixtures/all.descriptorset'))
Example #22
Source File: helpers.ts From protobuf-ts with Apache License 2.0 | 5 votes |
fixtureDescriptorSetPath = normalize(join(__dirname, '../../../test-fixtures/all.descriptorset'))
Example #23
Source File: metadataContributors.test.ts From yfm-docs with MIT License | 4 votes |
describe('getContentWithUpdatedMetadata (Contributors)', () => {
  const metaDataOptions: MetaDataOptions = {
    fileData: {
      tmpInputFilePath: '',
      inputFolderPathLength: 0,
    },
  };
  const defaultVCSConnector: VCSConnector = {
    addNestedContributorsForPath: () => { },
    getContributorsByPath: () => Promise.resolve(null),
    getUserByLogin: () => Promise.resolve(null),
  };

  describe('should return file content with updated contributors in metadata ' +
    'if metadata options has "isContributorsEnabled" equals true.', () => {
    beforeAll(() => {
      metaDataOptions.isContributorsEnabled = true;
      metaDataOptions.vcsConnector = defaultVCSConnector;
    });

    test('"getContributorsByPath" does not return any contributors with includes contributors', async () => {
      metaDataOptions.vcsConnector.getContributorsByPath = () => Promise.resolve({
        contributors: {},
        hasIncludes: true,
      });
      const fileContent = readFileSync(simpleMetadataFilePath, 'utf8');

      const updatedFileContent = await getContentWithUpdatedMetadata(fileContent, metaDataOptions);

      const splitedFiledContent = fileContent.split(metadataBorder);
      splitedFiledContent[1] = `${splitedFiledContent[1]}contributors: []${сarriage}`;
      const expectedFileContent = splitedFiledContent.join(metadataBorder);
      expect(updatedFileContent).toEqual(expectedFileContent);
    });

    test('File content does not have metadata and' +
      '"getContributorsByPath" does not return any contributors with includes contributors', async () => {
      metaDataOptions.vcsConnector.getContributorsByPath = () => Promise.resolve({
        contributors: {},
        hasIncludes: true,
      });
      const fileContent = readFileSync(withoutMetadataFilePath, 'utf8');

      const updatedFileContent = await getContentWithUpdatedMetadata(fileContent, metaDataOptions);

      const border = `${metadataBorder}${сarriage}`;
      const newMetadata = `${border}contributors: []${сarriage}${border}`;
      const expectedFileContent = `${newMetadata}${fileContent}`;
      expect(updatedFileContent).toEqual(expectedFileContent);
    });

    test('"getContributorsByPath" returns contributors with includes contributors', async () => {
      const contributorFirst: Contributor = {
        avatar: 'https://example.ru/logo.png',
        name: 'Name Surname 1',
        url: 'https://example.ru',
        email: '[email protected]',
        login: 'alias_1',
      };
      const contributorSecond: Contributor = {
        avatar: 'https://example.ru/logo.png',
        name: 'Name Surname 2',
        url: 'https://example.ru',
        email: '[email protected]',
        login: 'alias_2',
      };
      const expectedContributors: Contributors = {
        [contributorFirst.email]: contributorFirst,
        [contributorSecond.email]: contributorSecond,
      };
      const expectedContributorsArray: Contributor[] = Object.values(expectedContributors);
      const expectedContributorsString: string =
        replaceDoubleToSingleQuotes(JSON.stringify(expectedContributorsArray));
      metaDataOptions.vcsConnector.getContributorsByPath = () => Promise.resolve({
        contributors: expectedContributors,
        hasIncludes: true,
      });
      const fileContent = readFileSync(simpleMetadataFilePath, 'utf8');

      const updatedFileContent = await getContentWithUpdatedMetadata(fileContent, metaDataOptions);

      const splitedFiledContent = fileContent.split(metadataBorder);
      splitedFiledContent[1] =
        `${splitedFiledContent[1]}contributors: ${expectedContributorsString}${сarriage}`;
      const expectedFileContent = splitedFiledContent.join(metadataBorder);
      expect(updatedFileContent).toEqual(expectedFileContent);
    });

    test('"getContributorsByPath" returns contributors without includes contributors and ' +
      'file content does not have include contents', async () => {
      const contributorFirst: Contributor = {
        avatar: 'https://example.ru/logo.png',
        name: 'Name Surname 1',
        url: 'https://example.ru',
        email: '[email protected]',
        login: 'alias_1',
      };
      const expectedContributors: Contributors = {
        [contributorFirst.email]: contributorFirst,
      };
      const expectedContributorsArray: Contributor[] = Object.values(expectedContributors);
      const expectedContributorsString: string =
        replaceDoubleToSingleQuotes(JSON.stringify(expectedContributorsArray));
      metaDataOptions.vcsConnector.getContributorsByPath = () => Promise.resolve({
        contributors: expectedContributors,
        hasIncludes: false,
      });
      const fileContent = readFileSync(simpleMetadataFilePath, 'utf8');

      const updatedFileContent = await getContentWithUpdatedMetadata(fileContent, metaDataOptions);

      const splitedFiledContent = fileContent.split(metadataBorder);
      splitedFiledContent[1] =
        `${splitedFiledContent[1]}contributors: ${expectedContributorsString}${сarriage}`;
      const expectedFileContent = splitedFiledContent.join(metadataBorder);
      expect(updatedFileContent).toEqual(expectedFileContent);
    });

    const contributorFirst: Contributor = {
      avatar: 'https://example.ru/logo.png',
      name: 'Name Surname 1',
      url: 'https://example.ru',
      email: '[email protected]',
      login: 'alias_1',
    };
    const includesContributorFromFirstFile: Contributor = {
      avatar: 'https://example.ru/logo.png',
      name: 'Name Surname includes 1',
      url: 'https://example.ru',
      email: '[email protected]',
      login: 'alias_includes_1',
    };
    const includesContributorFromSecondFile: Contributor = {
      avatar: 'https://example.ru/logo.png',
      name: 'Name Surname includes 2',
      url: 'https://example.ru',
      email: '[email protected]',
      login: 'alias_includes_2',
    };

    const getFileContributors = (path: string): Contributors => {
      if (path === normalize(firstIncludeFilePath)) {
        return {
          [includesContributorFromFirstFile.email]: includesContributorFromFirstFile,
        };
      }
      if (path === normalize(secondIncludeFilePath)) {
        return {
          [includesContributorFromSecondFile.email]: includesContributorFromSecondFile,
        };
      }
      return {
        [contributorFirst.email]: contributorFirst,
      };
    };

    [
      {
        title: 'when all files does not have information about includes contributors',
        getHasIncludes: () => false,
        expectedContributorsArray: [
          contributorFirst,
          includesContributorFromFirstFile,
          includesContributorFromSecondFile,
        ],
      },
      {
        title: 'when first include file has information about includes contributors',
        getHasIncludes: (path: string) => path === normalize(firstIncludeFilePath),
        expectedContributorsArray: [contributorFirst, includesContributorFromFirstFile],
      },
    ].forEach((item) => {
      test('"getContributorsByPath" returns contributors from main ' +
        `and includes files and ${item.title}`, async () => {
        const expectedContributorsString: string = replaceDoubleToSingleQuotes(
          JSON.stringify(item.expectedContributorsArray));
        metaDataOptions.vcsConnector.getContributorsByPath = (path: string) => Promise.resolve({
          contributors: getFileContributors(path),
          hasIncludes: item.getHasIncludes(path),
        });
        metaDataOptions.fileData.tmpInputFilePath = withIncludesFilePath;
        const fileContent = readFileSync(withIncludesFilePath, 'utf8');

        const updatedFileContent = await getContentWithUpdatedMetadata(fileContent, metaDataOptions);

        const splitedFiledContent = fileContent.split(metadataBorder);
        splitedFiledContent[1] =
          `${splitedFiledContent[1]}contributors: ${expectedContributorsString}${сarriage}`;
        const expectedFileContent = splitedFiledContent.join(metadataBorder);
        expect(updatedFileContent).toEqual(expectedFileContent);
      });
    });
  });

  describe('should return file content without updated contributors in metadata', () => {
    test('if metadata options has "isContributorsEnabled" equals false', async () => {
      metaDataOptions.isContributorsEnabled = false;
      metaDataOptions.vcsConnector = defaultVCSConnector;
      const fileContent = readFileSync(simpleMetadataFilePath, 'utf8');

      const updatedFileContent = await getContentWithUpdatedMetadata(fileContent, metaDataOptions);

      expect(updatedFileContent).toEqual(fileContent);
    });

    test('if metadata options has "isContributorsEnabled" equals true ' +
      'and "vcsConnector" equals undefined', async () => {
      metaDataOptions.isContributorsEnabled = true;
      metaDataOptions.vcsConnector = undefined;
      const fileContent = readFileSync(simpleMetadataFilePath, 'utf8');

      const updatedFileContent = await getContentWithUpdatedMetadata(fileContent, metaDataOptions);

      expect(updatedFileContent).toEqual(fileContent);
    });
  });
});
Example #24
Source File: push.spec.ts From cli with Apache License 2.0 | 4 votes |
describe('org:resources:push', () => {
  beforeAll(() => {
    mockConfig();
    mockProject();
    mockAuthenticatedClient();
  });

  beforeEach(() => {
    mockUserHavingAllRequiredPlatformPrivileges();
  });

  afterEach(() => {
    jest.clearAllMocks();
  });

  describe('when preconditions are not respected', () => {
    test
      .do(() => {
        mockUserNotHavingAllRequiredPlatformPrivileges();
      })
      .stdout()
      .stderr()
      .command(['org:resources:push'])
      .catch(/You are not authorized to create snapshot/)
      .it('should return an error message if privileges are missing');
  });

  //#region TODO: CDX-948, the setup phase needs to be rewritten and the assertions split up (e.g. the error is not triggered directly by the function and therefore should not be handled)
  describe('when the dryRun returns a report without errors', () => {
    beforeAll(async () => {
      await mockSnapshotFactoryReturningValidSnapshot();
    });

    afterAll(() => {
      mockedSnapshotFactory.mockReset();
    });

    test
      .stdout()
      .stderr()
      .stub(CliUx.ux, 'confirm', () => async () => true)
      .command(['org:resources:push'])
      .it('should preview the snapshot', () => {
        expect(mockedPreviewSnapshot).toHaveBeenCalledTimes(1);
      });

    test
      .stdout()
      .stderr()
      .stub(CliUx.ux, 'confirm', () => async () => true)
      .command(['org:resources:push'])
      .it('should apply the snapshot after confirmation', () => {
        expect(mockedApplySnapshot).toHaveBeenCalledTimes(1);
      });

    test
      .stdout()
      .stderr()
      .stub(CliUx.ux, 'confirm', () => async () => false)
      .command(['org:resources:push'])
      .it('should not apply the snapshot if not confirmed', () => {
        expect(mockedApplySnapshot).toHaveBeenCalledTimes(0);
      });

    test
      .stdout()
      .stderr()
      .stub(CliUx.ux, 'confirm', () => async () => true)
      .command(['org:resources:push'])
      .it('should work with default connected org', () => {
        expect(mockedSnapshotFactory.createFromZip).toHaveBeenCalledWith(
          normalize(join('path', 'to', 'resources.zip')),
          'foo',
          expect.objectContaining({})
        );
      });

    test
      .stdout()
      .stderr()
      .stub(CliUx.ux, 'confirm', () => async () => true)
      .command(['org:resources:push', '-o', 'myorg'])
      .it('should work with specified target org', () => {
        expect(mockedProject).toHaveBeenCalledWith(expect.anything(), 'myorg');
        expect(mockedSnapshotFactory.createFromZip).toHaveBeenCalledWith(
          normalize(join('path', 'to', 'resources.zip')),
          'myorg',
          expect.objectContaining({})
        );
      });

    test
      .stdout()
      .stderr()
      .stub(CliUx.ux, 'confirm', () => async () => true)
      .command(['org:resources:push'])
      .it('should set a 60 seconds wait', () => {
        expect(mockedSnapshotFactory.createFromZip).toHaveBeenCalledWith(
          normalize(join('path', 'to', 'resources.zip')),
          'foo',
          {wait: 60}
        );
      });

    test
      .stdout()
      .stderr()
      .stub(CliUx.ux, 'confirm', () => async () => true)
      .command(['org:resources:push', '-w', '99'])
      .it('should set a 99 seconds wait', () => {
        expect(mockedSnapshotFactory.createFromZip).toHaveBeenCalledWith(
          normalize(join('path', 'to', 'resources.zip')),
          'foo',
          {wait: 99}
        );
      });

    test
      .stdout()
      .stderr()
      .stub(CliUx.ux, 'confirm', () => async () => true)
      .command(['org:resources:push'])
      .it('#should not apply missing resources', () => {
        expect(mockedApplySnapshot).toHaveBeenCalledWith(false, {wait: 60});
      });

    test
      .stdout()
      .stderr()
      .stub(CliUx.ux, 'confirm', () => async () => true)
      .command(['org:resources:push', '-d'])
      .it('should apply missing resources', () => {
        expect(mockedApplySnapshot).toHaveBeenCalledWith(true, {wait: 60});
      });

    test
      .stdout()
      .stderr()
      .stub(CliUx.ux, 'confirm', () => async () => true)
      .command(['org:resources:push'])
      .it('should delete the compressed folder', () => {
        expect(mockedDeleteTemporaryZipFile).toHaveBeenCalledTimes(1);
      });

    test
      .stdout()
      .stderr()
      .stub(CliUx.ux, 'confirm', () => async () => true)
      .do(() => {
        mockedValidateSnapshot.mockImplementationOnce(() => {
          throw new Error('You shall not pass');
        });
      })
      .command(['org:resources:push'])
      .catch(() => {
        expect(mockedDeleteTemporaryZipFile).toHaveBeenCalledTimes(1);
      })
      .it('should delete the compressed folder on error');

    test
      .stdout()
      .stderr()
      .stub(CliUx.ux, 'confirm', () => async () => true)
      .command(['org:resources:push'])
      .it('should delete the snapshot', () => {
        expect(mockedDeleteSnapshot).toHaveBeenCalledTimes(1);
      });

    test
      .stdout()
      .stderr()
      .command(['org:resources:push', '--skipPreview'])
      .it('should apply snapshot without confirmation', () => {
        expect(mockedApplySnapshot).toHaveBeenCalledTimes(1);
      });

    test
      .stdout()
      .stderr()
      .stub(CliUx.ux, 'confirm', () => async () => true)
      .command(['org:resources:push', '--previewLevel', 'light'])
      .it('should only display light preview', () => {
        expect(mockedPreviewSnapshot).toHaveBeenCalledWith(
          expect.anything(),
          expect.anything(),
          false
        );
      });

    test
      .stdout()
      .stderr()
      .stub(CliUx.ux, 'confirm', () => async () => true)
      .command(['org:resources:push', '--previewLevel', 'detailed'])
      .it('should display light and expanded preview', () => {
        expect(mockedPreviewSnapshot).toHaveBeenCalledWith(
          expect.anything(),
          expect.anything(),
          true
        );
      });
  });

  describe('when the dryRun returns a report with errors', () => {
    beforeAll(async () => {
      await mockSnapshotFactoryReturningInvalidSnapshot();
    });

    test
      .stdout()
      .stderr()
      .command(['org:resources:push'])
      .catch(/Invalid snapshot/)
      .it('should show the invalid snapshot error');

    test
      .stdout()
      .stderr()
      .command(['org:resources:push'])
      .catch(() => {
        expect(mockedPreviewSnapshot).toHaveBeenCalledTimes(1);
        expect(mockedApplySnapshot).toHaveBeenCalledTimes(0);
      })
      .it('should only preview the snapshot');

    test
      .stdout()
      .stderr()
      .command(['org:resources:push'])
      .catch(/Invalid snapshot/)
      .it('should return an invalid snapshot error message');
  });

  describe('when the dryRun returns a report with missing vault entries', () => {
    beforeAll(async () => {
      await mockSnapshotFactoryReturningSnapshotWithMissingVaultEntries();
    });

    describe('when the user refuses to migrate or type in the missing vault entries', () => {
      test
        .stdout()
        .stderr()
        .stub(CliUx.ux, 'confirm', () => async () => false)
        .command(['org:resources:push'])
        .catch(/Your destination organization is missing vault entries/)
        .it('should show the missingVaultEntries snapshot error');

      test
        .stdout()
        .stderr()
        .stub(CliUx.ux, 'confirm', () => async () => false)
        .command(['org:resources:push'])
        .catch(() => {
          expect(mockedPreviewSnapshot).toHaveBeenCalledTimes(1);
          expect(mockedApplySnapshot).toHaveBeenCalledTimes(0);
        })
        .it('should only preview the snapshot');
    });
  });
  //#endregion
});
Example #25
Source File: preview.spec.ts From cli with Apache License 2.0 | 4 votes |
describe('org:resources:preview', () => {
  const doMockPreconditions = function () {
    const mockGit = (_target: Command) => Promise.resolve();
    mockedIsGitInstalled.mockReturnValue(mockGit);
  };

  beforeAll(() => {
    mockConfig();
    mockProject();
    mockAuthenticatedClient();
  });

  beforeEach(() => {
    doMockPreconditions();
    mockUserHavingAllRequiredPlatformPrivileges();
  });

  afterEach(() => {
    mockEvaluate.mockClear();
    mockedIsGitInstalled.mockClear();
  });

  describe('when the report contains no resources in error', () => {
    beforeAll(async () => {
      await mockSnapshotFactoryReturningValidSnapshot();
    });

    afterAll(() => {
      mockedSnapshotFactory.mockReset();
    });

    test
      .do(() => {
        mockUserNotHavingAllRequiredPlatformPrivileges();
      })
      .stdout()
      .stderr()
      .command(['org:resources:preview'])
      .catch(/You are not authorized to create snapshot/)
      .it('should return an error message if privileges are missing');

    test
      .stdout()
      .stderr()
      .command(['org:resources:preview'])
      .it('should use cwd as project', () => {
        expect(mockedProject).toHaveBeenCalledWith(cwd(), 'foo');
      });

    test
      .stdout()
      .stderr()
      .command(['org:resources:preview'])
      .it('should work with default connected org', () => {
        expect(mockedSnapshotFactory.createFromZip).toHaveBeenCalledWith(
          normalize(join('path', 'to', 'resources.zip')),
          'foo',
          expect.objectContaining({})
        );
      });

    test
      .stdout()
      .stderr()
      .command(['org:resources:preview', '-o', 'myorg'])
      .it('should work with specified target org', () => {
        expect(mockedSnapshotFactory.createFromZip).toHaveBeenCalledWith(
          normalize(join('path', 'to', 'resources.zip')),
          'myorg',
          expect.objectContaining({})
        );
      });

    test
      .stdout()
      .stderr()
      .command(['org:resources:preview'])
      .it('should set a 60 seconds wait', () => {
        expect(mockedSnapshotFactory.createFromZip).toHaveBeenCalledWith(
          normalize(join('path', 'to', 'resources.zip')),
          'foo',
          {wait: 60}
        );
      });

    test
      .stdout()
      .stderr()
      .command(['org:resources:preview', '-w', '312'])
      .it('should set a 312 seconds wait', () => {
        expect(mockedSnapshotFactory.createFromZip).toHaveBeenCalledWith(
          normalize(join('path', 'to', 'resources.zip')),
          'foo',
          {wait: 312}
        );
      });

    test
      .stdout()
      .stderr()
      .command(['org:resources:preview'])
      .it('#validate should not take into account missing resources', () => {
        expect(mockedValidateSnapshot).toHaveBeenCalledWith(false, {wait: 60});
      });

    test
      .stdout()
      .stderr()
      .command(['org:resources:preview', '-d'])
      .it('#validate should take into account missing resources', () => {
        expect(mockedValidateSnapshot).toHaveBeenCalledWith(true, {wait: 60});
      });

    test
      .stdout()
      .stderr()
      .command(['org:resources:preview'])
      .it('should preview the snapshot', () => {
        expect(mockedPreviewSnapshot).toHaveBeenCalledTimes(1);
      });

    test
      .stdout()
      .stderr()
      .command(['org:resources:preview'])
      .it('should delete the compressed folder', () => {
        expect(mockedDeleteTemporaryZipFile).toHaveBeenCalledTimes(1);
      });

    test
      .stdout()
      .stderr()
      .do(() => {
        mockedValidateSnapshot.mockImplementationOnce(() => {
          throw new Error('You shall not pass');
        });
      })
      .command(['org:resources:preview'])
      .catch(() => {
        expect(mockedDeleteTemporaryZipFile).toHaveBeenCalledTimes(1);
      })
      .it('should delete the compressed folder on error');

    test
      .stdout()
      .stderr()
      .command(['org:resources:preview'])
      .it('should delete the snapshot', () => {
        expect(mockedDeleteSnapshot).toHaveBeenCalledTimes(1);
      });

    test
      .stdout()
      .stderr()
      .command(['org:resources:preview'])
      .it('should display expanded preview', () => {
        expect(mockedPreviewSnapshot).toHaveBeenCalledWith(
          expect.anything(),
          expect.anything(),
          true
        );
      });

    test
      .stdout()
      .stderr()
      .command(['org:resources:preview', '--previewLevel', 'light'])
      .it('should only display light preview', () => {
        expect(mockedPreviewSnapshot).toHaveBeenCalledWith(
          expect.anything(),
          expect.anything(),
          false
        );
      });
  });

  //#region TODO: CDX-948, the setup phase needs to be rewritten and the assertions split up (e.g. the error is not triggered directly by the function and therefore should not be handled)
  describe('when the report contains resources in error', () => {
    beforeAll(async () => {
      await mockSnapshotFactoryReturningInvalidSnapshot();
    });

    beforeEach(() => {
      mockedAreResourcesInError.mockReturnValueOnce(false);
      mockedSaveDetailedReport.mockReturnValueOnce(
        normalize(join('saved', 'snapshot'))
      );
    });

    afterAll(() => {
      mockedSnapshotFactory.mockReset();
    });

    test
      .stdout()
      .stderr()
      .command(['org:resources:preview'])
      .catch(/Invalid snapshot/)
      .it('should throw an error for invalid snapshots');

    test
      .stdout()
      .stderr()
      .command(['org:resources:preview'])
      .catch((ctx) => {
        expect(ctx.message).toContain(
          'https://platform.cloud.coveo.com/admin/#potato-org/organization/resource-snapshots/banana-snapshot'
        );
      })
      .it('should print an URL to the snapshot page');
  });

  describe('when the snapshot is not in sync with the target org', () => {
    beforeAll(async () => {
      await mockSnapshotFactoryReturningInvalidSnapshot();
      mockSnapshotFacade();
    });

    beforeEach(() => {
      mockedAreResourcesInError.mockReturnValueOnce(true);
      mockedSaveDetailedReport.mockReturnValueOnce(join('saved', 'snapshot'));
    });

    afterAll(() => {
      mockedSnapshotFactory.mockReset();
    });

    test
      .stdout()
      .stderr()
      .command(['org:resources:preview'])
      .catch(() => {
        expect(mockedTryAutomaticSynchronization).toHaveBeenCalledWith(true);
      })
      .it('should have detected and tried to resolve the conflicts');

    test
      .stdout()
      .stderr()
      .command(['org:resources:preview', '--sync'])
      .catch(() => {
        expect(mockedTryAutomaticSynchronization).toHaveBeenCalledWith(false);
      })
      .it(
        'should try to apply synchronization plan without asking for confirmation'
      );
  });

  describe('when the report contains resources with missing vault entries', () => {
    beforeAll(async () => {
      await mockSnapshotFactoryReturningSnapshotWithMissingVaultEntries();
    });

    afterAll(() => {
      mockedSnapshotFactory.mockReset();
    });

    describe('when the user refuses to migrate or type in the missing vault entries', () => {
      test
        .stdout()
        .stderr()
        .stub(CliUx.ux, 'confirm', () => async () => false)
        .command(['org:resources:preview'])
        .catch(/Your destination organization is missing vault entries/)
        .it('should throw an error for invalid snapshots');
    });
  });
  //#endregion
});