fs-extra#readJSONSync TypeScript Examples
The following examples show how to use readJSONSync from the fs-extra package (fs-extra also exports it under the alias readJsonSync, which many of the snippets use). Each example notes the original project, source file, and license above the code.
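As a quick orientation before the project examples, here is a minimal sketch of the call itself; the file paths are placeholders, not taken from any of the projects below.

import { readJSONSync } from 'fs-extra';

// Read and parse a JSON file synchronously; by default this throws if the file
// is missing or contains invalid JSON.
const pkg = readJSONSync('package.json');
console.log(pkg.name);

// With {throws: false} the call returns null instead of throwing, which is the
// pattern several examples below use for optional files such as manifests.
const manifest = readJSONSync('path/to/manifest.json', { throws: false }) ?? {};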
Example #1
Source File: project.ts From cli with Apache License 2.0 | 6 votes |
public async compressResources() {
let cachedManifest;
try {
this.ensureProjectCompliance();
cachedManifest = readJsonSync(this.resourceManifestPath, {
throws: false,
});
rmSync(this.resourceManifestPath, {force: true});
await new Promise<void>((resolve, reject) => {
const outputStream = createWriteStream(this.temporaryZipPath);
const archive = archiver('zip');
outputStream.on('close', () => resolve());
archive.on('error', (err) => reject(err));
archive.pipe(outputStream);
archive.directory(this.resourcePath, false);
archive.finalize();
});
if (cachedManifest) {
writeJsonSync(this.resourceManifestPath, cachedManifest);
}
return this.temporaryZipPath;
} catch (error) {
if (cachedManifest) {
writeJsonSync(this.resourceManifestPath, cachedManifest);
}
CliUx.ux.error(error as string | Error);
}
}
Example #2
Source File: nx.ts From nx-dotnet with MIT License | 6 votes |
export function getWorkspacePackages(): string[] {
const w = readWorkspaceJson();
return Object.values(w.projects).reduce((packages, configuration) => {
const path = join(workspaceRoot, configuration.root, 'package.json');
if (existsSync(path)) {
const { name } = readJsonSync(path);
packages.push(name);
}
return packages;
}, [] as string[]);
}
Example #3
Source File: vaultHandler.ts From cli with Apache License 2.0 | 6 votes |
private getVaultEntryModels(
entries: VaultEntryAttributes[]
): VaultEntryModel[] {
const data = readJsonSync(this.vaultEntryFilePath);
const models: VaultEntryModel[] = [];
for (const {resourceName, resourceType, vaultEntryId} of entries) {
const jsonPath = this.getVaultEntryJsonPath(vaultEntryId, resourceName);
models.push({
key: vaultEntryId,
attributeReferences: [
{
jsonPath,
resourceName,
resourceType,
},
],
organizationId: this.organizationId,
value: data[vaultEntryId],
valueType: VaultValueType.STRING,
vaultVisibilityType: VaultVisibilityType.STRICT,
});
}
return models;
}
Example #4
Source File: vaultHandler.ts From cli with Apache License 2.0 | 6 votes |
private ensureEntriesValidity(
requiredEntries: VaultEntryAttributes[]
): void | never {
const missingEntries: string[] = [];
let data: Record<string, unknown>;
try {
data = readJsonSync(this.vaultEntryFilePath);
} catch (error) {
throw new InvalidVaultFileError(error);
}
for (const {vaultEntryId} of requiredEntries) {
if (
data[vaultEntryId] === undefined ||
data[vaultEntryId] === VaultHandler.defaultEntryValue
) {
missingEntries.push(vaultEntryId);
}
}
if (missingEntries.length > 0) {
throw new MissingVaultEntryValueError(missingEntries);
}
}
Example #5
Source File: fixturedActions.test.ts From dt-mergebot with MIT License | 6 votes |
/* You can use the following command to add/update fixtures with an existing PR
*
* BOT_AUTH_TOKEN=XYZ npm run create-fixture -- 43164
*/
async function testFixture(dir: string) {
// _foo.json are input files, except for Date.now from derived.json
const responsePath = join(dir, "_response.json");
const filesPath = join(dir, "_files.json");
const downloadsPath = join(dir, "_downloads.json");
const derivedPath = join(dir, "derived.json");
const resultPath = join(dir, "result.json");
const mutationsPath = join(dir, "mutations.json");
const JSONString = (value: any) => scrubDiagnosticDetails(JSON.stringify(value, null, " ") + "\n");
const response: ApolloQueryResult<PR> = readJsonSync(responsePath);
const files = readJsonSync(filesPath);
const downloads = readJsonSync(downloadsPath);
const prInfo = response.data.repository?.pullRequest;
if (!prInfo) throw new Error("Should never happen");
const derived = await deriveStateForPR(
prInfo,
(expr: string) => Promise.resolve(files[expr] as string),
(name: string, _until?: Date) => name in downloads ? downloads[name] : 0,
new Date(readJsonSync(derivedPath).now),
);
const action = process(derived);
expect(JSONString(action)).toMatchFile(resultPath);
expect(JSONString(derived)).toMatchFile(derivedPath);
const mutations = await executePrActions(action, prInfo, /*dry*/ true);
expect(JSONString(mutations)).toMatchFile(mutationsPath);
}
Example #6
Source File: filesDiffProcessor.ts From cli with Apache License 2.0 | 6 votes |
export function recursiveDirectoryDiff(
currentDir: string,
nextDir: string,
deleteMissingResources: boolean
) {
const currentFilePaths = getAllFilesPath(currentDir);
const nextFilePaths = getAllFilesPath(nextDir);
nextFilePaths.forEach((filePath) => {
const nextFileJson = readJsonSync(join(nextDir, filePath));
let dataToWrite = nextFileJson;
if (currentFilePaths.has(filePath)) {
currentFilePaths.delete(filePath);
const currentFileJSON = readJsonSync(join(currentDir, filePath));
dataToWrite = buildDiffedJson(
currentFileJSON,
nextFileJson,
deleteMissingResources
);
}
writeJsonSync(join(currentDir, filePath), dataToWrite, defaultWriteOptions);
});
if (deleteMissingResources) {
currentFilePaths.forEach((filePath) => rmSync(join(currentDir, filePath)));
}
}
Example #7
Source File: project.ts From cli with Apache License 2.0 | 6 votes |
public writeResourcesManifest(orgId: string) {
try {
const manifestJson =
readJsonSync(this.resourceManifestPath, {throws: false}) ?? {};
writeJsonSync(this.resourceManifestPath, {...manifestJson, orgId});
} catch (e: unknown) {
// noop
}
}
Example #8
Source File: project.ts From cli with Apache License 2.0 | 6 votes |
private formatResourceFiles(dirPath = this.resourcePath) {
const files = readdirSync(dirPath, {withFileTypes: true});
files.forEach((file) => {
const filePath = join(dirPath, file.name);
if (file.isDirectory()) {
this.formatResourceFiles(filePath);
return;
}
if (file.isFile() && extname(filePath) === '.json') {
const content = readJsonSync(filePath);
writeJsonSync(filePath, content, Project.jsonFormat);
}
});
}
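Examples #6 and #8 pass a formatting options object to writeJsonSync (defaultWriteOptions, Project.jsonFormat). Those objects are defined elsewhere in the projects; a plausible minimal sketch, assuming the common jsonfile-style spaces setting and a placeholder path:

import { readJsonSync, writeJsonSync } from 'fs-extra';

// Hypothetical formatting options; `spaces: 2` pretty-prints with two-space indentation.
const jsonFormat = { spaces: 2 };

const content = readJsonSync('resources/EXTENSION.json');
writeJsonSync('resources/EXTENSION.json', content, jsonFormat);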
Example #9
Source File: config.ts From cli with Apache License 2.0 | 6 votes |
public get(): Configuration {
this.ensureExists();
try {
const content = readJSONSync(this.configPath);
if (content instanceof Error) {
throw content;
}
if (!this.isSettingVersionInRange(content)) {
throw new IncompatibleConfigurationError(content.version);
}
return content;
} catch (e) {
if (e instanceof IncompatibleConfigurationError) {
CliUx.ux.error(
dedent`
The configuration at ${this.configPath} is not compatible with this version of the CLI:
${e.message}`,
{exit: false}
);
} else {
CliUx.ux.error(
`Error while reading configuration at ${this.configPath}`,
{exit: false}
);
}
this.replace(DefaultConfig);
CliUx.ux.error(
`Configuration has been reset to default value: ${JSON.stringify(
DefaultConfig
)}`,
{exit: false}
);
return DefaultConfig;
}
}
Example #10
Source File: semantic-model-provider.ts From ui5-language-assistant with Apache License 2.0 | 6 votes |
// Load the library files from the file system.
// To save the libraries to the file system use downloadLibraries.
function loadLibraries(version: TestModelVersion): Record<string, Json> {
const inputFolder = getModelFolder(version);
const files = readdirSync(inputFolder);
const LIBFILE_SUFFIX = ".designtime.api.json";
const libFiles = filter(files, (_) => _.endsWith(LIBFILE_SUFFIX));
const libToFileContent = reduce(
libFiles,
(libToFileContentMap, file) => {
const libName = file.substring(0, file.length - LIBFILE_SUFFIX.length);
libToFileContentMap[libName] = readJsonSync(resolve(inputFolder, file));
return libToFileContentMap;
},
Object.create(null)
);
return libToFileContent;
}
Example #11
Source File: snapshots-utils.ts From ui5-language-assistant with Apache License 2.0 | 6 votes |
export function readSnapshotDiagnosticsLSPResponse(
sourcesTestDir: string
): Diagnostic[] {
const lspResponsePath = getSnapshotDiagnosticsLSPResponsePath(sourcesTestDir);
const expectedDiagnostics = readJsonSync(lspResponsePath);
return expectedDiagnostics;
}
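In the typings for fs-extra, readJsonSync returns any, which is why Example #11 can hand the parsed value back as Diagnostic[] without a cast. A sketch that makes the intended shape explicit; the import path for Diagnostic and the path construction are assumptions, not code from the original project:

import { readJsonSync } from 'fs-extra';
import type { Diagnostic } from 'vscode-languageserver-types';

export function readSnapshotDiagnosticsLSPResponse(
  sourcesTestDir: string
): Diagnostic[] {
  // Hypothetical path, standing in for getSnapshotDiagnosticsLSPResponsePath.
  const lspResponsePath = `${sourcesTestDir}/lsp-response.json`;
  return readJsonSync(lspResponsePath) as Diagnostic[];
}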
Example #12
Source File: index.ts From cli with MIT License | 6 votes |
// Install npm dependencies into the build folder
private async npmInstall(baseDir) {
return new Promise((resolve, reject) => {
const installDirectory = baseDir;
const pkgJson: string = join(installDirectory, 'package.json');
if (!existsSync(pkgJson)) {
return resolve(void 0);
}
const pkg = readJSONSync(pkgJson);
const allDeps = Object.keys(
Object.assign({}, pkg.devDependencies, pkg.dependencies)
);
const spin = new Spin({
text: 'Dependencies installing...',
});
spin.start();
this.checkDepInstalled(baseDir, spin, allDeps);
installNpm({
baseDir: installDirectory,
register: this.options.npm,
slience: true,
debugLog: this.core.debug,
})
.then(() => {
clearTimeout(this.checkDepInstallTimeout);
spin.stop();
resolve(true);
})
.catch(err => {
const errmsg = (err && err.message) || err;
this.core.cli.log(` - npm install err ${errmsg}`);
clearTimeout(this.checkDepInstallTimeout);
spin.stop();
reject(errmsg);
});
});
}
Example #13
Source File: vaultHandler.spec.ts From cli with Apache License 2.0 | 5 votes |
mockedReadJsonSync = jest.mocked(readJsonSync)
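This one-liner, like Examples #14, #15, and #21 below, comes from a Jest spec file. A minimal sketch of how such a mock is usually wired up; the jest.mock call and the fixture value are assumptions about the surrounding test setup, not code from the original specs:

import { readJsonSync } from 'fs-extra';

// Replace the real fs-extra module with automatic mocks for the whole spec file.
jest.mock('fs-extra');

const mockedReadJsonSync = jest.mocked(readJsonSync);

beforeEach(() => {
  mockedReadJsonSync.mockReset();
  // Hypothetical fixture; real specs return whatever JSON the code under test expects.
  mockedReadJsonSync.mockReturnValue({ key: 'value' });
});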
Example #14
Source File: filesDiffProcessor.spec.ts From cli with Apache License 2.0 | 5 votes |
mockedReadJson = jest.mocked(readJSONSync)
Example #15
Source File: config.spec.ts From cli with Apache License 2.0 | 5 votes |
mockedReadJSON = jest.mocked(readJSONSync)
Example #16
Source File: constants.ts From elemental4 with GNU General Public License v3.0 | 5 votes |
function getgcloud() {
try {
return readJSONSync('.gcloud.json');
} catch (error) {
return {};
}
}
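Example #16 swallows read and parse errors with try/catch and falls back to an empty object. A rough equivalent using the throws option instead (a sketch, not taken from the elemental4 project):

import { readJSONSync } from 'fs-extra';

function getgcloud() {
  // {throws: false} makes readJSONSync return null for a missing or malformed file.
  return readJSONSync('.gcloud.json', { throws: false }) ?? {};
}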
Example #17
Source File: index.test.ts From jest-chrome with MIT License | 5 votes |
chromeSchema = readJSONSync(
resolve(__dirname, 'jest-chrome-schema.json'),
)
Example #18
Source File: change-file.ts From electron-playground with MIT License | 5 votes |
readJSON = (path: string) => () => readJsonSync(path)
Example #19
Source File: cli.ts From cli with Apache License 2.0 | 5 votes |
export function getConfig() {
const pathToConfig = getConfigFilePath();
return readJsonSync(pathToConfig);
}
Example #20
Source File: project.ts From cli with Apache License 2.0 | 5 votes |
public getResourceManifest(): ResourceManifest | null {
return readJsonSync(this.resourceManifestPath, {throws: false});
}
Example #21
Source File: project.spec.ts From cli with Apache License 2.0 | 5 votes |
mockedReadJSONSync = jest.mocked(readJsonSync)
Example #22
Source File: pull.ts From cli with Apache License 2.0 | 5 votes |
public static flags = {
...wait(),
...organization(
'The unique identifier of the organization from which to pull the resources'
),
snapshotId: Flags.string({
char: 's',
exclusive: ['resourceTypes'],
description:
'The unique identifier of the snapshot to pull. If not specified, a new snapshot will be created. You can list available snapshots in your organization with org:resources:list',
}),
git: Flags.boolean({
char: 'g',
description:
'Whether to create a git repository when creating a new project.',
default: true,
allowNo: true,
}),
overwrite: Flags.boolean({
char: 'f',
description: 'Overwrite resources directory if it exists.',
default: false,
}),
resourceTypes: Flags.build<ResourceSnapshotType>({
parse: async (resourceType: ResourceSnapshotType) => resourceType,
})({
char: 'r',
helpValue: 'type1 type2',
description: 'The resources types to pull from the organization.',
multiple: true,
options: Object.values(ResourceSnapshotType),
default: Object.values(ResourceSnapshotType),
}),
model: Flags.build<SnapshotPullModel>({
parse: async (input: string): Promise<SnapshotPullModel> => {
const model = readJsonSync(input);
validateSnapshotPullModel(model);
return model;
},
})({
char: 'm',
helpValue: 'path/to/snapshot.json',
exclusive: ['snapshotId', 'resourceTypes', 'target'],
description:
'The path to a snapshot pull model. This flag is useful when you want to include only specific resource items in your snapshot (e.g., a subset of sources). Use the "org:resources:model:create" command to create a new Snapshot Pull Model',
}),
};
Example #23
Source File: settings-config-spec.ts From ui5-language-assistant with Apache License 2.0 | 5 votes |
describe("settings configuration properties", () => {
let packageJsonSettings: Record<string, Setting>;
before(() => {
// Get the settings from the package.json
const packageJsonPath = require.resolve(
"vscode-ui5-language-assistant/package.json"
);
const packageJsonContent = readJsonSync(packageJsonPath);
packageJsonSettings =
packageJsonContent.contributes.configuration.properties;
});
it("default setting values in package.json have the correct type", () => {
forEach(packageJsonSettings, (value, key) => {
expect(
typeof value.default,
`Setting ${key} default value type does not match the setting's defined type`
).to.equal(value.type);
});
});
it("settings in package.json are in sync with the settings package", () => {
const defaultSettingsFromPackageJson = parseSettings(packageJsonSettings);
const defaultSettings = getDefaultSettings();
expect(
defaultSettingsFromPackageJson,
"settings from package.json don't match the default settings in the language server"
).to.deep.equal({
UI5LanguageAssistant: defaultSettings,
});
});
it("enums in package.json are in sync with the settings package", () => {
const pkgJsonSettingsWithEnum = pickBy(packageJsonSettings, (_) =>
has(_, "enum")
);
forEach(pkgJsonSettingsWithEnum, (pkgJsonSetting, settingsKey) => {
const settingsModulePropName = camelCase(
settingsKey.replace(
/UI5LanguageAssistant.(\w+)\.(\w+)/,
"valid $1 $2 values"
)
);
const settingsModulePropValue = keys(
settingsModule[settingsModulePropName]
);
const pkgJsonPropValue = pkgJsonSetting.enum;
expect(settingsModulePropValue).to.deep.equalInAnyOrder(pkgJsonPropValue);
});
});
it("use the correct logging configuration property name", () => {
expect(packageJsonSettings[LOGGING_LEVEL_CONFIG_PROP]).to.exist;
expect(
packageJsonSettings[LOGGING_LEVEL_CONFIG_PROP].description
).to.include("logging");
});
type Setting = {
scope: string;
type: string;
default: unknown;
description: string;
enum?: string[];
};
function parseSettings(properties: Record<string, Setting>): unknown {
const defaultSettings = {};
forEach(properties, (value, key) => {
set(defaultSettings, key, value.default);
});
return defaultSettings;
}
});
Example #24
Source File: index.ts From sonolus-bandori-engine with MIT License | 5 votes |
levelData = fromBestdori(
readJsonSync(__dirname + '/level.json'),
archetypes
)
Example #25
Source File: create-fixture.ts From dt-mergebot with MIT License | 5 votes |
export default async function main(directory: string, overwriteInfo: boolean) {
const writeJsonSync = (file: string, json: unknown) =>
writeFileSync(file, scrubDiagnosticDetails(JSON.stringify(json, undefined, 2) + "\n"));
const fixturePath = join("src", "_tests", "fixtures", directory);
const prNumber = parseInt(directory, 10);
if (isNaN(prNumber)) throw new Error(`Expected ${directory} to be parseable as a PR number`);
if (!existsSync(fixturePath)) mkdirSync(fixturePath);
const jsonFixturePath = join(fixturePath, "_response.json");
if (overwriteInfo || !existsSync(jsonFixturePath)) {
writeJsonSync(jsonFixturePath, await getPRInfo(prNumber));
}
const response: ApolloQueryResult<PR> = readJsonSync(jsonFixturePath);
const filesJSONPath = join(fixturePath, "_files.json");
const filesFetched: {[expr: string]: string | undefined} = {};
const downloadsJSONPath = join(fixturePath, "_downloads.json");
const downloadsFetched: {[packageName: string]: number} = {};
const derivedFixturePath = join(fixturePath, "derived.json");
const shouldOverwrite = (file: string) => overwriteInfo || !existsSync(file);
const prInfo = response.data.repository?.pullRequest;
if (!prInfo) {
console.error(`Could not get PR info for ${directory}, is the number correct?`);
return;
}
const derivedInfo = await deriveStateForPR(
prInfo,
shouldOverwrite(filesJSONPath) ? initFetchFilesAndWriteToFile() : getFilesFromFile,
shouldOverwrite(downloadsJSONPath) ? initGetDownloadsAndWriteToFile() : getDownloadsFromFile,
shouldOverwrite(derivedFixturePath) ? undefined : getTimeFromFile(),
);
writeJsonSync(derivedFixturePath, derivedInfo);
const resultFixturePath = join(fixturePath, "result.json");
const actions = computeActions.process(derivedInfo);
writeJsonSync(resultFixturePath, actions);
const mutationsFixturePath = join(fixturePath, "mutations.json");
const mutations = await executePrActions(actions, prInfo, /*dry*/ true);
writeJsonSync(mutationsFixturePath, mutations);
console.log("Recorded");
function initFetchFilesAndWriteToFile() {
writeJsonSync(filesJSONPath, {}); // one-time initialization of an empty storage
return fetchFilesAndWriteToFile;
}
async function fetchFilesAndWriteToFile(expr: string, limit?: number) {
filesFetched[expr] = await fetchFile(expr, limit);
writeJsonSync(filesJSONPath, filesFetched);
return filesFetched[expr];
}
function getFilesFromFile(expr: string) {
return readJsonSync(filesJSONPath)[expr];
}
function initGetDownloadsAndWriteToFile() {
writeJsonSync(downloadsJSONPath, {}); // one-time initialization of an empty storage
return getDownloadsAndWriteToFile;
}
async function getDownloadsAndWriteToFile(packageName: string, until?: Date) {
const downloads = await getMonthlyDownloadCount(packageName, until);
downloadsFetched[packageName] = downloads;
writeJsonSync(downloadsJSONPath, downloadsFetched);
return downloads;
}
function getDownloadsFromFile(packageName: string) {
return readJsonSync(downloadsJSONPath)[packageName];
}
function getTimeFromFile() {
return new Date(readJsonSync(derivedFixturePath).now);
}
}
Example #26
Source File: orgResources.specs.ts From cli with Apache License 2.0 | 4 votes |
describe('org:resources', () => {
let testOrgId = '';
const {accessToken} = getConfig();
const snapshotProjectPath = join(getUIProjectPath(), 'snapshot-project');
const defaultTimeout = 10 * 60e3;
let processManager: ProcessManager;
let platformClient: PlatformClient;
const pathToStub = join(cwd(), '__stub__');
const createNewTerminal = (
args: string[],
procManager: ProcessManager,
cwd: string,
debugName: string
) => {
return new Terminal('node', args, {cwd}, procManager, debugName);
};
const createFieldWithoutUsingSnapshot = async (client: PlatformClient) => {
await client.field.create({
description: '',
facet: false,
includeInQuery: true,
includeInResults: true,
mergeWithLexicon: false,
multiValueFacet: false,
multiValueFacetTokenizers: ';',
name: 'firstfield',
ranking: false,
sort: false,
stemming: false,
system: false,
type: FieldTypes.STRING,
useCacheForComputedFacet: false,
useCacheForNestedQuery: false,
useCacheForNumericQuery: false,
useCacheForSort: false,
});
};
const previewChange = (
targetOrg: string,
procManager: ProcessManager,
debugName = 'org-config-preview'
) => {
const args: string[] = [
process.env.CLI_EXEC_PATH!,
'org:resources:preview',
`-o=${targetOrg}`,
'--sync',
'--wait=0',
'-p=light',
];
return createNewTerminal(args, procManager, snapshotProjectPath, debugName);
};
const pushToOrg = async (
targetOrg: string,
procManager: ProcessManager,
debugName = 'org-config-push'
) => {
const args: string[] = [
process.env.CLI_EXEC_PATH!,
'org:resources:push',
'--skipPreview',
`-o=${targetOrg}`,
'--wait=0',
];
const pushTerminal = createNewTerminal(
args,
procManager,
snapshotProjectPath,
debugName
);
await pushTerminal.when('exit').on('process').do().once();
};
const addOrgIdToModel = (
fromModelPath: string,
destinationModelPath: string,
orgId: string
) => {
const model = readJsonSync(fromModelPath);
writeJsonSync(destinationModelPath, {...model, orgId});
};
const pullFromOrg = async (
procManager: ProcessManager,
destinationPath: string,
additionalFlags: string[] = [],
debugName: string
) => {
const args: string[] = [
process.env.CLI_EXEC_PATH!,
'org:resources:pull',
'-o',
'--wait=0',
'--no-git',
...additionalFlags,
];
const pullTerminal = createNewTerminal(
args,
procManager,
destinationPath,
debugName
);
const pullTerminalExitPromise = pullTerminal
// TODO: CDX-744: understand why cannot use process.on('exit')
.when(/Project updated/)
.on('stderr')
.do()
.once();
await pullTerminal
.when(isGenericYesNoPrompt)
.on('stderr')
.do(answerPrompt(`y${EOL}`))
.until(pullTerminalExitPromise);
};
beforeAll(async () => {
testOrgId = await getTestOrg();
copySync('snapshot-project', snapshotProjectPath);
platformClient = getPlatformClient(testOrgId, accessToken);
processManager = new ProcessManager();
}, 5 * 60e3);
afterAll(async () => {
await processManager.killAllProcesses();
});
describe('org:resources:preview', () => {
describe('when resources are synchronized', () => {
let stdout = '';
const stdoutListener = (chunk: string) => {
stdout += chunk;
};
it(
'should preview the snapshot',
async () => {
const previewTerminal = previewChange(
testOrgId,
processManager,
'org-config-preview-sync'
);
const expectedOutput = [
'Extensions',
'\\+ 1 to create',
'Fields',
'\\+ 2 to create',
].join('\\s*');
const regex = new RegExp(expectedOutput, 'gm');
previewTerminal.orchestrator.process.stdout.on(
'data',
stdoutListener
);
const previewTerminalExitPromise = previewTerminal
.when('exit')
.on('process')
.do((proc) => {
proc.stdout.off('data', stdoutListener);
})
.once();
await previewTerminalExitPromise;
expect(stdout).toMatch(regex);
},
defaultTimeout
);
});
// TODO CDX-753: Create new unsynchronized state for E2E tests.
describe.skip('when resources are not synchronized', () => {
let stdout: string;
let stderr: string;
const stdoutListener = (chunk: string) => {
stdout += chunk;
};
const stderrListener = (chunk: string) => {
stderr += chunk;
};
beforeAll(async () => {
stdout = stderr = '';
await createFieldWithoutUsingSnapshot(platformClient);
});
it(
'should throw a synchronization warning on a field',
async () => {
const previewTerminal = previewChange(
testOrgId,
processManager,
'org-config-preview-unsync'
);
const process = previewTerminal.orchestrator.process;
process.stdout.on('data', stdoutListener);
process.stderr.on('data', stderrListener);
const previewTerminalExitPromise = previewTerminal
.when('exit')
.on('process')
.do((proc) => {
proc.stdout.off('data', stdoutListener);
proc.stderr.off('data', stderrListener);
})
.once();
await previewTerminalExitPromise;
expect(stdout).toMatch(/Previewing snapshot changes/);
expect(stderr).toMatch(/Checking for automatic synchronization/);
},
defaultTimeout
);
});
});
describe('org:resources:push', () => {
beforeAll(async () => {
await pushToOrg(testOrgId, processManager);
}, defaultTimeout);
it('should have pushed fields', async () => {
const fields = (await platformClient.field.list()).items;
expect(fields).toEqual(
expect.arrayContaining([
expect.objectContaining({name: 'firstfield'}),
expect.objectContaining({name: 'whereisbrian'}),
])
);
});
it('should have pushed extensions', async () => {
const extensions = await platformClient.extension.list();
expect(extensions).toEqual(
expect.arrayContaining([expect.objectContaining({name: 'palpatine'})])
);
});
});
describe('org:resources:pull', () => {
const destinationPath = getProjectPath('new-snapshot-project');
const getResourceFolderContent = (projectPath: string) =>
readdirSync(join(projectPath, 'resources'));
beforeEach(() => {
rmSync(destinationPath, {recursive: true, force: true});
ensureDirSync(destinationPath);
});
it(
"should pull the org's content",
async () => {
await pullFromOrg(
processManager,
destinationPath,
['-o', testOrgId],
'org-resources-pull-all'
);
const snapshotFiles = readdirSync(snapshotProjectPath);
const destinationFiles = readdirSync(destinationPath);
expect(snapshotFiles).toEqual(destinationFiles);
},
defaultTimeout
);
it(
'directory should only contain pulled resources and manifest',
async () => {
await pullFromOrg(
processManager,
destinationPath,
['-o', testOrgId, '-r', 'FIELD'],
'org-resources-pull-all-fields'
);
const originalResources = getResourceFolderContent(snapshotProjectPath);
const destinationResources = getResourceFolderContent(destinationPath);
expect(destinationResources.length).toBeGreaterThan(0);
expect(destinationResources.length).toBeLessThan(
originalResources.length + 1
);
},
defaultTimeout
);
it(
'snapshot should only contain one single field',
async () => {
const fixtureModelPath = join(
pathToStub,
'snapshotPullModel',
'oneFieldOnly.json'
);
const tmpModel = fileSync({postfix: '.json'});
addOrgIdToModel(fixtureModelPath, tmpModel.name, testOrgId);
await pullFromOrg(
processManager,
destinationPath,
['-m', tmpModel.name],
'org-resources-pull-one-field'
);
const fields = readJsonSync(
join(destinationPath, 'resources', 'FIELD.json')
);
expect(fields.resources.FIELD.length).toBe(1);
},
defaultTimeout
);
});
it('should not have any snapshot in the target org', async () => {
const snapshotlist = await platformClient.resourceSnapshot.list();
expect(snapshotlist).toHaveLength(0);
});
});
Example #27
Source File: pack-external-module.ts From malagu with MIT License | 4 votes |
/**
* Resolve the needed versions of production dependencies for external modules.
* @this - The active plugin instance
*/
function getProdModules(externalModules: any[], packagePath: string, dependencyGraph: any, forceExcludes: string[], runtime?: string): any[] {
const packageJson = readJSONSync(packagePath);
const prodModules: string[] = [];
// only process the module stated in dependencies section
if (!packageJson.dependencies) {
return [];
}
// Get versions of all transient modules
for (const module of externalModules) {
let moduleVersion = packageJson.dependencies[module.external];
if (moduleVersion) {
prodModules.push(`${module.external}@${moduleVersion}`);
// Check if the module has any peer dependencies and include them too
try {
const modulePackagePath = join(
dirname(packagePath),
'node_modules',
module.external,
'package.json'
);
const { peerDependencies, peerDependenciesMeta } = readJSONSync(modulePackagePath);
if (!isEmpty(peerDependencies)) {
if (!isEmpty(peerDependenciesMeta)) {
for (const key of Object.keys(peerDependencies)) {
if (peerDependenciesMeta[key]?.optional === true) {
unset(peerDependencies, key);
}
}
}
if (!isEmpty(peerDependencies)) {
const peerModules = getProdModules(Object.keys(peerDependencies).map(value => ({ external: value })),
packagePath,
dependencyGraph,
forceExcludes,
runtime
);
prodModules.push(...peerModules);
}
}
} catch (e) {
console.log(`WARNING: Could not check for peer dependencies of ${module.external}`);
}
} else {
if (!packageJson.devDependencies || !packageJson.devDependencies[module.external]) {
// Add transient dependencies if they appear not in the service's dev dependencies
const originInfo = get(dependencyGraph, 'dependencies', {})[module.origin] || {};
moduleVersion = get(get(originInfo, 'dependencies', {})[module.external], 'version');
if (typeof moduleVersion === 'object') {
moduleVersion = moduleVersion.optional;
}
if (!moduleVersion) {
moduleVersion = get(get(dependencyGraph, 'dependencies', {})[module.external], 'version');
if (!moduleVersion) {
console.log(`WARNING: Could not determine version of module ${module.external}`);
}
}
prodModules.push(moduleVersion ? `${module.external}@${moduleVersion}` : module.external);
} else if (
packageJson.devDependencies &&
packageJson.devDependencies[module.external] &&
!(forceExcludes.indexOf(module.external) !== -1)
) {
// To minimize the chance of breaking setups we whitelist packages available on AWS here. These are due to the previously missing check
// most likely set in devDependencies and should not lead to an error now.
const ignoredDevDependencies = ['aws-sdk'];
if (ignoredDevDependencies.indexOf(module.external) !== -1) {
// Runtime dependency found in devDependencies but not forcefully excluded
console.error(
`ERROR: Runtime dependency '${module.external}' found in devDependencies. Move it to dependencies or use forceExclude to explicitly exclude it.`
);
throw new Error(`Serverless-webpack dependency error: ${module.external}.`);
}
console.log(
`INFO: Runtime dependency '${module.external}' found in devDependencies. It has been excluded automatically.`
);
}
}
}
return prodModules;
}