crypto#Hash TypeScript Examples
The following examples show how to use crypto#Hash, the Hash class from Node's built-in crypto module, in TypeScript. The source project and license are noted above each example.
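For orientation, a Hash is created with crypto.createHash, fed data through chainable update calls, and finished with a single digest call. A minimal sketch, independent of the projects below:

import { createHash, Hash } from 'crypto';

const hash: Hash = createHash('sha256');
hash.update('some').update('data'); // update is chainable and accepts strings or Buffers
console.log(hash.digest('hex')); // hex-encoded SHA-256 digest; the Hash cannot be reused afterwards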
Example #1
Source File: Project.ts From garment with MIT License
updateHash(hash: Hash) {
  hash.update(this.name).update(this.path);
  for (const type of this.types.values()) {
    if (typeof type.updateHash === 'function') {
      type.updateHash(hash);
    }
  }
}
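Example #1 feeds a project's identifying fields into the hash so that a stable fingerprint can be derived from it. A hedged sketch of how such a method is typically consumed; the function name and the sha1 choice are assumptions, not part of garment:

import { createHash, Hash } from 'crypto';

// Derive a cache key from anything implementing the updateHash(hash) pattern above.
function fingerprint(target: { updateHash(hash: Hash): void }): string {
  const hash = createHash('sha1');
  target.updateHash(hash);
  return hash.digest('hex');
}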
Example #2
Source File: index.d.ts From amazon-kinesis-video-streams-webrtc-sdk-js-with-amazon-cognito with MIT No Attribution
updateHash(hash: Hash): void;
Example #3
Source File: DefaultCatalogProcessingEngine.ts From backstage with Apache License 2.0
constructor(
  private readonly logger: Logger,
  private readonly processingDatabase: ProcessingDatabase,
  private readonly orchestrator: CatalogProcessingOrchestrator,
  private readonly stitcher: Stitcher,
  private readonly createHash: () => Hash,
  private readonly pollingIntervalMs: number = 1000,
) {}
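Note that the engine receives a createHash factory rather than a single Hash: a Node Hash is single-use, so a fresh instance is needed each time a processing result is hashed (see the resultHash comparisons in Example #5, where the tests inject () => hash with a mock). A minimal sketch of such a factory; the sha256 choice is an assumption:

import { createHash, Hash } from 'crypto';

// Each call returns a fresh Hash, since a Hash cannot be updated again once digest() has run.
const createSha256: () => Hash = () => createHash('sha256');

const first = createSha256().update('entity a').digest('hex');
const second = createSha256().update('entity b').digest('hex');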
Example #4
Source File: DigestTransform.ts From electron-request with MIT License
private readonly digester: Hash;
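The digester field in DigestTransform presumably accumulates streamed data so that its digest can be checked against an expected value; the project's actual implementation is not shown here. As an illustrative sketch only, a digest-verifying stream Transform generally looks like this:

import { createHash, Hash } from 'crypto';
import { Transform, TransformCallback } from 'stream';

// Illustrative sketch, not electron-request's code: pass chunks through unchanged while
// feeding them to a Hash, then compare the digest when the stream ends.
class ChecksumTransform extends Transform {
  private readonly digester: Hash = createHash('sha512');

  constructor(private readonly expectedBase64: string) {
    super();
  }

  _transform(chunk: Buffer, _encoding: BufferEncoding, callback: TransformCallback): void {
    this.digester.update(chunk);
    callback(null, chunk);
  }

  _flush(callback: TransformCallback): void {
    const actual = this.digester.digest('base64');
    callback(actual === this.expectedBase64 ? null : new Error('checksum mismatch'));
  }
}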
Example #5
Source File: DefaultCatalogProcessingEngine.test.ts From backstage with Apache License 2.0
describe('DefaultCatalogProcessingEngine', () => {
  const db = {
    transaction: jest.fn(),
    getProcessableEntities: jest.fn(),
    updateProcessedEntity: jest.fn(),
    updateEntityCache: jest.fn(),
    listParents: jest.fn(),
  } as unknown as jest.Mocked<DefaultProcessingDatabase>;
  const orchestrator: jest.Mocked<CatalogProcessingOrchestrator> = {
    process: jest.fn(),
  };
  const stitcher = {
    stitch: jest.fn(),
  } as unknown as jest.Mocked<Stitcher>;
  const hash = {
    update: () => hash,
    digest: jest.fn(),
  } as unknown as jest.Mocked<Hash>;

  beforeEach(() => {
    jest.resetAllMocks();
  });
  it('should process stuff', async () => {
    orchestrator.process.mockResolvedValue({
      ok: true,
      completedEntity: {
        apiVersion: '1',
        kind: 'Location',
        metadata: { name: 'test' },
      },
      relations: [],
      errors: [],
      deferredEntities: [],
      state: {},
    });
    const engine = new DefaultCatalogProcessingEngine(
      getVoidLogger(),
      db,
      orchestrator,
      stitcher,
      () => hash,
    );
    db.transaction.mockImplementation(cb => cb((() => {}) as any));
    db.getProcessableEntities
      .mockImplementation(async () => {
        await engine.stop();
        return { items: [] };
      })
      .mockResolvedValueOnce({
        items: [
          {
            entityRef: 'foo',
            id: '1',
            unprocessedEntity: {
              apiVersion: '1',
              kind: 'Location',
              metadata: { name: 'test' },
            },
            resultHash: '',
            state: [] as any,
            nextUpdateAt: DateTime.now(),
            lastDiscoveryAt: DateTime.now(),
          },
        ],
      });
    db.listParents.mockResolvedValue({ entityRefs: [] });
    db.updateProcessedEntity.mockResolvedValue({
      previous: { relations: [] },
    });
    await engine.start();
    await waitForExpect(() => {
      expect(orchestrator.process).toBeCalledTimes(1);
      expect(orchestrator.process).toBeCalledWith({
        entity: {
          apiVersion: '1',
          kind: 'Location',
          metadata: { name: 'test' },
        },
        state: [], // State is forwarded as is, even if it's a bad format
      });
    });
    await engine.stop();
  });
  it('should process stuff even if the first attempt fail', async () => {
    orchestrator.process.mockResolvedValue({
      ok: true,
      completedEntity: {
        apiVersion: '1',
        kind: 'Location',
        metadata: { name: 'test' },
      },
      relations: [],
      errors: [],
      deferredEntities: [],
      state: {},
    });
    const engine = new DefaultCatalogProcessingEngine(
      getVoidLogger(),
      db,
      orchestrator,
      stitcher,
      () => hash,
    );
    db.transaction.mockImplementation(cb => cb((() => {}) as any));
    db.getProcessableEntities
      .mockImplementation(async () => {
        await engine.stop();
        return { items: [] };
      })
      .mockRejectedValueOnce(new Error('I FAILED'))
      .mockResolvedValueOnce({
        items: [
          {
            entityRef: 'foo',
            id: '1',
            unprocessedEntity: {
              apiVersion: '1',
              kind: 'Location',
              metadata: { name: 'test' },
            },
            resultHash: '',
            state: { cache: { myProcessor: { myKey: 'myValue' } } },
            nextUpdateAt: DateTime.now(),
            lastDiscoveryAt: DateTime.now(),
          },
        ],
      });
    db.listParents.mockResolvedValue({ entityRefs: [] });
    db.updateProcessedEntity.mockImplementation(async () => ({
      previous: { relations: [] },
    }));
    await engine.start();
    await waitForExpect(() => {
      expect(orchestrator.process).toBeCalledTimes(1);
      expect(orchestrator.process).toBeCalledWith({
        entity: {
          apiVersion: '1',
          kind: 'Location',
          metadata: { name: 'test' },
        },
        state: { cache: { myProcessor: { myKey: 'myValue' } } },
      });
    });
    await engine.stop();
  });
  it('runs fully when hash mismatches, early-outs when hash matches', async () => {
    const entity = {
      apiVersion: '1',
      kind: 'Location',
      metadata: { name: 'test' },
    };
    const refreshState = {
      id: '',
      entityRef: '',
      unprocessedEntity: entity,
      resultHash: 'the matching hash',
      state: {},
      nextUpdateAt: DateTime.now(),
      lastDiscoveryAt: DateTime.now(),
    };
    hash.digest.mockReturnValue('the matching hash');
    orchestrator.process.mockResolvedValue({
      ok: true,
      completedEntity: entity,
      relations: [],
      errors: [],
      deferredEntities: [],
      state: {},
    });
    const engine = new DefaultCatalogProcessingEngine(
      getVoidLogger(),
      db,
      orchestrator,
      stitcher,
      () => hash,
    );
    db.transaction.mockImplementation(cb => cb((() => {}) as any));
    db.listParents.mockResolvedValue({ entityRefs: [] });
    db.getProcessableEntities
      .mockResolvedValueOnce({
        items: [{ ...refreshState, resultHash: 'NOT RIGHT' }],
      })
      .mockResolvedValue({ items: [] });
    db.updateProcessedEntity.mockImplementation(async () => ({
      previous: { relations: [] },
    }));
    await engine.start();
    await waitForExpect(() => {
      expect(orchestrator.process).toBeCalledTimes(1);
      expect(hash.digest).toBeCalledTimes(1);
      expect(db.updateProcessedEntity).toBeCalledTimes(1);
      expect(db.listParents).toBeCalledTimes(1);
    });
    expect(db.updateEntityCache).not.toHaveBeenCalled();
    db.getProcessableEntities
      .mockReset()
      .mockResolvedValueOnce({
        items: [{ ...refreshState, state: { something: 'different' } }],
      })
      .mockResolvedValue({ items: [] });
    await waitForExpect(() => {
      expect(orchestrator.process).toBeCalledTimes(2);
      expect(hash.digest).toBeCalledTimes(2);
      expect(db.updateProcessedEntity).toBeCalledTimes(1);
      expect(db.updateEntityCache).toBeCalledTimes(1);
      expect(db.listParents).toBeCalledTimes(2);
    });
    expect(db.updateEntityCache).toHaveBeenCalledWith(expect.anything(), {
      id: '',
      state: { ttl: 5 },
    });
    await engine.stop();
  });
  it('should decrease the state ttl if there are errors', async () => {
    const entity = {
      apiVersion: '1',
      kind: 'Location',
      metadata: { name: 'test' },
    };
    const refreshState = {
      id: '',
      entityRef: '',
      unprocessedEntity: entity,
      resultHash: 'the matching hash',
      state: { some: 'value', ttl: 1 },
      nextUpdateAt: DateTime.now(),
      lastDiscoveryAt: DateTime.now(),
    };
    hash.digest.mockReturnValue('the matching hash');
    orchestrator.process.mockResolvedValue({
      ok: false,
      errors: [],
    });
    const engine = new DefaultCatalogProcessingEngine(
      getVoidLogger(),
      db,
      orchestrator,
      stitcher,
      () => hash,
    );
    db.transaction.mockImplementation(cb => cb((() => {}) as any));
    await engine.start();
    db.getProcessableEntities
      .mockResolvedValueOnce({
        items: [refreshState],
      })
      .mockResolvedValue({ items: [] });
    db.listParents.mockResolvedValue({ entityRefs: [] });
    db.updateProcessedEntity.mockImplementation(async () => ({
      previous: { relations: [] },
    }));
    await waitForExpect(() => {
      expect(db.updateEntityCache).toBeCalledTimes(1);
    });
    expect(db.updateEntityCache).toHaveBeenCalledWith(expect.anything(), {
      id: '',
      state: { some: 'value', ttl: 0 },
    });

    // Second run, the TTL should now reach 0 and the cache should be cleared
    db.getProcessableEntities
      .mockResolvedValueOnce({
        items: [
          {
            ...refreshState,
            state: db.updateEntityCache.mock.calls[0][1].state,
          },
        ],
      })
      .mockResolvedValue({ items: [] });
    db.updateEntityCache.mockReset();
    await waitForExpect(() => {
      expect(db.updateEntityCache).toBeCalledTimes(1);
    });
    expect(db.updateEntityCache).toHaveBeenCalledWith(expect.anything(), {
      id: '',
      state: {},
    });
    await engine.stop();
  });
  it('should stitch both the previous and new sources when relations change', async () => {
    const engine = new DefaultCatalogProcessingEngine(
      getVoidLogger(),
      db,
      orchestrator,
      stitcher,
      () => hash,
      100,
    );
    db.transaction.mockImplementation(cb => cb((() => {}) as any));
    const entity = {
      apiVersion: '1',
      kind: 'k',
      metadata: { name: 'me', namespace: 'ns' },
    };
    const processableEntity = {
      entityRef: 'foo',
      id: '1',
      unprocessedEntity: entity,
      resultHash: '',
      state: [] as any,
      nextUpdateAt: DateTime.now(),
      lastDiscoveryAt: DateTime.now(),
    };
    db.listParents.mockResolvedValue({ entityRefs: [] });
    db.getProcessableEntities
      .mockResolvedValueOnce({
        items: [processableEntity],
      })
      .mockResolvedValueOnce({
        items: [processableEntity],
      });
    db.updateProcessedEntity
      .mockImplementationOnce(async () => ({
        previous: { relations: [] },
      }))
      .mockImplementationOnce(async () => ({
        previous: {
          relations: [
            {
              originating_entity_id: '',
              type: 't',
              source_entity_ref: 'k:ns/other1',
              target_entity_ref: 'k:ns/me',
            },
            {
              originating_entity_id: '',
              type: 't',
              source_entity_ref: 'k:ns/other2',
              target_entity_ref: 'k:ns/me',
            },
          ],
        },
      }));
    orchestrator.process
      .mockResolvedValueOnce({
        ok: true,
        completedEntity: entity,
        relations: [
          {
            type: 't',
            source: { kind: 'k', namespace: 'ns', name: 'other1' },
            target: { kind: 'k', namespace: 'ns', name: 'me' },
          },
          {
            type: 't',
            source: { kind: 'k', namespace: 'ns', name: 'other2' },
            target: { kind: 'k', namespace: 'ns', name: 'me' },
          },
        ],
        errors: [],
        deferredEntities: [],
        state: {},
      })
      .mockResolvedValueOnce({
        ok: true,
        completedEntity: entity,
        relations: [
          {
            type: 't',
            source: { kind: 'k', namespace: 'ns', name: 'other2' },
            target: { kind: 'k', namespace: 'ns', name: 'me' },
          },
          {
            type: 't',
            source: { kind: 'k', namespace: 'ns', name: 'other3' },
            target: { kind: 'k', namespace: 'ns', name: 'me' },
          },
        ],
        errors: [],
        deferredEntities: [],
        state: {},
      });
    await engine.start();
    await waitForExpect(() => {
      expect(stitcher.stitch).toBeCalledTimes(2);
    });
    expect([...stitcher.stitch.mock.calls[0][0]]).toEqual(
      expect.arrayContaining(['k:ns/me', 'k:ns/other1', 'k:ns/other2']),
    );
    expect([...stitcher.stitch.mock.calls[1][0]]).toEqual(
      expect.arrayContaining(['k:ns/me', 'k:ns/other1', 'k:ns/other3']),
    );
    await engine.stop();
  });
});