feat(core): redesign workspace file hashing
parent 840b7deb84
commit 67dda10d55
@@ -165,7 +165,7 @@ forEachCli((cliName) => {
 expect(failedTests).toContain(`- ${myapp}`);
 expect(failedTests).toContain(`- ${myapp2}`);
 expect(failedTests).toContain(`Failed projects:`);
-expect(readJson('dist/.nx-results')).toEqual({
+expect(readJson('node_modules/.cache/nx/results.json')).toEqual({
 command: 'test',
 results: {
 [myapp]: false,
@@ -318,7 +318,7 @@ forEachCli((cliName) => {
 expect(failedTests).toContain(
 'You can isolate the above projects by passing: --only-failed'
 );
-expect(readJson('dist/.nx-results')).toEqual({
+expect(readJson('node_modules/.cache/nx/results.json')).toEqual({
 command: 'test',
 results: {
 [myapp]: false,

@@ -140,7 +140,6 @@
 "fork-ts-checker-webpack-plugin": "^3.1.1",
 "fs-extra": "7.0.1",
 "glob": "7.1.4",
-"hasha": "5.1.0",
 "html-webpack-plugin": "^3.2.0",
 "husky": "^3.0.3",
 "identity-obj-proxy": "3.0.0",

@@ -993,7 +993,7 @@ linter.defineParser('@typescript-eslint/parser', parser);
 linter.defineRule(enforceModuleBoundariesRuleName, enforceModuleBoundaries);

 function createFile(f) {
-return { file: f, ext: extname(f), mtime: 1 };
+return { file: f, ext: extname(f), hash: '' };
 }

 function runRule(

@@ -1,5 +1,5 @@
 module.exports = {
-name: 'tao',
+name: 'workspace',
 preset: '../../jest.config.js',
 transform: {
 '^.+\\.[tj]sx?$': 'ts-jest',

@@ -59,7 +59,6 @@
 "dotenv": "8.2.0",
 "ignore": "5.0.4",
 "npm-run-all": "4.1.5",
-"hasha": "5.1.0",
 "opn": "^5.3.0",
 "rxjs": "^6.5.4",
 "semver": "5.4.1",

@@ -106,5 +106,5 @@ describe('WorkspaceIntegrityChecks', () => {
 });

 function createFile(f) {
-return { file: f, ext: extname(f), mtime: 1 };
+return { file: f, ext: extname(f), hash: '' };
 }

@@ -2,7 +2,7 @@ import * as fs from 'fs';

 import { WorkspaceResults } from './workspace-results';
 import { serializeJson } from '../utils/fileutils';
-import { ProjectType } from '..//core/project-graph';
+import { ProjectType } from '../core/project-graph';

 describe('WorkspacesResults', () => {
 let results: WorkspaceResults;

@@ -43,7 +43,7 @@ describe('WorkspacesResults', () => {
 results.saveResults();

 expect(fs.writeSync).not.toHaveBeenCalled();
-expect(fs.unlinkSync).toHaveBeenCalledWith('dist/.nx-results');
+expect(fs.unlinkSync).toHaveBeenCalled();
 });
 });

@@ -53,23 +53,6 @@ describe('WorkspacesResults', () => {

 expect(results.getResult('proj')).toBe(false);
 });
-
-it('should save results to file system', () => {
-spyOn(fs, 'writeFileSync');
-
-results.setResult('proj', false);
-results.saveResults();
-
-expect(fs.writeFileSync).toHaveBeenCalledWith(
-'dist/.nx-results',
-serializeJson({
-command: 'test',
-results: {
-proj: false,
-},
-})
-);
-});
 });

 describe('when results already exist', () => {

@@ -97,7 +80,6 @@ describe('WorkspacesResults', () => {
 },
 });

-expect(fs.readFileSync).toHaveBeenCalledWith('dist/.nx-results', 'utf-8');
 expect(results.getResult('proj')).toBe(false);
 });

@@ -1,9 +1,17 @@
 import * as fs from 'fs';
-import { readJsonFile, writeJsonFile } from '../utils/fileutils';
-import { unlinkSync } from 'fs';
+import {
+directoryExists,
+readJsonFile,
+writeJsonFile,
+} from '../utils/fileutils';
+import { existsSync, unlinkSync } from 'fs';
 import { ProjectGraphNode } from '../core/project-graph';
+import { join } from 'path';
+import { appRootPath } from '@nrwl/workspace/src/utils/app-root';
+import * as fsExtra from 'fs-extra';

-const RESULTS_FILE = 'dist/.nx-results';
+const resultsDir = join(appRootPath, 'node_modules', '.cache', 'nx');
+const resultsFile = join(resultsDir, 'results.json');

 interface NxResults {
 command: string;

@@ -31,11 +39,11 @@ export class WorkspaceResults {
 private command: string,
 private projects: Record<string, ProjectGraphNode>
 ) {
-const resultsExists = fs.existsSync(RESULTS_FILE);
+const resultsExists = fs.existsSync(resultsFile);
 this.startedWithFailedProjects = false;
 if (resultsExists) {
 try {
-const commandResults = readJsonFile(RESULTS_FILE);
+const commandResults = readJsonFile(resultsFile);
 this.startedWithFailedProjects = commandResults.command === command;
 if (this.startedWithFailedProjects) {
 this.commandResults = commandResults;

@@ -56,10 +64,19 @@ export class WorkspaceResults {
 }

 saveResults() {
+try {
+if (!existsSync(resultsDir)) {
+fsExtra.ensureDirSync(resultsDir);
+}
+} catch (e) {
+if (!directoryExists(resultsDir)) {
+throw new Error(`Failed to create directory: ${resultsDir}`);
+}
+}
 if (Object.values<boolean>(this.commandResults.results).includes(false)) {
-writeJsonFile(RESULTS_FILE, this.commandResults);
-} else if (fs.existsSync(RESULTS_FILE)) {
-unlinkSync(RESULTS_FILE);
+writeJsonFile(resultsFile, this.commandResults);
+} else if (fs.existsSync(resultsFile)) {
+unlinkSync(resultsFile);
 }
 }

@@ -116,7 +116,7 @@ describe('project graph', () => {
 files = Object.keys(filesJson).map((f) => ({
 file: f,
 ext: extname(f),
-mtime: 1,
+hash: 'some-hash',
 }));
 readFileAtRevision = (p, r) => {
 const fromFs = filesJson[`./${p}`];

@@ -139,13 +139,13 @@ describe('project graph', () => {
 {
 file: 'something-for-api.txt',
 ext: '.txt',
-mtime: 1,
+hash: 'some-hash',
 getChanges: () => [new WholeFileChange()],
 },
 {
 file: 'libs/ui/src/index.ts',
 ext: '.ts',
-mtime: 1,
+hash: 'some-hash',
 getChanges: () => [new WholeFileChange()],
 },
 ]);

@@ -211,7 +211,7 @@ describe('project graph', () => {
 {
 file: 'package.json',
 ext: '.json',
-mtime: 1,
+hash: 'some-hash',
 getChanges: () => jsonDiff(packageJson, updatedPackageJson),
 },
 ]);

@@ -279,7 +279,7 @@ describe('project graph', () => {
 {
 file: 'package.json',
 ext: '.json',
-mtime: 1,
+hash: 'some-hash',
 getChanges: () => jsonDiff(packageJson, updatedPackageJson),
 },
 ]);

@@ -300,7 +300,7 @@ describe('project graph', () => {
 {
 file: 'package.json',
 ext: '.json',
-mtime: 1,
+hash: 'some-hash',
 getChanges: () => jsonDiff(packageJson, updatedPackageJson),
 },
 ]);

@@ -35,7 +35,7 @@ describe('getImplicitlyTouchedProjectsByJsonChanges', () => {
 [
 {
 file: 'package.json',
-mtime: 0,
+hash: 'some-hash',
 ext: '.json',
 getChanges: () => [
 {

@@ -60,7 +60,7 @@ describe('getImplicitlyTouchedProjectsByJsonChanges', () => {
 [
 {
 file: 'package.json',
-mtime: 0,
+hash: 'some-hash',
 ext: '.json',
 getChanges: () => [new WholeFileChange()],
 },

@@ -67,7 +67,7 @@ describe('getTouchedNpmPackages', () => {
 [
 {
 file: 'package.json',
-mtime: 0,
+hash: 'some-hash',
 ext: '.json',
 getChanges: () => [
 {

@@ -98,7 +98,7 @@ describe('getTouchedNpmPackages', () => {
 [
 {
 file: 'package.json',
-mtime: 0,
+hash: 'some-hash',
 ext: '.json',
 getChanges: () => [
 {

@@ -137,7 +137,7 @@ describe('getTouchedNpmPackages', () => {
 [
 {
 file: 'package.json',
-mtime: 0,
+hash: 'some-hash',
 ext: '.json',
 getChanges: () => [
 {

@@ -177,7 +177,7 @@ describe('getTouchedNpmPackages', () => {
 [
 {
 file: 'package.json',
-mtime: 0,
+hash: 'some-hash',
 ext: '.json',
 getChanges: () => [new WholeFileChange()],
 },

@@ -9,7 +9,7 @@ describe('getTouchedProjectsInNxJson', () => {
 {
 file: 'source.ts',
 ext: '.ts',
-mtime: 0,
+hash: 'some-hash',
 getChanges: () => [new WholeFileChange()],
 },
 ],

@@ -32,7 +32,7 @@ describe('getTouchedProjectsInNxJson', () => {
 {
 file: 'nx.json',
 ext: '.json',
-mtime: 0,
+hash: 'some-hash',
 getChanges: () => [new WholeFileChange()],
 },
 ],

@@ -58,7 +58,7 @@ describe('getTouchedProjectsInNxJson', () => {
 {
 file: 'nx.json',
 ext: '.json',
-mtime: 0,
+hash: 'some-hash',
 getChanges: () => [
 {
 type: DiffType.Modified,

@@ -93,7 +93,7 @@ describe('getTouchedProjectsInNxJson', () => {
 {
 file: 'nx.json',
 ext: '.json',
-mtime: 0,
+hash: 'some-hash',
 getChanges: () => [
 {
 type: DiffType.Added,

@@ -138,7 +138,7 @@ describe('getTouchedProjectsInNxJson', () => {
 {
 file: 'nx.json',
 ext: '.json',
-mtime: 0,
+hash: 'some-hash',
 getChanges: () => [
 {
 type: DiffType.Deleted,

@@ -175,7 +175,7 @@ describe('getTouchedProjectsInNxJson', () => {
 {
 file: 'nx.json',
 ext: '.json',
-mtime: 0,
+hash: 'some-hash',
 getChanges: () => [
 {
 type: DiffType.Modified,

@@ -43,7 +43,7 @@ describe('getTouchedProjectsFromTsConfig', () => {
 {
 file: 'source.ts',
 ext: '.ts',
-mtime: 0,
+hash: 'some-hash',
 getChanges: () => [new WholeFileChange()],
 },
 ],

@@ -67,7 +67,7 @@ describe('getTouchedProjectsFromTsConfig', () => {
 {
 file: 'tsconfig.json',
 ext: '.json',
-mtime: 0,
+hash: 'some-hash',
 getChanges: () => [new WholeFileChange()],
 },
 ],

@@ -87,7 +87,7 @@ describe('getTouchedProjectsFromTsConfig', () => {
 {
 file: 'tsconfig.json',
 ext: '.json',
-mtime: 0,
+hash: 'some-hash',
 getChanges: () =>
 jsonDiff(
 {

@@ -119,7 +119,7 @@ describe('getTouchedProjectsFromTsConfig', () => {
 {
 file: 'tsconfig.json',
 ext: '.json',
-mtime: 0,
+hash: 'some-hash',
 getChanges: () =>
 jsonDiff(
 {

@@ -151,7 +151,7 @@ describe('getTouchedProjectsFromTsConfig', () => {
 {
 file: 'tsconfig.json',
 ext: '.json',
-mtime: 0,
+hash: 'some-hash',
 getChanges: () =>
 jsonDiff(
 {

@@ -185,7 +185,7 @@ describe('getTouchedProjectsFromTsConfig', () => {
 {
 file: 'tsconfig.json',
 ext: '.json',
-mtime: 0,
+hash: 'some-hash',
 getChanges: () =>
 jsonDiff(
 {

@@ -217,7 +217,7 @@ describe('getTouchedProjectsFromTsConfig', () => {
 {
 file: 'tsconfig.json',
 ext: '.json',
-mtime: 0,
+hash: 'some-hash',
 getChanges: () =>
 jsonDiff(
 {

@@ -254,7 +254,7 @@ describe('getTouchedProjectsFromTsConfig', () => {
 {
 file: 'tsconfig.json',
 ext: '.json',
-mtime: 0,
+hash: 'some-hash',
 getChanges: () =>
 jsonDiff(
 {

@@ -289,7 +289,7 @@ describe('getTouchedProjectsFromTsConfig', () => {
 {
 file: 'tsconfig.json',
 ext: '.json',
-mtime: 0,
+hash: 'some-hash',
 getChanges: () =>
 jsonDiff(
 {

@@ -9,7 +9,7 @@ describe('getTouchedProjectsInWorkspaceJson', () => {
 {
 file: 'source.ts',
 ext: '.ts',
-mtime: 0,
+hash: 'some-hash',
 getChanges: () => [new WholeFileChange()],
 },
 ],

@@ -32,7 +32,7 @@ describe('getTouchedProjectsInWorkspaceJson', () => {
 {
 file: 'workspace.json',
 ext: '.json',
-mtime: 0,
+hash: 'some-hash',
 getChanges: () => [new WholeFileChange()],
 },
 ],

@@ -57,7 +57,7 @@ describe('getTouchedProjectsInWorkspaceJson', () => {
 {
 file: 'workspace.json',
 ext: '.json',
-mtime: 0,
+hash: 'some-hash',
 getChanges: () => [
 {
 type: DiffType.Modified,

@@ -91,7 +91,7 @@ describe('getTouchedProjectsInWorkspaceJson', () => {
 {
 file: 'workspace.json',
 ext: '.json',
-mtime: 0,
+hash: 'some-hash',
 getChanges: () => [
 {
 type: DiffType.Added,

@@ -132,7 +132,7 @@ describe('getTouchedProjectsInWorkspaceJson', () => {
 {
 file: 'workspace.json',
 ext: '.json',
-mtime: 0,
+hash: 'some-hash',
 getChanges: () => [
 {
 type: DiffType.Deleted,

@@ -167,7 +167,7 @@ describe('getTouchedProjectsInWorkspaceJson', () => {
 {
 file: 'workspace.json',
 ext: '.json',
-mtime: 0,
+hash: 'some-hash',
 getChanges: () => [
 {
 type: DiffType.Modified,

@@ -7,13 +7,13 @@ describe('getTouchedProjects', () => {
 {
 file: 'libs/a/index.ts',
 ext: '.ts',
-mtime: 0,
+hash: 'some-hash',
 getChanges: () => [new WholeFileChange()],
 },
 {
 file: 'libs/b/index.ts',
 ext: '.ts',
-mtime: 0,
+hash: 'some-hash',
 getChanges: () => [new WholeFileChange()],
 },
 ];

@@ -30,7 +30,7 @@ describe('getTouchedProjects', () => {
 {
 file: 'libs/a-b/index.ts',
 ext: '.ts',
-mtime: 0,
+hash: 'some-hash',
 getChanges: () => [new WholeFileChange()],
 },
 ];

@@ -47,7 +47,7 @@ describe('getTouchedProjects', () => {
 {
 file: 'libs/a-b/index.ts',
 ext: '.ts',
-mtime: 0,
+hash: 'some-hash',
 getChanges: () => [new WholeFileChange()],
 },
 ];

@@ -64,7 +64,7 @@ describe('getTouchedProjects', () => {
 {
 file: 'libs/a/b/index.ts',
 ext: '.ts',
-mtime: 0,
+hash: 'some-hash',
 getChanges: () => [new WholeFileChange()],
 },
 ];

@@ -22,17 +22,19 @@ describe('createFileMap', () => {
 },
 };
 const files = [
-{ file: 'apps/demo/src/main.ts', mtime: 1, ext: '.ts' },
-{ file: 'apps/demo-e2e/src/main.ts', mtime: 1, ext: '.ts' },
-{ file: 'libs/ui/src/index.ts', mtime: 1, ext: '.ts' },
+{ file: 'apps/demo/src/main.ts', hash: 'some-hash', ext: '.ts' },
+{ file: 'apps/demo-e2e/src/main.ts', hash: 'some-hash', ext: '.ts' },
+{ file: 'libs/ui/src/index.ts', hash: 'some-hash', ext: '.ts' },
 ];

 const result = createFileMap(workspaceJson, files);

 expect(result).toEqual({
-demo: [{ file: 'apps/demo/src/main.ts', mtime: 1, ext: '.ts' }],
-'demo-e2e': [{ file: 'apps/demo-e2e/src/main.ts', mtime: 1, ext: '.ts' }],
-ui: [{ file: 'libs/ui/src/index.ts', mtime: 1, ext: '.ts' }],
+demo: [{ file: 'apps/demo/src/main.ts', hash: 'some-hash', ext: '.ts' }],
+'demo-e2e': [
+{ file: 'apps/demo-e2e/src/main.ts', hash: 'some-hash', ext: '.ts' },
+],
+ui: [{ file: 'libs/ui/src/index.ts', hash: 'some-hash', ext: '.ts' }],
 });
 });
 });

@@ -6,16 +6,17 @@ import { extname } from 'path';
 import { NxArgs } from '../command-line/utils';
 import { WorkspaceResults } from '../command-line/workspace-results';
 import { appRootPath } from '../utils/app-root';
-import { readJsonFile, fileExists } from '../utils/fileutils';
+import { fileExists, readJsonFile } from '../utils/fileutils';
 import { jsonDiff } from '../utils/json-diff';
 import { ProjectGraphNode } from './project-graph';
 import { Environment, NxJson } from './shared-interfaces';
+import { defaultFileHasher } from './hasher/file-hasher';

 const ignore = require('ignore');

 export interface FileData {
 file: string;
-mtime: number;
+hash: string;
 ext: string;
 }

@@ -47,15 +48,15 @@ export function calculateFileChanges(
 if (ignore) {
 files = files.filter((f) => !ignore.ignores(f));
 }

 return files.map((f) => {
 const ext = extname(f);
-const _mtime = mtime(`${appRootPath}/${f}`);
+// Memoize results so we don't recalculate on successive invocation.
+const hash = defaultFileHasher.hashFile(f);

 return {
 file: f,
 ext,
-mtime: _mtime,
+hash,
 getChanges: (): Change[] => {
 if (!nxArgs) {
 return [new WholeFileChange()];

@@ -110,11 +111,11 @@ function defaultReadFileAtRevision(
 }

 function getFileData(filePath: string): FileData {
-const stat = fs.statSync(filePath);
+const file = path.relative(appRootPath, filePath).split(path.sep).join('/');
 return {
-file: path.relative(appRootPath, filePath).split(path.sep).join('/'),
+file: file,
+hash: defaultFileHasher.hashFile(filePath),
 ext: path.extname(filePath),
-mtime: stat.mtimeMs,
 };
 }

@@ -197,13 +198,22 @@ export function rootWorkspaceFileNames(): string[] {
 return [`package.json`, workspaceFileName(), `nx.json`, `tsconfig.base.json`];
 }

+export function rootWorkspaceFileData(): FileData[] {
+return rootWorkspaceFileNames().map((f) =>
+getFileData(`${appRootPath}/${f}`)
+);
+}
+
 export function readWorkspaceFiles(): FileData[] {
 const workspaceJson = readWorkspaceJson();
-const files = [];
-
-files.push(
-...rootWorkspaceFileNames().map((f) => getFileData(`${appRootPath}/${f}`))
-);
+if (defaultFileHasher.usesGitForHashing) {
+return defaultFileHasher
+.allFiles()
+.map((f) => getFileData(`${appRootPath}/${f}`));
+} else {
+const files = [];
+files.push(...rootWorkspaceFileData());

 // Add known workspace files and directories
 files.push(...allFilesInDir(appRootPath, false));

@@ -217,6 +227,7 @@ export function readWorkspaceFiles(): FileData[] {

 return files;
 }
+}

@@ -229,17 +240,6 @@ export function readEnvironment(
 return { nxJson, workspaceJson, workspaceResults };
 }

-/**
- * Returns the time when file was last modified
- * Returns -Infinity for a non-existent file
- */
-export function mtime(filePath: string): number {
-if (!fs.existsSync(filePath)) {
-return -Infinity;
-}
-return fs.statSync(filePath).mtimeMs;
-}
-
 export function normalizedProjectRoot(p: ProjectGraphNode): string {
 if (p.data && p.data.root) {
 const path = p.data.root.split('/').filter((v) => !!v);

@@ -252,3 +252,15 @@ export function normalizedProjectRoot(p: ProjectGraphNode): string {
 return '';
 }
 }
+
+export function filesChanged(a: FileData[], b: FileData[]) {
+if (a.length !== b.length) return true;
+const sortedA = a.sort((x, y) => x.file.localeCompare(y.file));
+const sortedB = b.sort((x, y) => x.file.localeCompare(y.file));
+
+for (let i = 0; i < sortedA.length; ++i) {
+if (sortedA[i].file !== sortedB[i].file) return true;
+if (sortedA[i].hash !== sortedB[i].hash) return true;
+}
+return false;
+}

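Aside: a minimal sketch of what the new `filesChanged` helper implies for callers. The file names and hash values below are invented for illustration, and `FileData` is assumed to have the `{ file, ext, hash }` shape introduced above:

import { FileData, filesChanged } from './file-utils';

const before: FileData[] = [
  { file: 'libs/a/index.ts', ext: '.ts', hash: 'aaa' },
  { file: 'libs/b/index.ts', ext: '.ts', hash: 'bbb' },
];
const after: FileData[] = [
  { file: 'libs/a/index.ts', ext: '.ts', hash: 'aaa' },
  { file: 'libs/b/index.ts', ext: '.ts', hash: 'ccc' }, // content changed
];

// Order does not matter: both arrays are sorted by file name before comparing.
console.log(filesChanged(before, after)); // true  (hash of libs/b/index.ts differs)
console.log(filesChanged(before, before)); // false (identical snapshots)

Comparing content hashes instead of mtimes is what makes the project-graph cache below safe across clones, CI restores, and branch switches, where modification times are meaningless.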
packages/workspace/src/core/hasher/file-hasher.ts (new file, 71 lines)
@@ -0,0 +1,71 @@
+import { getFileHashes } from './git-hasher';
+import { readFileSync } from 'fs';
+import { defaultHashing, HashingImp } from './hashing-impl';
+import { appRootPath } from '../../utils/app-root';
+
+type PathAndTransformer = {
+path: string;
+transformer: (x: string) => string | null;
+};
+
+export function extractNameAndVersion(content: string): string {
+try {
+const c = JSON.parse(content);
+return `${c.name}${c.version}`;
+} catch (e) {
+return '';
+}
+}
+
+export class FileHasher {
+fileHashes: { [path: string]: string } = {};
+usesGitForHashing = false;
+
+constructor(private readonly hashing: HashingImp) {
+this.init();
+}
+
+init() {
+this.fileHashes = {};
+this.getHashesFromGit();
+this.usesGitForHashing = Object.keys(this.fileHashes).length > 0;
+}
+
+hashFile(path: string, transformer: (x: string) => string | null = null) {
+const relativePath = path.startsWith(appRootPath)
+? path.substr(appRootPath.length + 1)
+: path;
+if (!this.fileHashes[relativePath]) {
+this.fileHashes[relativePath] = this.processPath({ path, transformer });
+}
+return this.fileHashes[relativePath];
+}
+
+allFiles() {
+return Object.keys(this.fileHashes);
+}
+
+private getHashesFromGit() {
+const sliceIndex = appRootPath.length + 1;
+getFileHashes(appRootPath).forEach((hash, filename) => {
+this.fileHashes[filename.substr(sliceIndex)] = hash;
+});
+}
+
+private processPath(pathAndTransformer: PathAndTransformer): string {
+try {
+if (pathAndTransformer.transformer) {
+const transformedFile = pathAndTransformer.transformer(
+readFileSync(pathAndTransformer.path).toString()
+);
+return this.hashing.hashArray([transformedFile]);
+} else {
+return this.hashing.hashFile(pathAndTransformer.path);
+}
+} catch (e) {
+return '';
+}
+}
+}
+
+export const defaultFileHasher = new FileHasher(defaultHashing);
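For orientation, a rough sketch of how `defaultFileHasher` is consumed elsewhere in this commit; the workspace layout is hypothetical:

import { defaultFileHasher, extractNameAndVersion } from './file-hasher';

// When the workspace is a git repo, the constructor pre-populates
// fileHashes from git and usesGitForHashing is true; otherwise files
// are hashed lazily on first access and memoized.
if (defaultFileHasher.usesGitForHashing) {
  console.log(`git knows ${defaultFileHasher.allFiles().length} files`);
}

// Absolute paths are normalized to workspace-relative keys before lookup.
const h1 = defaultFileHasher.hashFile('package.json');

// A transformer hashes a derived view of the content instead of the raw
// bytes, e.g. only name+version of a package.json:
const h2 = defaultFileHasher.hashFile('package.json', extractNameAndVersion);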
packages/workspace/src/core/hasher/git-hasher.ts (new file, 115 lines)
@@ -0,0 +1,115 @@
+import { spawnSync } from 'child_process';
+
+function parseGitLsTree(output: string): Map<string, string> {
+const changes: Map<string, string> = new Map<string, string>();
+if (output) {
+const gitRegex: RegExp = /([0-9]{6})\s(blob|commit)\s([a-f0-9]{40})\s*(.*)/;
+output.split('\n').forEach((line) => {
+if (line) {
+const matches: RegExpMatchArray | null = line.match(gitRegex);
+if (matches && matches[3] && matches[4]) {
+const hash: string = matches[3];
+const filename: string = matches[4];
+changes.set(filename, hash);
+} else {
+throw new Error(`Cannot parse git ls-tree input: "${line}"`);
+}
+}
+});
+}
+return changes;
+}
+
+function parseGitStatus(output: string): Map<string, string> {
+const changes: Map<string, string> = new Map<string, string>();
+if (!output) {
+return changes;
+}
+output
+.trim()
+.split('\n')
+.forEach((line) => {
+const [changeType, ...filenames]: string[] = line
+.trim()
+.split(' ')
+.filter((linePart) => !!linePart);
+if (changeType && filenames && filenames.length > 0) {
+changes.set(filenames[filenames.length - 1], changeType);
+}
+});
+return changes;
+}
+
+function spawnProcess(command: string, args: string[], cwd: string): string {
+const r = spawnSync(command, args, { cwd });
+if (r.status !== 0) {
+throw new Error(`Failed to run ${command} ${args.join(' ')}`);
+}
+return r.stdout.toString().trim();
+}
+
+function getGitHashForFiles(
+filesToHash: string[],
+path: string
+): Map<string, string> {
+const changes: Map<string, string> = new Map<string, string>();
+if (filesToHash.length) {
+const hashStdout = spawnProcess(
+'git',
+['hash-object', ...filesToHash],
+path
+);
+const hashes: string[] = hashStdout.split('\n');
+if (hashes.length !== filesToHash.length) {
+throw new Error(
+`Passed ${filesToHash.length} file paths to Git to hash, but received ${hashes.length} hashes.`
+);
+}
+for (let i: number = 0; i < hashes.length; i++) {
+const hash: string = hashes[i];
+const filePath: string = filesToHash[i];
+changes.set(filePath, hash);
+}
+}
+return changes;
+}
+
+function gitLsTree(path: string): Map<string, string> {
+return parseGitLsTree(spawnProcess('git', ['ls-tree', 'HEAD', '-r'], path));
+}
+
+function gitStatus(path: string): Map<string, string> {
+const filesToHash: string[] = [];
+parseGitStatus(
+spawnProcess('git', ['status', '-s', '-u', '.'], path)
+).forEach((changeType: string, filename: string) => {
+if (changeType !== 'D') {
+filesToHash.push(filename);
+}
+});
+return getGitHashForFiles(filesToHash, path);
+}
+
+export function getFileHashes(path: string): Map<string, string> {
+const res = new Map<string, string>();
+
+try {
+const m1 = gitLsTree(path);
+m1.forEach((hash: string, filename: string) => {
+res.set(`${path}/${filename}`, hash);
+});
+const m2 = gitStatus(path);
+m2.forEach((hash: string, filename: string) => {
+res.set(`${path}/${filename}`, hash);
+});
+return res;
+} catch (e) {
+// this strategy is only used for speeding things up.
+// ignoring all the errors
+if (process.env.NX_GIT_HASHER_LOGGING) {
+console.error(`Internal error:`);
+console.error(e);
+}
+return new Map<string, string>();
+}
+}
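A sketch of what a caller of the git strategy above sees; the path and the comments about key shape are illustrative:

import { getFileHashes } from './git-hasher';

// getFileHashes runs `git ls-tree HEAD -r` for committed blobs, then
// `git hash-object` over files that `git status -s -u .` reports as
// changed (deletions excluded), so working-tree edits win over HEAD.
// Any git failure yields an empty map, which makes FileHasher fall
// back to hashing file contents itself.
const hashes = getFileHashes('/path/to/workspace');
// keys look like '/path/to/workspace/libs/a/index.ts';
// values are 40-character git object hashes
console.log(hashes.size);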
@@ -1,8 +1,7 @@
-import { Hasher, extractNameAndVersion } from './hasher';
+import { Hasher } from './hasher';
+import { extractNameAndVersion } from '@nrwl/workspace/src/core/hasher/file-hasher';

-const hasha = require('hasha');
 const fs = require('fs');
-jest.mock('hasha');
 jest.mock('fs');

 describe('Hasher', () => {

@@ -14,14 +13,13 @@ describe('Hasher', () => {
 'tsconfig.base.json': 'tsconfig.base.json.hash',
 'workspace.json': 'workspace.json.hash',
 };
 beforeEach(() => {
-hasha.mockImplementation((values) => values.join('|'));
-hasha.fromFile.mockImplementation((path) => Promise.resolve(hashes[path]));
-fs.statSync.mockReturnValue({ size: 100 });
 fs.readFileSync.mockImplementation(() =>
 JSON.stringify({ dependencies: {}, devDependencies: {} })
 );
 });

+function createHashing(): any {
+return {
+hashArray: (values: string[]) => values.join('|'),
+hashFile: (path: string) => hashes[path],
+};
+}
+
 it('should create project hash', async (done) => {
 hashes['/file'] = 'file.hash';

@@ -31,7 +29,7 @@ describe('Hasher', () => {
 proj: {
 name: 'proj',
 type: 'lib',
-data: { files: [{ file: '/file', ext: '.ts', mtime: 1 }] },
+data: { files: [{ file: '/file', ext: '.ts', hash: 'some-hash' }] },
 },
 },
 dependencies: {

@@ -41,14 +39,19 @@ describe('Hasher', () => {
 {} as any,
 {
 runtimeCacheInputs: ['echo runtime123', 'echo runtime456'],
-}
+},
+createHashing()
 );

-const hash = await hasher.hash({
+const hash = (
+await hasher.hashTasks([
+{
 target: { project: 'proj', target: 'build' },
 id: 'proj-build',
 overrides: { prop: 'prop-value' },
-});
+},
+])
+)[0];

 expect(hash.value).toContain('yarn.lock.hash'); //implicits
 expect(hash.value).toContain('file.hash'); //project files

@@ -87,15 +90,18 @@ describe('Hasher', () => {
 {} as any,
 {
 runtimeCacheInputs: ['boom'],
-}
+},
+createHashing()
 );

 try {
-await hasher.hash({
+await hasher.hashTasks([
+{
 target: { project: 'proj', target: 'build' },
 id: 'proj-build',
 overrides: {},
-});
+},
+]);
 fail('Should not be here');
 } catch (e) {
 expect(e.message).toContain(

@@ -115,12 +121,16 @@ describe('Hasher', () => {
 parent: {
 name: 'parent',
 type: 'lib',
-data: { files: [{ file: '/filea', ext: '.ts', mtime: 1 }] },
+data: {
+files: [{ file: '/filea', ext: '.ts', hash: 'some-hash' }],
+},
 },
 child: {
 name: 'child',
 type: 'lib',
-data: { files: [{ file: '/fileb', ext: '.ts', mtime: 1 }] },
+data: {
+files: [{ file: '/fileb', ext: '.ts', hash: 'some-hash' }],
+},
 },
 },
 dependencies: {

@@ -128,17 +138,22 @@ describe('Hasher', () => {
 },
 },
 {} as any,
-{}
+{},
+createHashing()
 );

-const hasha = await hasher.hash({
+const hash = (
+await hasher.hashTasks([
+{
 target: { project: 'parent', target: 'build' },
 id: 'parent-build',
 overrides: { prop: 'prop-value' },
-});
+},
+])
+)[0];

 // note that the parent hash is based on parent source files only!
-expect(hasha.details.sources).toEqual({
+expect(hash.details.sources).toEqual({
 parent: 'a.hash',
 child: 'b.hash',
 });

@@ -155,12 +170,16 @@ describe('Hasher', () => {
 proja: {
 name: 'proja',
 type: 'lib',
-data: { files: [{ file: '/filea', ext: '.ts', mtime: 1 }] },
+data: {
+files: [{ file: '/filea', ext: '.ts', hash: 'some-hash' }],
+},
 },
 projb: {
 name: 'projb',
 type: 'lib',
-data: { files: [{ file: '/fileb', ext: '.ts', mtime: 1 }] },
+data: {
+files: [{ file: '/fileb', ext: '.ts', hash: 'some-hash' }],
+},
 },
 },
 dependencies: {

@@ -169,14 +188,19 @@ describe('Hasher', () => {
 },
 },
 {} as any,
-{}
+{},
+createHashing()
 );

-const hasha = await hasher.hash({
+const hasha = (
+await hasher.hashTasks([
+{
 target: { project: 'proja', target: 'build' },
 id: 'proja-build',
 overrides: { prop: 'prop-value' },
-});
+},
+])
+)[0];

 expect(hasha.value).toContain('yarn.lock.hash'); //implicits
 expect(hasha.value).toContain('a.hash'); //project files

@@ -186,11 +210,15 @@ describe('Hasher', () => {
 expect(hasha.value).toContain('build'); //target
 expect(hasha.details.sources).toEqual({ proja: 'a.hash', projb: 'b.hash' });

-const hashb = await hasher.hash({
+const hashb = (
+await hasher.hashTasks([
+{
 target: { project: 'projb', target: 'build' },
 id: 'projb-build',
 overrides: { prop: 'prop-value' },
-});
+},
+])
+)[0];

 expect(hashb.value).toContain('yarn.lock.hash'); //implicits
 expect(hashb.value).toContain('a.hash'); //project files

@@ -203,44 +231,6 @@ describe('Hasher', () => {
 done();
 });

-it('should handle large binary files in a special way', async (done) => {
-fs.statSync.mockImplementation((f) => {
-if (f === '/file') return { size: 1000000 * 5 + 1 };
-return { size: 100 };
-});
-hashes['/file'] = 'file.hash';
-const hasher = new Hasher(
-{
-nodes: {
-proja: {
-name: 'proj',
-type: 'lib',
-data: { files: [{ file: '/file', ext: '.ts', mtime: 1 }] },
-},
-},
-dependencies: {},
-},
-{} as any,
-{}
-);
-
-const hash = (
-await hasher.hash({
-target: { project: 'proja', target: 'build' },
-id: 'proja-build',
-overrides: { prop: 'prop-value' },
-})
-).value;
-
-expect(hash).toContain('yarn.lock.hash'); //implicits
-expect(hash).toContain('5000001'); //project files
-expect(hash).toContain('prop-value'); //overrides
-expect(hash).toContain('proj'); //project
-expect(hash).toContain('build'); //target
-
-done();
-});
-
 describe('extractNameAndVersion', () => {
 it('should work', () => {
 const nameAndVersion = extractNameAndVersion(`

@@ -1,11 +1,17 @@
-import { ProjectGraph } from '../core/project-graph';
-import { NxJson } from '../core/shared-interfaces';
-import { Task } from './tasks-runner';
-import { statSync, readFileSync } from 'fs';
-import { rootWorkspaceFileNames } from '../core/file-utils';
+import { ProjectGraph } from '../project-graph';
+import { NxJson } from '../shared-interfaces';
+import { Task } from '../../tasks-runner/tasks-runner';
+import { readFileSync } from 'fs';
+import { rootWorkspaceFileNames } from '../file-utils';
 import { execSync } from 'child_process';
+import {
+defaultFileHasher,
+extractNameAndVersion,
+FileHasher,
+} from './file-hasher';
+import { defaultHashing, HashingImp } from './hashing-impl';

 const resolve = require('resolve');
-const hasha = require('hasha');

 export interface Hash {
 value: string;

@@ -38,28 +44,44 @@ interface NodeModulesResult {

 export class Hasher {
 static version = '1.0';
-implicitDependencies: Promise<ImplicitHashResult>;
-nodeModules: Promise<NodeModulesResult>;
-runtimeInputs: Promise<RuntimeHashResult>;
-fileHashes = new FileHashes();
-projectHashes = new ProjectHashes(this.projectGraph, this.fileHashes);
+private implicitDependencies: Promise<ImplicitHashResult>;
+private nodeModules: Promise<NodeModulesResult>;
+private runtimeInputs: Promise<RuntimeHashResult>;
+private fileHasher: FileHasher;
+private projectHashes: ProjectHasher;
+private hashing: HashingImp;

 constructor(
 private readonly projectGraph: ProjectGraph,
 private readonly nxJson: NxJson,
-private readonly options: any
-) {}
+private readonly options: any,
+hashing: HashingImp = undefined
+) {
+if (!hashing) {
+this.hashing = defaultHashing;
+this.fileHasher = defaultFileHasher;
+} else {
+this.hashing = hashing;
+this.fileHasher = new FileHasher(hashing);
+}
+this.projectHashes = new ProjectHasher(
+this.projectGraph,
+this.fileHasher,
+this.hashing
+);
+}

-async hash(task: Task): Promise<Hash> {
-const command = hasha(
-[
+async hashTasks(tasks: Task[]): Promise<Hash[]> {
+return Promise.all(tasks.map((t) => this.hash(t)));
+}
+
+private async hash(task: Task): Promise<Hash> {
+const command = this.hashing.hashArray([
 task.target.project || '',
 task.target.target || '',
 task.target.configuration || '',
 JSON.stringify(task.overrides),
-],
-{ algorithm: 'sha256' }
-);
+]);

@@ -75,12 +97,11 @@ export class Hasher {
 NodeModulesResult
 ];

-const value = hasha(
-[Hasher.version, command, ...values.map((v) => v.value)],
-{
-algorithm: 'sha256',
-}
-);
+const value = this.hashing.hashArray([
+Hasher.version,
+command,
+...values.map((v) => v.value),
+]);

 return {
 value,

@@ -109,12 +130,7 @@ export class Hasher {
 })
 )) as any;

-const value = await hasha(
-values.map((v) => v.value),
-{
-algorithm: 'sha256',
-}
-);
+const value = this.hashing.hashArray(values.map((v) => v.value));
 const runtime = values.reduce(
 (m, c) => ((m[c.input] = c.value), m),
 {}

@@ -143,18 +159,12 @@ export class Hasher {
 ];

 this.implicitDependencies = Promise.resolve().then(async () => {
-const fileHashes = await Promise.all(
-fileNames.map(async (file) => {
-const hash = await this.fileHashes.hashFile(file);
+const fileHashes = fileNames.map((file) => {
+const hash = this.fileHasher.hashFile(file);
 return { file, hash };
-})
-);
-
-const combinedHash = await hasha(
-fileHashes.map((v) => v.hash),
-{
-algorithm: 'sha256',
-}
+});
+const combinedHash = this.hashing.hashArray(
+fileHashes.map((v) => v.hash)
 );
 return {
 value: combinedHash,

@@ -174,21 +184,17 @@ export class Hasher {
 ...Object.keys(j.dependencies),
 ...Object.keys(j.devDependencies),
 ];
-const packageJsonHashes = await Promise.all(
-allPackages.map((d) => {
+const packageJsonHashes = allPackages.map((d) => {
 try {
 const path = resolve.sync(`${d}/package.json`, {
 basedir: process.cwd(),
 });
-return this.fileHashes
-.hashFile(path, extractNameAndVersion)
-.catch(() => '');
+return this.fileHasher.hashFile(path, extractNameAndVersion);
 } catch (e) {
 return '';
 }
-})
-);
-return { value: await hasha(packageJsonHashes) };
+});
+return { value: this.hashing.hashArray(packageJsonHashes) };
 } catch (e) {
 return { value: '' };
 }

@@ -198,12 +204,13 @@ export class Hasher {
 }
 }

-export class ProjectHashes {
+class ProjectHasher {
 private sourceHashes: { [projectName: string]: Promise<string> } = {};

 constructor(
 private readonly projectGraph: ProjectGraph,
-private readonly fileHashes: FileHashes
+private readonly fileHasher: FileHasher,
+private readonly hashing: HashingImp
 ) {}

 async hashProject(

@@ -231,7 +238,7 @@ class ProjectHasher {
 },
 { [projectName]: projectHash }
 );
-const value = await hasha([
+const value = this.hashing.hashArray([
 ...depHashes.map((d) => d.value),
 projectHash,
 ]);

@@ -244,88 +251,11 @@ class ProjectHasher {
 this.sourceHashes[projectName] = new Promise(async (res) => {
 const p = this.projectGraph.nodes[projectName];
 const values = await Promise.all(
-p.data.files.map((f) => this.fileHashes.hashFile(f.file))
+p.data.files.map((f) => this.fileHasher.hashFile(f.file))
 );
-res(hasha(values, { algorithm: 'sha256' }));
+res(this.hashing.hashArray(values));
 });
 }
 return this.sourceHashes[projectName];
 }
 }
-
-export function extractNameAndVersion(content: string): string {
-try {
-const c = JSON.parse(content);
-return `${c.name}${c.version}`;
-} catch (e) {
-return '';
-}
-}
-
-type PathAndTransformer = {
-path: string;
-transformer: (x: string) => string | null;
-};
-
-export class FileHashes {
-private queue = [] as PathAndTransformer[];
-private numberOfConcurrentReads = 0;
-private fileHashes: { [path: string]: Promise<string> } = {};
-private resolvers: { [path: string]: Function } = {};
-
-async hashFile(
-path: string,
-transformer: (x: string) => string | null = null
-) {
-if (!this.fileHashes[path]) {
-this.fileHashes[path] = new Promise((res) => {
-this.resolvers[path] = res;
-this.pushFileIntoQueue({ path, transformer });
-});
-}
-return this.fileHashes[path];
-}
-
-private pushFileIntoQueue(pathAndTransformer: PathAndTransformer) {
-this.queue.push(pathAndTransformer);
-if (this.numberOfConcurrentReads < 2000) {
-this.numberOfConcurrentReads++;
-this.takeFromQueue();
-}
-}
-
-private takeFromQueue() {
-if (this.queue.length > 0) {
-const pathAndTransformer = this.queue.pop();
-this.processPath(pathAndTransformer)
-.then((value) => {
-this.resolvers[pathAndTransformer.path](value);
-})
-.then(() => this.takeFromQueue());
-} else {
-this.numberOfConcurrentReads--;
-}
-}
-
-private processPath(pathAndTransformer: PathAndTransformer) {
-try {
-const stats = statSync(pathAndTransformer.path);
-const fileSizeInMegabytes = stats.size / 1000000;
-// large binary file, skip it
-if (fileSizeInMegabytes > 5) {
-return Promise.resolve(stats.size.toString());
-} else if (pathAndTransformer.transformer) {
-const transformedFile = pathAndTransformer.transformer(
-readFileSync(pathAndTransformer.path).toString()
-);
-return Promise.resolve('').then(() =>
-hasha([transformedFile], { algorithm: 'sha256' })
-);
-} else {
-return hasha.fromFile(pathAndTransformer.path, { algorithm: 'sha256' });
-}
-} catch (e) {
-return Promise.resolve('');
-}
-}
-}
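As the spec changes above show, the Hasher's hashing strategy is now injectable. A condensed sketch of that test seam; `projectGraph`, `nxJson`, and `task` stand in for real values:

import { Hasher } from './hasher';

// A stand-in hashing implementation: joins inputs instead of sha256.
const stubHashing: any = {
  hashArray: (values: string[]) => values.join('|'),
  hashFile: (path: string) => `${path}.hash`,
};

async function example(projectGraph: any, nxJson: any, task: any) {
  // Omitting the fourth argument selects defaultHashing/defaultFileHasher;
  // passing an implementation also wraps it in a fresh FileHasher.
  const hasher = new Hasher(projectGraph, nxJson, {}, stubHashing);
  const [hash] = await hasher.hashTasks([task]);
  return hash.value;
}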
packages/workspace/src/core/hasher/hashing-impl.ts (new file, 23 lines)
@@ -0,0 +1,23 @@
+import * as crypto from 'crypto';
+import { readFileSync } from 'fs';
+
+export class HashingImp {
+hashArray(input: string[]): string {
+const hasher = crypto.createHash('sha256');
+for (const part of input) {
+hasher.update(part);
+}
+const hash = hasher.digest().buffer;
+return Buffer.from(hash).toString('hex');
+}
+
+hashFile(path: string): string {
+const hasher = crypto.createHash('sha256');
+const file = readFileSync(path);
+hasher.update(file);
+const hash = hasher.digest().buffer;
+return Buffer.from(hash).toString('hex');
+}
+}
+
+export const defaultHashing = new HashingImp();
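hashing-impl replaces the hasha dependency with Node's built-in crypto module; roughly:

import { defaultHashing } from './hashing-impl';

// sha256 over the concatenation of the parts, hex-encoded.
// Note it is order-sensitive: reordering the inputs changes the result.
const commandHash = defaultHashing.hashArray(['proj', 'build', '{}']);

// sha256 of a file's raw bytes (synchronous, unlike hasha.fromFile).
const fileHash = defaultHashing.hashFile('package.json');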
packages/workspace/src/core/nx-deps/nx-deps-cache.ts (new file, 113 lines)
@@ -0,0 +1,113 @@
+import { FileData, filesChanged } from '../file-utils';
+import {
+ProjectGraph,
+ProjectGraphDependency,
+ProjectGraphNode,
+} from '../project-graph';
+import { join } from 'path';
+import { appRootPath } from '../../utils/app-root';
+import { existsSync } from 'fs';
+import * as fsExtra from 'fs-extra';
+import {
+directoryExists,
+fileExists,
+readJsonFile,
+writeJsonFile,
+} from '../../utils/fileutils';
+import { FileMap } from '@nrwl/workspace/src/core/file-graph';
+
+export interface ProjectGraphCache {
+version: string;
+rootFiles: FileData[];
+nodes: Record<string, ProjectGraphNode>;
+dependencies: Record<string, ProjectGraphDependency[]>;
+}
+
+const nxDepsDir = join(appRootPath, 'node_modules', '.cache', 'nx');
+const nxDepsPath = join(nxDepsDir, 'nxdeps.json');
+
+export function readCache(): false | ProjectGraphCache {
+try {
+if (!existsSync(nxDepsDir)) {
+fsExtra.ensureDirSync(nxDepsDir);
+}
+} catch (e) {
+/*
+ * @jeffbcross: Node JS docs recommend against checking for existence of directory immediately before creating it.
+ * Instead, just try to create the directory and handle the error.
+ *
+ * We ran into race conditions when running scripts concurrently, where multiple scripts were
+ * arriving here simultaneously, checking for directory existence, then trying to create the directory simultaneously.
+ *
+ * In this case, we're creating the directory. If the operation failed, we ensure that the directory
+ * exists before continuing (or raise an exception).
+ */
+if (!directoryExists(nxDepsDir)) {
+throw new Error(`Failed to create directory: ${nxDepsDir}`);
+}
+}
+
+const data = fileExists(nxDepsPath) ? readJsonFile(nxDepsPath) : null;
+return data ? data : false;
+}
+
+export function writeCache(
+rootFiles: FileData[],
+projectGraph: ProjectGraph
+): void {
+writeJsonFile(nxDepsPath, {
+version: '2.0',
+rootFiles,
+nodes: projectGraph.nodes,
+dependencies: projectGraph.dependencies,
+});
+}
+
+export function differentFromCache(
+fileMap: FileMap,
+c: ProjectGraphCache
+): {
+noDifference: boolean;
+filesDifferentFromCache: FileMap;
+partiallyConstructedProjectGraph?: ProjectGraph;
+} {
+const currentProjects = Object.keys(fileMap).sort();
+const previousProjects = Object.keys(c.nodes)
+.sort()
+.filter((name) => c.nodes[name].data.files.length > 0);
+
+// Projects changed -> compute entire graph
+if (
+currentProjects.length !== previousProjects.length ||
+currentProjects.some((val, idx) => val !== previousProjects[idx])
+) {
+return {
+filesDifferentFromCache: fileMap,
+partiallyConstructedProjectGraph: null,
+noDifference: false,
+};
+}
+
+// Projects are same -> compute projects with file changes
+const filesDifferentFromCache: FileMap = {};
+currentProjects.forEach((p) => {
+if (filesChanged(c.nodes[p].data.files, fileMap[p])) {
+filesDifferentFromCache[p] = fileMap[p];
+}
+});
+
+// Re-compute nodes and dependencies for each project in file map.
+Object.keys(filesDifferentFromCache).forEach((key) => {
+delete c.dependencies[key];
+});
+
+const partiallyConstructedProjectGraph = {
+nodes: c.nodes,
+dependencies: c.dependencies,
+};
+
+return {
+filesDifferentFromCache: filesDifferentFromCache,
+partiallyConstructedProjectGraph,
+noDifference: Object.keys(filesDifferentFromCache).length === 0,
+};
+}
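A simplified sketch of how these three functions compose inside createProjectGraph (shown in full further below); the loose `any` types and the `buildGraph` callback are stand-ins for illustration:

import { readCache, differentFromCache, writeCache } from './nx-deps-cache';

function cacheFlowSketch(
  rootFiles: any[],
  fileMap: any,
  buildGraph: (filesToAnalyze: any, partialGraph: any) => any
) {
  const cache = readCache(); // false until nxdeps.json exists under node_modules/.cache/nx
  if (cache /* && !filesChanged(rootFiles, cache.rootFiles) */) {
    const diff = differentFromCache(fileMap, cache);
    if (diff.noDifference) {
      // No file hashes changed since the cache was written: reuse it wholesale.
      return diff.partiallyConstructedProjectGraph;
    }
    // Only re-analyze the projects whose file hashes changed.
    const graph = buildGraph(diff.filesDifferentFromCache, diff.partiallyConstructedProjectGraph);
    writeCache(rootFiles, graph);
    return graph;
  }
  const graph = buildGraph(fileMap, null);
  writeCache(rootFiles, graph);
  return graph;
}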
@@ -10,6 +10,7 @@ export function buildWorkspaceProjectNodes(
 Object.keys(ctx.fileMap).forEach((key) => {
 const p = ctx.workspaceJson.projects[key];

+// TODO, types and projectType should allign
 const projectType =
 p.projectType === 'application'
 ? key.endsWith('-e2e')

|
||||
import { vol, fs } from 'memfs';
|
||||
jest.mock('fs', () => require('memfs').fs);
|
||||
jest.mock('../../utils/app-root', () => ({ appRootPath: '/root' }));
|
||||
|
||||
import { stripIndents } from '@angular-devkit/core/src/utils/literals';
|
||||
import { createProjectGraph } from './project-graph';
|
||||
import { DependencyType } from './project-graph-models';
|
||||
import { NxJson } from '../shared-interfaces';
|
||||
|
||||
jest.mock('fs', () => require('memfs').fs);
|
||||
jest.mock('../../utils/app-root', () => ({ appRootPath: '/root' }));
|
||||
import { defaultFileHasher } from '@nrwl/workspace/src/core/hasher/file-hasher';
|
||||
|
||||
describe('project graph', () => {
|
||||
let packageJson: any;
|
||||
@ -198,6 +199,9 @@ describe('project graph', () => {
|
||||
//wait a tick to ensure the modified time of workspace.json will be after the creation of the project graph file
|
||||
await new Promise((resolve) => setTimeout(resolve, 1));
|
||||
fs.writeFileSync('/root/workspace.json', JSON.stringify(workspaceJson));
|
||||
|
||||
defaultFileHasher.init();
|
||||
|
||||
graph = createProjectGraph();
|
||||
expect(graph.nodes).toMatchObject({
|
||||
demo: { name: 'demo', type: 'lib' },
|
||||
|
||||
@@ -1,20 +1,12 @@
-import { mkdirSync } from 'fs';
-import { appRootPath } from '../../utils/app-root';
-import {
-directoryExists,
-fileExists,
-readJsonFile,
-writeJsonFile,
-} from '../../utils/fileutils';
 import { assertWorkspaceValidity } from '../assert-workspace-validity';
 import { createFileMap, FileMap } from '../file-graph';
 import {
 defaultFileRead,
 FileData,
-mtime,
+filesChanged,
 readNxJson,
 readWorkspaceFiles,
 readWorkspaceJson,
+rootWorkspaceFileData,
 } from '../file-utils';
 import { normalizeNxJson } from '../normalize-nx-json';
 import {

@@ -30,36 +22,68 @@ import {
 } from './build-nodes';
 import { ProjectGraphBuilder } from './project-graph-builder';
 import { ProjectGraph } from './project-graph-models';
-
-/**
- * This version is stored in the project graph cache to determine if it can be reused.
- */
-const projectGraphCacheVersion = '1';
+import {
+differentFromCache,
+ProjectGraphCache,
+readCache,
+writeCache,
+} from '../nx-deps/nx-deps-cache';
+import { NxJson } from '../shared-interfaces';

 export function createProjectGraph(
 workspaceJson = readWorkspaceJson(),
 nxJson = readNxJson(),
 workspaceFiles = readWorkspaceFiles(),
 fileRead: (s: string) => string = defaultFileRead,
-cache: false | { data: ProjectGraphCache; mtime: number } = readCache(),
+cache: false | ProjectGraphCache = readCache(),
 shouldCache: boolean = true
 ): ProjectGraph {
 assertWorkspaceValidity(workspaceJson, nxJson);

 const normalizedNxJson = normalizeNxJson(nxJson);
-if (cache && maxMTime(rootWorkspaceFileData(workspaceFiles)) > cache.mtime) {
-cache = false;
-}

-if (!cache || maxMTime(workspaceFiles) > cache.mtime) {
-const fileMap = createFileMap(workspaceJson, workspaceFiles);
-const incremental = modifiedSinceCache(fileMap, cache);
+const rootFiles = rootWorkspaceFileData();
+const fileMap = createFileMap(workspaceJson, workspaceFiles);
+
+if (cache && !filesChanged(rootFiles, cache.rootFiles)) {
+const diff = differentFromCache(fileMap, cache);
+if (diff.noDifference) {
+return diff.partiallyConstructedProjectGraph;
+}
+
 const ctx = {
 workspaceJson,
 nxJson: normalizedNxJson,
-fileMap: incremental.fileMap,
+fileMap: diff.filesDifferentFromCache,
 };
-const builder = new ProjectGraphBuilder(incremental.projectGraph);
+const projectGraph = buildProjectGraph(
+ctx,
+fileRead,
+diff.partiallyConstructedProjectGraph
+);
+if (shouldCache) {
+writeCache(rootFiles, projectGraph);
+}
+return projectGraph;
+} else {
+const ctx = {
+workspaceJson,
+nxJson: normalizedNxJson,
+fileMap: fileMap,
+};
+const projectGraph = buildProjectGraph(ctx, fileRead, null);
+if (shouldCache) {
+writeCache(rootFiles, projectGraph);
+}
+return projectGraph;
+}
+}
+
+function buildProjectGraph(
+ctx: { nxJson: NxJson<string[]>; workspaceJson: any; fileMap: FileMap },
+fileRead: (s: string) => string,
+projectGraph: ProjectGraph
+) {
+const builder = new ProjectGraphBuilder(projectGraph);
 const buildNodesFns: BuildNodes[] = [
 buildWorkspaceProjectNodes,
 buildNpmPackageNodes,

@@ -69,149 +93,9 @@ export function createProjectGraph(
 buildImplicitProjectDependencies,
 buildExplicitNpmDependencies,
 ];

-buildNodesFns.forEach((f) =>
-f(ctx, builder.addNode.bind(builder), fileRead)
-);
-
+buildNodesFns.forEach((f) => f(ctx, builder.addNode.bind(builder), fileRead));
 buildDependenciesFns.forEach((f) =>
 f(ctx, builder.nodes, builder.addDependency.bind(builder), fileRead)
 );

-const projectGraph = builder.build();
-if (shouldCache) {
-writeCache({
-version: projectGraphCacheVersion,
-projectGraph,
-fileMap,
-});
-}
-return projectGraph;
-} else {
-// Cache file was modified _after_ all workspace files.
-// Safe to return the cached graph.
-return cache.data.projectGraph;
-}
-}
-
-// -----------------------------------------------------------------------------
-
-interface ProjectGraphCache {
-version: string;
-projectGraph: ProjectGraph;
-fileMap: FileMap;
-}
-
-const distPath = `${appRootPath}/dist`;
-const nxDepsPath = `${distPath}/nxdeps.json`;
-
-function readCache(): false | { data: ProjectGraphCache; mtime: number } {
-try {
-mkdirSync(distPath);
-} catch (e) {
-/*
- * @jeffbcross: Node JS docs recommend against checking for existence of directory immediately before creating it.
- * Instead, just try to create the directory and handle the error.
- *
- * We ran into race conditions when running scripts concurrently, where multiple scripts were
- * arriving here simultaneously, checking for directory existence, then trying to create the directory simultaneously.
- *
- * In this case, we're creating the directory. If the operation failed, we ensure that the directory
- * exists before continuing (or raise an exception).
- */
-if (!directoryExists(distPath)) {
-throw new Error(`Failed to create directory: ${distPath}`);
-}
-}
-
-const data = getValidCache(
-fileExists(nxDepsPath) ? readJsonFile(nxDepsPath) : null
-);
-
-return data ? { data, mtime: mtime(nxDepsPath) } : false;
-}
-
-function getValidCache(cache: ProjectGraphCache | null) {
-if (!cache) {
-return null;
-}
-if (
-cache.projectGraph &&
-cache.fileMap &&
-cache.version &&
-cache.version === projectGraphCacheVersion
-) {
-return cache;
-} else {
-return null;
-}
-}
-
-function writeCache(cache: ProjectGraphCache): void {
-writeJsonFile(nxDepsPath, cache);
-}
-
-function maxMTime(files: FileData[]) {
-return Math.max(...files.map((f) => f.mtime));
-}
-
-function rootWorkspaceFileData(workspaceFiles: FileData[]): FileData[] {
-return [
-`package.json`,
-'workspace.json',
-'angular.json',
-`nx.json`,
-`tsconfig.base.json`,
-].reduce((acc: FileData[], curr: string) => {
-const fileData = workspaceFiles.find((x) => x.file === curr);
-if (fileData) {
-acc.push(fileData);
-}
-return acc;
-}, []);
-}
-
-function modifiedSinceCache(
-fileMap: FileMap,
-c: false | { data: ProjectGraphCache; mtime: number }
-): { fileMap: FileMap; projectGraph?: ProjectGraph } {
-// No cache -> compute entire graph
-if (!c) {
-return { fileMap };
-}
-
-const cachedFileMap = c.data.fileMap;
-const currentProjects = Object.keys(fileMap).sort();
-const previousProjects = Object.keys(cachedFileMap).sort();
-
-// Projects changed -> compute entire graph
-if (
-currentProjects.length !== previousProjects.length ||
-currentProjects.some((val, idx) => val !== previousProjects[idx])
-) {
-return { fileMap };
-}
-
-// Projects are same -> compute projects with file changes
-const modifiedSince: FileMap = {};
-currentProjects.forEach((p) => {
-let projectFilesChanged = false;
-for (const f of fileMap[p]) {
-const fromCache = cachedFileMap[p].find((x) => x.file === f.file);
-if (!fromCache || f.mtime > fromCache.mtime) {
-projectFilesChanged = true;
-break;
-}
-}
-if (projectFilesChanged) {
-modifiedSince[p] = fileMap[p];
-}
-});
-
-// Re-compute nodes and dependencies for each project in file map.
-Object.keys(modifiedSince).forEach((key) => {
-delete c.data.projectGraph.dependencies[key];
-});
-
-return { fileMap: modifiedSince, projectGraph: c.data.projectGraph };
+return builder.build();
 }

@@ -65,49 +65,49 @@ describe('findTargetProjectWithImport', () => {
 proj: [
 {
 file: 'libs/proj/index.ts',
-mtime: 0,
+hash: 'some-hash',
 ext: '.ts',
 },
 ],
 proj2: [
 {
 file: 'libs/proj2/index.ts',
-mtime: 0,
+hash: 'some-hash',
 ext: '.ts',
 },
 ],
 proj3a: [
 {
 file: 'libs/proj3a/index.ts',
-mtime: 0,
+hash: 'some-hash',
 ext: '.ts',
 },
 ],
 proj4ab: [
 {
 file: 'libs/proj4ab/index.ts',
-mtime: 0,
+hash: 'some-hash',
 ext: '.ts',
 },
 ],
 proj123: [
 {
 file: 'libs/proj123/index.ts',
-mtime: 0,
+hash: 'some-hash',
 ext: '.ts',
 },
 ],
 proj1234: [
 {
 file: 'libs/proj1234/index.ts',
-mtime: 0,
+hash: 'some-hash',
 ext: '.ts',
 },
 ],
 'proj1234-child': [
 {
 file: 'libs/proj1234-child/index.ts',
-mtime: 0,
+hash: 'some-hash',
 ext: '.ts',
 },
 ],

@@ -33,7 +33,6 @@ export function checkDependencies(schema: Schema): Rule {
 ig = ig.add(tree.read('.gitignore').toString());
 }
 const files: FileData[] = [];
-const mtime = Date.now(); //can't get mtime data from the tree :(
 const workspaceDir = path.dirname(getWorkspacePath(tree));

 for (const dir of tree.getDir('/').subdirs) {

@@ -45,7 +44,7 @@ export function checkDependencies(schema: Schema): Rule {
 files.push({
 file: path.relative(workspaceDir, file),
 ext: path.extname(file),
-mtime,
+hash: '',
 });
 });
 }

@@ -7,7 +7,7 @@ import { ProjectGraph, ProjectGraphNode } from '../core/project-graph';
 import { Environment, NxJson } from '../core/shared-interfaces';
 import { NxArgs } from '@nrwl/workspace/src/command-line/utils';
 import { isRelativePath } from '../utils/fileutils';
-import { Hasher } from './hasher';
+import { Hasher } from '../core/hasher/hasher';
 import { projectHasTargetAndConfiguration } from '../utils/project-graph-utils';

 type RunArgs = yargs.Arguments & ReporterArgs;

@@ -42,14 +42,11 @@ export async function runCommand<T extends RunArgs>(
 });

 const hasher = new Hasher(projectGraph, nxJson, tasksOptions);
-await Promise.all(
-tasks.map(async (t) => {
-const hash = await hasher.hash(t);
-t.hash = hash.value;
-t.hashDetails = hash.details;
-})
-);
-
+const res = await hasher.hashTasks(tasks);
+for (let i = 0; i < res.length; ++i) {
+tasks[i].hash = res[i].value;
+tasks[i].hashDetails = res[i].details;
+}
 const cached = [];
 tasksRunner(tasks, tasksOptions, {
 initiatingProject: initiatingProject,

@@ -976,7 +976,7 @@ describe('Enforce Module Boundaries', () => {
 });

 function createFile(f) {
-return { file: f, ext: extname(f), mtime: 1 };
+return { file: f, ext: extname(f), hash: '' };
 }

 function runRule(

@@ -409,16 +409,14 @@ export function getFullProjectGraphFromHost(host: Tree): ProjectGraph {

 const workspaceFiles: FileData[] = [];

-const mtime = +Date.now();
-
 workspaceFiles.push(
 ...allFilesInDirInHost(host, normalize(''), { recursive: false }).map((f) =>
-getFileDataInHost(host, f, mtime)
+getFileDataInHost(host, f)
 )
 );
 workspaceFiles.push(
 ...allFilesInDirInHost(host, normalize('tools')).map((f) =>
-getFileDataInHost(host, f, mtime)
+getFileDataInHost(host, f)
 )
 );

@@ -427,7 +425,7 @@ export function getFullProjectGraphFromHost(host: Tree): ProjectGraph {
 const project = workspaceJson.projects[projectName];
 workspaceFiles.push(
 ...allFilesInDirInHost(host, normalize(project.root)).map((f) =>
-getFileDataInHost(host, f, mtime)
+getFileDataInHost(host, f)
 )
 );
 });

@@ -441,15 +439,11 @@ export function getFullProjectGraphFromHost(host: Tree): ProjectGraph {
 );
 }

-export function getFileDataInHost(
-host: Tree,
-path: Path,
-mtime: number
-): FileData {
+export function getFileDataInHost(host: Tree, path: Path): FileData {
 return {
 file: path,
 ext: extname(normalize(path)),
-mtime,
+hash: '',
 };
 }