fix(core): include config name in the cached fileset
commit 438e4a544e
parent 899625c8a3
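The hasher used to cache a project's fileset hash under the key `<project>:$filesets`, so two tasks that selected different named inputs for the same project (for example `default` vs `prod`) could collide in the cache and reuse the wrong hash. This change adds the named-input (configuration) name to the cache key, replaces the synthetic `$input` task plumbing with explicit hashNamedInput/hashSelfAndDepsInputs methods, and exports a new filterUsingGlobPatterns helper that ORs positive glob patterns and ANDs negated ones. A before/after sketch of the cache key (names taken from the diff below):

    // before: one cache entry per project, shared by all configurations
    //   `${projectName}:$filesets`
    // after: one cache entry per (project, named input) pair
    //   `${projectName}:$filesets:${namedInput}`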
@@ -27,7 +27,8 @@ export default async function run(
   );
 
   const command = res.details['command'];
-  const selfSource = res.details.nodes[`${task.target.project}:$filesets`];
+  const selfSource =
+    res.details.nodes[`${task.target.project}:$filesets:default`];
 
   const nodes = {};
   const hashes = [] as string[];
@@ -12,7 +12,7 @@ jest.mock('../utils/typescript');
 
 import { vol } from 'memfs';
 import tsUtils = require('../utils/typescript');
-import { expandNamedInput, Hasher } from './hasher';
+import { expandNamedInput, filterUsingGlobPatterns, Hasher } from './hasher';
 
 describe('Hasher', () => {
   const packageJson = {
@@ -45,14 +45,6 @@ describe('Hasher', () => {
     };
   }
 
-  /**
-   * const workSpaceJson = {
-   *   projects: {
-   *     parent: { root: 'libs/parent' },
-   *     child: { root: 'libs/child' },
-   *   },
-   * };
-   */
   beforeEach(() => {
     vol.fromJSON(
       {
@@ -124,7 +116,7 @@ describe('Hasher', () => {
 
     expect(hash.details.command).toEqual('parent|build||{"prop":"prop-value"}');
     expect(hash.details.nodes).toEqual({
-      'parent:$filesets':
+      'parent:$filesets:default':
         '/file|file.hash|{"root":"libs/parent","targets":{"build":{"inputs":["default","^default",{"runtime":"echo runtime123"},{"env":"TESTENV"},{"env":"NONEXISTENTENV"}]}}}|{"compilerOptions":{"paths":{"@nrwl/parent":["libs/parent/src/index.ts"],"@nrwl/child":["libs/child/src/index.ts"]}}}',
       '{workspaceRoot}/yarn.lock': 'yarn.lock.hash',
       '{workspaceRoot}/package-lock.json': 'package-lock.json.hash',
@@ -186,9 +178,9 @@ describe('Hasher', () => {
 
     // note that the parent hash is based on parent source files only!
     expect(onlySourceNodes(hash.details.nodes)).toEqual({
-      'child:$filesets':
+      'child:$filesets:default':
         '/fileb.ts|/fileb.spec.ts|b.hash|b.spec.hash|{"root":"libs/child","targets":{"build":{}}}|{"compilerOptions":{"paths":{"@nrwl/parent":["libs/parent/src/index.ts"],"@nrwl/child":["libs/child/src/index.ts"]}}}',
-      'parent:$filesets':
+      'parent:$filesets:default':
         '/filea.ts|/filea.spec.ts|a.hash|a.spec.hash|{"root":"libs/parent","targets":{"build":{}}}|{"compilerOptions":{"paths":{"@nrwl/parent":["libs/parent/src/index.ts"],"@nrwl/child":["libs/child/src/index.ts"]}}}',
     });
   });
@@ -250,13 +242,111 @@ describe('Hasher', () => {
     });
 
     expect(onlySourceNodes(hash.details.nodes)).toEqual({
-      'child:$filesets':
+      'child:$filesets:prod':
         'libs/child/fileb.ts|libs/child/fileb.spec.ts|b.hash|b.spec.hash|{"root":"libs/child","namedInputs":{"prod":["default"]},"targets":{"build":{}}}|{"compilerOptions":{"paths":{"@nrwl/parent":["libs/parent/src/index.ts"],"@nrwl/child":["libs/child/src/index.ts"]}}}',
-      'parent:$filesets':
+      'parent:$filesets:default':
         'libs/parent/filea.ts|a.hash|{"root":"libs/parent","targets":{"build":{"inputs":["prod","^prod"]}}}|{"compilerOptions":{"paths":{"@nrwl/parent":["libs/parent/src/index.ts"],"@nrwl/child":["libs/child/src/index.ts"]}}}',
     });
   });
 
+  it('should be able to handle multiple filesets per project', async () => {
+    process.env.MY_TEST_HASH_ENV = 'MY_TEST_HASH_ENV_VALUE';
+    const hasher = new Hasher(
+      {
+        nodes: {
+          parent: {
+            name: 'parent',
+            type: 'lib',
+            data: {
+              root: 'libs/parent',
+              targets: {
+                test: {
+                  inputs: ['default', '^prod'],
+                },
+              },
+              files: [
+                { file: 'libs/parent/filea.ts', hash: 'a.hash' },
+                { file: 'libs/parent/filea.spec.ts', hash: 'a.spec.hash' },
+              ],
+            },
+          },
+          child: {
+            name: 'child',
+            type: 'lib',
+            data: {
+              root: 'libs/child',
+              namedInputs: {
+                prod: [
+                  '!{projectRoot}/**/*.spec.ts',
+                  '{workspaceRoot}/global2',
+                  { env: 'MY_TEST_HASH_ENV' },
+                ],
+              },
+              targets: {
+                test: {
+                  inputs: ['default'],
+                },
+              },
+              files: [
+                { file: 'libs/child/fileb.ts', hash: 'b.hash' },
+                { file: 'libs/child/fileb.spec.ts', hash: 'b.spec.hash' },
+              ],
+            },
+          },
+        },
+        dependencies: {
+          parent: [{ source: 'parent', target: 'child', type: 'static' }],
+        },
+        allWorkspaceFiles,
+      },
+      {
+        namedInputs: {
+          default: ['{projectRoot}/**/*', '{workspaceRoot}/global1'],
+          prod: ['!{projectRoot}/**/*.spec.ts'],
+        },
+      } as any,
+      {},
+      createHashing()
+    );
+
+    const parentHash = await hasher.hashTask({
+      target: { project: 'parent', target: 'test' },
+      id: 'parent-test',
+      overrides: { prop: 'prop-value' },
+    });
+
+    expect(parentHash.details.nodes['parent:$filesets:default']).toContain(
+      'libs/parent/filea.ts|libs/parent/filea.spec.ts|a.hash|a.spec.hash|'
+    );
+    expect(parentHash.details.nodes['child:$filesets:prod']).toContain(
+      'libs/child/fileb.ts|b.hash|'
+    );
+    expect(parentHash.details.nodes['{workspaceRoot}/global1']).toEqual(
+      'global1.hash'
+    );
+    expect(parentHash.details.nodes['{workspaceRoot}/global2']).toBe(
+      'global2.hash'
+    );
+    expect(parentHash.details.nodes['env:MY_TEST_HASH_ENV']).toEqual(
+      'MY_TEST_HASH_ENV_VALUE'
+    );
+
+    const childHash = await hasher.hashTask({
+      target: { project: 'child', target: 'test' },
+      id: 'child-test',
+      overrides: { prop: 'prop-value' },
+    });
+
+    expect(childHash.details.nodes['child:$filesets:default']).toContain(
+      'libs/child/fileb.ts|libs/child/fileb.spec.ts|b.hash|b.spec.hash|'
+    );
+    expect(childHash.details.nodes['{workspaceRoot}/global1']).toEqual(
+      'global1.hash'
+    );
+    expect(childHash.details.nodes['{workspaceRoot}/global2']).toBe(undefined);
+    expect(childHash.details.nodes['env:MY_TEST_HASH_ENV']).toBeUndefined();
+  });
+
   it('should use targetdefaults from nx.json', async () => {
     const hasher = new Hasher(
       {
@@ -314,9 +404,9 @@ describe('Hasher', () => {
     });
 
     expect(onlySourceNodes(hash.details.nodes)).toEqual({
-      'child:$filesets':
+      'child:$filesets:prod':
         'libs/child/fileb.ts|b.hash|{"root":"libs/child","targets":{"build":{}}}|{"compilerOptions":{"paths":{"@nrwl/parent":["libs/parent/src/index.ts"],"@nrwl/child":["libs/child/src/index.ts"]}}}',
-      'parent:$filesets':
+      'parent:$filesets:default':
         'libs/parent/filea.ts|a.hash|{"root":"libs/parent","targets":{"build":{}}}|{"compilerOptions":{"paths":{"@nrwl/parent":["libs/parent/src/index.ts"],"@nrwl/child":["libs/child/src/index.ts"]}}}',
     });
   });
@@ -364,7 +454,7 @@ describe('Hasher', () => {
 
     expect(hash.details.command).toEqual('parent|build||{"prop":"prop-value"}');
     expect(onlySourceNodes(hash.details.nodes)).toEqual({
-      'parent:$filesets':
+      'parent:$filesets:default':
         '/file|file.hash|{"root":"libs/parent","targets":{"build":{}}}|{"compilerOptions":{"paths":{"@nrwl/parent":["libs/parent/src/index.ts"]}}}',
     });
   });
@@ -416,9 +506,9 @@ describe('Hasher', () => {
     expect(tasksHash.value).toContain('parent|build'); //project and target
     expect(tasksHash.value).toContain('build'); //target
     expect(onlySourceNodes(tasksHash.details.nodes)).toEqual({
-      'child:$filesets':
+      'child:$filesets:default':
         '/fileb.ts|b.hash|{"root":"libs/child","targets":{"build":{}}}|{"compilerOptions":{"paths":{"@nrwl/parent":["libs/parent/src/index.ts"],"@nrwl/child":["libs/child/src/index.ts"]}}}',
-      'parent:$filesets':
+      'parent:$filesets:default':
         '/filea.ts|a.hash|{"root":"libs/parent","targets":{"build":{}}}|{"compilerOptions":{"paths":{"@nrwl/parent":["libs/parent/src/index.ts"],"@nrwl/child":["libs/child/src/index.ts"]}}}',
     });
 
@@ -435,9 +525,9 @@ describe('Hasher', () => {
     expect(hashb.value).toContain('child|build'); //project and target
     expect(hashb.value).toContain('build'); //target
     expect(onlySourceNodes(hashb.details.nodes)).toEqual({
-      'child:$filesets':
+      'child:$filesets:default':
         '/fileb.ts|b.hash|{"root":"libs/child","targets":{"build":{}}}|{"compilerOptions":{"paths":{"@nrwl/parent":["libs/parent/src/index.ts"],"@nrwl/child":["libs/child/src/index.ts"]}}}',
-      'parent:$filesets':
+      'parent:$filesets:default':
         '/filea.ts|a.hash|{"root":"libs/parent","targets":{"build":{}}}|{"compilerOptions":{"paths":{"@nrwl/parent":["libs/parent/src/index.ts"],"@nrwl/child":["libs/child/src/index.ts"]}}}',
     });
   });
@@ -565,7 +655,7 @@ describe('Hasher', () => {
 
     // note that the parent hash is based on parent source files only!
     expect(onlySourceNodes(hash.details.nodes)).toEqual({
-      'app:$filesets':
+      'app:$filesets:default':
        '/filea.ts|a.hash|{"root":"apps/app","targets":{"build":{}}}|{"compilerOptions":{"paths":{"@nrwl/parent":["libs/parent/src/index.ts"],"@nrwl/child":["libs/child/src/index.ts"]}}}',
       'npm:react': '17.0.0',
     });
@@ -611,7 +701,7 @@ describe('Hasher', () => {
 
     // note that the parent hash is based on parent source files only!
     expect(onlySourceNodes(hash.details.nodes)).toEqual({
-      'app:$filesets':
+      'app:$filesets:default':
        '/filea.ts|a.hash|{"root":"apps/app","targets":{"build":{}}}|{"compilerOptions":{"paths":{"@nrwl/parent":["libs/parent/src/index.ts"],"@nrwl/child":["libs/child/src/index.ts"]}}}',
       'npm:react': '__npm:react__',
     });
@@ -655,6 +745,62 @@ describe('Hasher', () => {
       expect(expanded).toEqual([{ fileset: 'c' }]);
     });
   });
+
+  describe('filterUsingGlobPatterns', () => {
+    it('should OR all positive patterns and AND all negative patterns (when positive and negative patterns)', () => {
+      const filtered = filterUsingGlobPatterns(
+        '/root',
+        [
+          { file: '/root/a.ts' },
+          { file: '/root/b.js' },
+          { file: '/root/c.spec.ts' },
+          { file: '/root/d.md' },
+        ] as any,
+        [
+          '/root/**/*.ts',
+          '/root/**/*.js',
+          '!/root/**/*.spec.ts',
+          '!/root/**/*.md',
+        ]
+      );
+
+      expect(filtered.map((f) => f.file)).toEqual(['/root/a.ts', '/root/b.js']);
+    });
+
+    it('should OR all positive patterns and AND all negative patterns (when negative patterns)', () => {
+      const filtered = filterUsingGlobPatterns(
+        '/root',
+        [
+          { file: '/root/a.ts' },
+          { file: '/root/b.js' },
+          { file: '/root/c.spec.ts' },
+          { file: '/root/d.md' },
+        ] as any,
+        ['!/root/**/*.spec.ts', '!/root/**/*.md']
+      );
+
+      expect(filtered.map((f) => f.file)).toEqual(['/root/a.ts', '/root/b.js']);
+    });
+
+    it('should OR all positive patterns and AND all negative patterns (when positive patterns)', () => {
+      const filtered = filterUsingGlobPatterns(
+        '/root',
+        [
+          { file: '/root/a.ts' },
+          { file: '/root/b.js' },
+          { file: '/root/c.spec.ts' },
+          { file: '/root/d.md' },
+        ] as any,
+        ['/root/**/*.ts', '/root/**/*.js']
+      );
+
+      expect(filtered.map((f) => f.file)).toEqual([
+        '/root/a.ts',
+        '/root/b.js',
+        '/root/c.spec.ts',
+      ]);
+    });
+  });
 });
 
 function onlySourceNodes(nodes: { [name: string]: string }) {
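In the spec changes above, every fileset node key gains a named-input suffix (`parent:$filesets` becomes `parent:$filesets:default`), and the new filterUsingGlobPatterns tests pin down the glob semantics: positive patterns are OR'ed, negated patterns are AND'ed. A minimal sketch of why `some`/`every` implement that (illustrative, not part of the commit; assumes a minimatch version whose default export is the matcher function):

    const minimatch = require('minimatch');

    // A negated pattern matches exactly the files that do NOT match it,
    // so negated patterns compose with `every` while positives use `some`:
    minimatch('/root/a.ts', '/root/**/*.ts');            // true  -> kept by a positive
    minimatch('/root/c.spec.ts', '!/root/**/*.spec.ts'); // false -> dropped by a negation
    minimatch('/root/a.ts', '!/root/**/*.spec.ts');      // true  -> survives the negation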
@@ -116,8 +116,6 @@ export class Hasher {
   }
 
   hashDependsOnOtherTasks(task: Task) {
-    const inputs = this.taskHasher.inputs(task);
-    // check here for outputs
     return false;
   }
 
@@ -216,15 +214,69 @@ class TaskHasher {
     return Promise.resolve().then(async () => {
       const projectNode = this.projectGraph.nodes[task.target.project];
       if (!projectNode) {
-        return this.hashExternalDependency(task);
+        return this.hashExternalDependency(task.target.project);
       }
+      const namedInputs = {
+        default: [{ fileset: '{projectRoot}/**/*' }],
+        ...this.nxJson.namedInputs,
+        ...projectNode.data.namedInputs,
+      };
+      const targetData = projectNode.data.targets[task.target.target];
+      const targetDefaults = (this.nxJson.targetDefaults || {})[
+        task.target.target
+      ];
+      const { selfInputs, depsInputs } = splitInputsIntoSelfAndDependencies(
+        targetData.inputs || targetDefaults?.inputs || DEFAULT_INPUTS,
+        namedInputs
+      );
+
+      return this.hashSelfAndDepsInputs(
+        task.target.project,
+        'default',
+        selfInputs,
+        depsInputs,
+        visited
+      );
+    });
+  }
 
-      const projectGraphDeps =
-        this.projectGraph.dependencies[task.target.project] ?? [];
+  private async hashNamedInput(
+    projectName: string,
+    namedInput: string,
+    visited: string[]
+  ): Promise<PartialHash> {
+    const projectNode = this.projectGraph.nodes[projectName];
+    if (!projectNode) {
+      return this.hashExternalDependency(projectName);
+    }
+    const namedInputs = {
+      default: [{ fileset: '{projectRoot}/**/*' }],
+      ...this.nxJson.namedInputs,
+      ...projectNode.data.namedInputs,
+    };
 
-      const { selfInputs, depsInputs } = this.inputs(task);
-      const self = await this.hashSelfInputs(task, selfInputs);
-      const deps = await this.hashDepsTasks(
+    const selfInputs = expandNamedInput(namedInput, namedInputs);
+    const depsInputs = [{ input: namedInput }];
+    return this.hashSelfAndDepsInputs(
+      projectName,
+      namedInput,
+      selfInputs,
+      depsInputs,
+      visited
+    );
+  }
+
+  private async hashSelfAndDepsInputs(
+    projectName: string,
+    namedInput: string,
+    selfInputs: ExpandedSelfInput[],
+    depsInputs: { input: string }[],
+    visited: string[]
+  ) {
+    const projectGraphDeps = this.projectGraph.dependencies[projectName] ?? [];
+
+    const self = await this.hashSelfInputs(projectName, namedInput, selfInputs);
+    const deps = await this.hashDepsInputs(
       depsInputs,
       projectGraphDeps,
       visited
@@ -244,10 +296,9 @@ class TaskHasher {
       ]);
 
       return { value, details };
-    });
   }
 
-  private async hashDepsTasks(
+  private async hashDepsInputs(
     inputs: { input: string }[],
     projectGraphDeps: ProjectGraphDependency[],
     visited: string[]
@@ -261,16 +312,9 @@ class TaskHasher {
           return null;
         } else {
           visited.push(d.target);
-          return await this.hashTask(
-            {
-              id: `${d.target}:$input:${input.input}`,
-              target: {
-                project: d.target,
-                target: '$input',
-                configuration: input.input,
-              },
-              overrides: {},
-            },
+          return await this.hashNamedInput(
+            d.target,
+            input.input || 'default',
             visited
           );
         }
@@ -283,36 +327,8 @@ class TaskHasher {
       .filter((r) => !!r);
   }
 
-  inputs(task: Task): {
-    depsInputs: { input: string }[];
-    selfInputs: ExpandedSelfInput[];
-  } {
-    const projectNode = this.projectGraph.nodes[task.target.project];
-    const namedInputs = {
-      default: [{ fileset: '{projectRoot}/**/*' }],
-      ...this.nxJson.namedInputs,
-      ...projectNode.data.namedInputs,
-    };
-    if (task.target.target === '$input') {
-      return {
-        depsInputs: [{ input: task.target.configuration }],
-        selfInputs: expandNamedInput(task.target.configuration, namedInputs),
-      };
-    } else {
-      const targetData = projectNode.data.targets[task.target.target];
-      const targetDefaults = (this.nxJson.targetDefaults || {})[
-        task.target.target
-      ];
-      // task from TaskGraph can be added here
-      return splitInputsIntoSelfAndDependencies(
-        targetData.inputs || targetDefaults?.inputs || DEFAULT_INPUTS,
-        namedInputs
-      );
-    }
-  }
-
-  private hashExternalDependency(task: Task) {
-    const n = this.projectGraph.externalNodes[task.target.project];
+  private hashExternalDependency(projectName: string) {
+    const n = this.projectGraph.externalNodes[projectName];
     const version = n?.data?.version;
     let hash: string;
     if (version) {
@@ -329,18 +345,19 @@ class TaskHasher {
       // The actual checksum added here is of no importance as
       // the version is unknown and may only change when some
       // other change occurs in package.json and/or package-lock.json
-      hash = `__${task.target.project}__`;
+      hash = `__${projectName}__`;
     }
     return {
       value: hash,
       details: {
-        [task.target.project]: version || hash,
+        [projectName]: version || hash,
      },
     };
   }
 
   private async hashSelfInputs(
-    task: Task,
+    projectName: string,
+    namedInput: string,
     inputs: ExpandedSelfInput[]
   ): Promise<PartialHash[]> {
     const filesets = inputs
@@ -349,7 +366,8 @@ class TaskHasher {
 
     const projectFilesets = [];
     const workspaceFilesets = [];
-    let invalidFileset = null;
+    let invalidFilesetNoPrefix = null;
+    let invalidFilesetWorkspaceRootNegative = null;
 
     for (let f of filesets) {
       if (f.startsWith('{projectRoot}/') || f.startsWith('!{projectRoot}/')) {
@@ -360,22 +378,32 @@ class TaskHasher {
       ) {
         workspaceFilesets.push(f);
       } else {
-        invalidFileset = f;
+        invalidFilesetNoPrefix = f;
       }
     }
-    if (invalidFileset) {
+
+    if (invalidFilesetNoPrefix) {
       throw new Error(
         [
-          `"${invalidFileset}" is an invalid fileset.`,
+          `"${invalidFilesetNoPrefix}" is an invalid fileset.`,
           'All filesets have to start with either {workspaceRoot} or {projectRoot}.',
           'For instance: "!{projectRoot}/**/*.spec.ts" or "{workspaceRoot}/package.json".',
-          `If "${invalidFileset}" is a named input, make sure it is defined in, for instance, nx.json.`,
+          `If "${invalidFilesetNoPrefix}" is a named input, make sure it is defined in, for instance, nx.json.`,
         ].join('\n')
       );
     }
+    if (invalidFilesetWorkspaceRootNegative) {
+      throw new Error(
+        [
+          `"${invalidFilesetWorkspaceRootNegative}" is an invalid fileset.`,
+          'It is not possible to negate filesets starting with {workspaceRoot}.',
+        ].join('\n')
+      );
+    }
+
     const notFilesets = inputs.filter((r) => !r['fileset']);
     return Promise.all([
-      this.hashTaskFileset(task, projectFilesets),
+      this.hashProjectFileset(projectName, namedInput, projectFilesets),
       ...[
         ...workspaceFilesets,
         ...this.legacyFilesetInputs.map((r) => r.fileset),
@@ -416,18 +444,19 @@ class TaskHasher {
     return this.filesetHashes[mapKey];
   }
 
-  private async hashTaskFileset(
-    task: Task,
+  private async hashProjectFileset(
+    projectName: string,
+    namedInput: string,
     filesetPatterns: string[]
   ): Promise<PartialHash> {
-    const mapKey = `${task.target.project}:$filesets`;
+    const mapKey = `${projectName}:$filesets:${namedInput}`;
     if (!this.filesetHashes[mapKey]) {
       this.filesetHashes[mapKey] = new Promise(async (res) => {
-        const p = this.projectGraph.nodes[task.target.project];
+        const p = this.projectGraph.nodes[projectName];
         const filesetWithExpandedProjectRoot = filesetPatterns.map((f) =>
           f.replace('{projectRoot}', p.data.root)
         );
-        const filteredFiles = this.filterFiles(
+        const filteredFiles = filterUsingGlobPatterns(
           p.data.root,
           p.data.files,
           filesetWithExpandedProjectRoot
@@ -482,17 +511,6 @@ class TaskHasher {
     };
   }
 
-  private filterFiles(
-    projectRoot: string,
-    files: FileData[],
-    patterns: string[]
-  ) {
-    if (patterns.indexOf(`${projectRoot}/**/*`) > -1) return files;
-    return files.filter(
-      (f) => !!patterns.find((pattern) => minimatch(f.file, pattern))
-    );
-  }
-
   private hashTsConfig(p: ProjectGraphProjectNode) {
     if (this.options.selectivelyHashTsConfig) {
       return this.removeOtherProjectsPathRecords(p);
@@ -584,3 +602,39 @@ export function expandNamedInput(
   if (!namedInputs[input]) throw new Error(`Input '${input}' is not defined`);
   return expandSelfInputs(namedInputs[input], namedInputs);
 }
+
+export function filterUsingGlobPatterns(
+  projectRoot: string,
+  files: FileData[],
+  patterns: string[]
+): FileData[] {
+  const positive = [];
+  const negative = [];
+  for (const p of patterns) {
+    if (p.startsWith('!')) {
+      negative.push(p);
+    } else {
+      positive.push(p);
+    }
+  }
+
+  if (positive.length === 0 && negative.length === 0) {
+    return files;
+  }
+
+  return files.filter((f) => {
+    let matchedPositive = false;
+    if (
+      positive.length === 0 ||
+      (positive.length === 1 && positive[0] === `${projectRoot}/**/*`)
+    ) {
+      matchedPositive = true;
+    } else {
+      matchedPositive = positive.some((pattern) => minimatch(f.file, pattern));
+    }
+
+    if (!matchedPositive) return false;
+
+    return negative.every((pattern) => minimatch(f.file, pattern));
+  });
+}
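A hypothetical call to the newly exported helper (illustrative only; hashProjectFileset expands `{projectRoot}` before filtering, so the helper receives concrete globs, and the `as any` mirrors the specs above, which pass bare `{ file }` objects rather than full FileData entries):

    import { filterUsingGlobPatterns } from './hasher';

    const kept = filterUsingGlobPatterns(
      'libs/parent',
      [
        { file: 'libs/parent/index.ts' },
        { file: 'libs/parent/index.spec.ts' },
      ] as any,
      ['libs/parent/**/*.ts', '!libs/parent/**/*.spec.ts']
    );
    // => [{ file: 'libs/parent/index.ts' }]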