feat(core): add WorkspaceContext class (#18999)
This commit is contained in:
parent ae154e777e
commit 537d7eb8be
.gitignore (vendored, 1 line changed)
@@ -12,6 +12,7 @@ tmp
 jest.debug.config.js
 .tool-versions
 /.nx-cache
+/.nx
 /.verdaccio/build/local-registry
 /graph/client/src/assets/environment.js
 /graph/client/src/assets/dev/environment.js
Cargo.lock (generated, 1 line changed)
@@ -1354,6 +1354,7 @@ dependencies = [
  "napi-derive",
  "once_cell",
  "os_type",
+ "parking_lot",
  "rayon",
  "regex",
  "swc_common",
@@ -31,6 +31,10 @@ describe('Nx Commands', () => {
     runCLI(`generate @nx/js:lib ${proj3}`);
   });

+  afterEach(() => {
+    runCLI('reset');
+  });
+
   afterAll(() => cleanupProject());

   it('should watch for project changes', async () => {
@@ -31,6 +31,7 @@ import {
 import { NxJsonConfiguration, output } from '@nx/devkit';
 import { readFileSync } from 'fs';
 import { join } from 'path';
+import { resetWorkspaceContext } from 'nx/src/utils/workspace-context';

 let projName: string;

@@ -566,4 +567,5 @@ export function cleanupProject({
     removeSync(tmpProjPath());
   } catch {}
   }
+  resetWorkspaceContext();
 }
@@ -15,6 +15,7 @@ ignore-files = "1.3.0"
 itertools = "0.10.5"
 once_cell = "1.18.0"
 os_type = "2.6.0"
+parking_lot = { version = "0.12.1", features = ["send_guard"] }
 napi = { version = '2.12.6', default-features = false, features = ['anyhow', 'napi4', 'tokio_rt'] }
 napi-derive = '2.9.3'
 regex = "1.9.1"
@@ -19,6 +19,8 @@ import { execSync } from 'child_process';
 import { join } from 'path';
 import { assertSupportedPlatform } from '../src/native/assert-supported-platform';
 import { performance } from 'perf_hooks';
+import { setupWorkspaceContext } from '../src/utils/workspace-context';
+import { daemonClient } from '../src/daemon/client/client';

 function main() {
   if (
@@ -64,6 +66,11 @@ function main() {
   ) {
     require('v8-compile-cache');
   }
+
+  if (!daemonClient.enabled() && workspace !== null) {
+    setupWorkspaceContext(workspace.dir);
+  }

   // polyfill rxjs observable to avoid issues with multiple version of Observable installed in node_modules
   // https://twitter.com/BenLesh/status/1192478226385428483?s=20
   if (!(Symbol as any).observable)
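Worth spelling out why that setup call is gated: when the daemon is running it owns the workspace context, so the CLI process skips the eager walk. A minimal sketch of the decision, with `isDaemonEnabled` and `workspaceDir` as hypothetical stand-ins for the real `daemonClient.enabled()` and `workspace.dir`:

```ts
// Sketch only: mirrors the gating added to bin/nx above.
import { setupWorkspaceContext } from '../src/utils/workspace-context';

function maybeSetupContext(isDaemonEnabled: boolean, workspaceDir: string | null) {
  // When the daemon runs, it owns the WorkspaceContext; the CLI process
  // should not pay the cost of walking and hashing the workspace twice.
  if (!isDaemonEnabled && workspaceDir !== null) {
    setupWorkspaceContext(workspaceDir);
  }
}
```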
@@ -13,7 +13,6 @@ import { NxJsonConfiguration } from '../../config/nx-json';
 import { InProcessTaskHasher } from '../../hasher/task-hasher';
 import { hashTask } from '../../hasher/hash-task';
 import { getPackageManagerCommand } from '../../utils/package-manager';
-import { fileHasher } from '../../hasher/file-hasher';
 import { printAffectedDeprecationMessage } from './command-object';
 import { logger, NX_PREFIX } from '../../utils/logger';

@@ -72,14 +71,7 @@ async function createTasks(
     nxArgs.configuration,
     overrides
   );
-  const hasher = new InProcessTaskHasher(
-    {},
-    [],
-    projectGraph,
-    nxJson,
-    {},
-    fileHasher
-  );
+  const hasher = new InProcessTaskHasher({}, [], projectGraph, nxJson, {});
   const execCommand = getPackageManagerCommand().exec;
   const tasks = Object.values(taskGraph.tasks);
@@ -22,12 +22,10 @@ import {
   createTaskGraph,
   mapTargetDefaultsToDependencies,
 } from '../../tasks-runner/create-task-graph';
 import { TargetDefaults, TargetDependencies } from '../../config/nx-json';
 import { TaskGraph } from '../../config/task-graph';
 import { daemonClient } from '../../daemon/client/client';
 import { Server } from 'net';
 import { readProjectFileMapCache } from '../../project-graph/nx-deps-cache';
-import { fileHasher } from '../../hasher/file-hasher';
 import { getAffectedGraphNodes } from '../affected/affected';
 import { splitArgsIntoNxArgsAndOverrides } from '../../utils/command-line-utils';

@@ -574,7 +572,6 @@ async function createDepGraphClientResponse(
   affected: string[] = []
 ): Promise<ProjectGraphClientResponse> {
   performance.mark('project graph watch calculation:start');
-  await fileHasher.init();

   let graph = pruneExternalNodes(
     await createProjectGraphAsync({ exitOnError: true })
@@ -2,7 +2,6 @@ import { Task, TaskGraph } from '../../config/task-graph';
 import { getCachedSerializedProjectGraphPromise } from './project-graph-incremental-recomputation';
 import { InProcessTaskHasher } from '../../hasher/task-hasher';
 import { readNxJson } from '../../config/configuration';
-import { fileHasher } from '../../hasher/file-hasher';
 import { setHashEnv } from '../../hasher/set-hash-env';

 /**
@@ -31,8 +30,7 @@ export async function handleHashTasks(payload: {
       allWorkspaceFiles,
       projectGraph,
       nxJson,
-      payload.runnerOptions,
-      fileHasher
+      payload.runnerOptions
     );
   }
   const response = JSON.stringify(
@@ -1,7 +1,8 @@
-import { fileHasher } from '../../hasher/file-hasher';
+import { getAllFileDataInContext } from '../../utils/workspace-context';
+import { workspaceRoot } from '../../utils/workspace-root';

 export async function handleRequestFileData() {
-  const response = JSON.stringify(fileHasher.allFileData());
+  const response = JSON.stringify(getAllFileDataInContext(workspaceRoot));
   return {
     response,
     description: 'handleRequestFileData',
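For callers, the daemon handler above is only one of two paths to the same data. The callers touched later in this diff (e.g. file-map-utils) follow one shared pattern, sketched here with both branches taken verbatim from the diff:

```ts
import { daemonClient } from '../../daemon/client/client';
import { getAllFileDataInContext } from '../../utils/workspace-context';
import { workspaceRoot } from '../../utils/workspace-root';
import type { FileData } from '../../config/project-graph';

// Sketch of the caller-side pattern: go through the daemon when it is
// running (it owns the context), otherwise read the local context directly.
async function allWorkspaceFileData(): Promise<FileData[]> {
  if (daemonClient.enabled()) {
    return daemonClient.getAllFileData();
  }
  return getAllFileDataInContext(workspaceRoot);
}
```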
@@ -17,7 +17,7 @@ import { notifyFileWatcherSockets } from './file-watching/file-watcher-sockets';
 import { serverLogger } from './logger';
 import { workspaceRoot } from '../../utils/workspace-root';
 import { execSync } from 'child_process';
-import { fileHasher, hashArray } from '../../hasher/file-hasher';
+import { hashArray } from '../../hasher/file-hasher';
 import {
   retrieveWorkspaceFiles,
   retrieveProjectConfigurations,
@@ -27,6 +27,10 @@ import {
   ProjectsConfigurations,
 } from '../../config/workspace-json-project-json';
 import { readNxJson } from '../../config/nx-json';
+import {
+  resetWorkspaceContext,
+  updateFilesInContext,
+} from '../../utils/workspace-context';

 let cachedSerializedProjectGraphPromise: Promise<{
   error: Error | null;
@@ -163,17 +167,17 @@ function filterUpdatedFiles(files: string[]) {
 async function processCollectedUpdatedAndDeletedFiles() {
   try {
     performance.mark('hash-watched-changes-start');
-    const updatedFiles = await fileHasher.hashFiles(
-      filterUpdatedFiles([...collectedUpdatedFiles.values()])
-    );
+    const updatedFiles = filterUpdatedFiles([
+      ...collectedUpdatedFiles.values(),
+    ]);
     const deletedFiles = [...collectedDeletedFiles.values()];
+    let updatedFileHashes = updateFilesInContext(updatedFiles, deletedFiles);
     performance.mark('hash-watched-changes-end');
     performance.measure(
       'hash changed files from watcher',
       'hash-watched-changes-start',
       'hash-watched-changes-end'
     );
-    fileHasher.incrementalUpdate(updatedFiles, deletedFiles);

     const nxJson = readNxJson(workspaceRoot);

@@ -201,7 +205,7 @@ async function processCollectedUpdatedAndDeletedFiles() {
       projectNodes,
       projectFileMapWithFiles.projectFileMap,
       projectFileMapWithFiles.allWorkspaceFiles,
-      updatedFiles,
+      new Map(Object.entries(updatedFileHashes)),
       deletedFiles
     );
   } else {
@@ -330,8 +334,7 @@ async function resetInternalState() {
   currentProjectGraph = undefined;
   collectedUpdatedFiles.clear();
   collectedDeletedFiles.clear();
-  fileHasher.clear();
-  await fileHasher.ensureInitialized();
+  resetWorkspaceContext();
   waitPeriod = 100;
 }
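The key change in this file: hashing of watched changes moves into the native context, and `updateFilesInContext` hands back the fresh hashes so the project file map can be patched without a second rehash. A condensed sketch of that flow; `patchProjectFileMap` is a hypothetical stand-in for the update call shown in the hunk above:

```ts
import { updateFilesInContext } from '../../utils/workspace-context';

declare function patchProjectFileMap(
  updated: Map<string, string>,
  deleted: string[]
): void; // hypothetical stand-in for updateProjectFileMap(...)

function onWatchedChanges(updatedFiles: string[], deletedFiles: string[]) {
  // The native side re-hashes only the updated paths and returns
  // { filePath: hash } for them, while also dropping deleted entries.
  const updatedFileHashes = updateFilesInContext(updatedFiles, deletedFiles);
  patchProjectFileMap(new Map(Object.entries(updatedFileHashes)), deletedFiles);
}
```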
@@ -56,8 +56,10 @@ import { readJsonFile } from '../../utils/fileutils';
 import { PackageJson } from '../../utils/package-json';
 import { getDaemonProcessIdSync, writeDaemonJsonProcessCache } from '../cache';
 import { handleHashTasks } from './handle-hash-tasks';
-import { fileHasher, hashArray } from '../../hasher/file-hasher';
+import { hashArray } from '../../hasher/file-hasher';
 import { handleRequestFileData } from './handle-request-file-data';
+import { setupWorkspaceContext } from '../../utils/workspace-context';
+import { hashFile } from '../../native';

 let performanceObserver: PerformanceObserver | undefined;
 let workspaceWatcherError: Error | undefined;
@@ -283,7 +285,7 @@ function lockFileHashChanged(): boolean {
     join(workspaceRoot, 'pnpm-lock.yaml'),
   ]
     .filter((file) => existsSync(file))
-    .map((file) => fileHasher.hashFile(file));
+    .map((file) => hashFile(file));
   const newHash = hashArray(lockHashes);
   if (existingLockHash && newHash != existingLockHash) {
     existingLockHash = newHash;
@@ -397,6 +399,8 @@ const handleOutputsChanges: FileWatcherCallback = async (err, changeEvents) => {
 };

 export async function startServer(): Promise<Server> {
+  setupWorkspaceContext(workspaceRoot);
+
   // Persist metadata about the background process so that it can be cleaned up later if needed
   await writeDaemonJsonProcessCache({
     processId: process.pid,
@@ -409,7 +413,6 @@ export async function startServer(): Promise<Server> {

   return new Promise(async (resolve, reject) => {
     try {
-      await fileHasher.ensureInitialized();
       server.listen(FULL_OS_SOCKET_PATH, async () => {
         try {
           serverLogger.log(`Started listening on: ${FULL_OS_SOCKET_PATH}`);
@@ -1,89 +1,3 @@
-import { performance } from 'perf_hooks';
-import { workspaceRoot } from '../utils/workspace-root';
-import { FileData } from '../config/project-graph';
-
-export class FileHasher {
-  private fileHashes: Map<string, string>;
-  private isInitialized = false;
-
-  async init(): Promise<void> {
-    performance.mark('init hashing:start');
-    // Import as needed. There is also an issue running unit tests in Nx repo if this is a top-level import.
-    const { hashFiles } = require('../native');
-    this.clear();
-    const filesObject = hashFiles(workspaceRoot);
-    this.fileHashes = new Map(Object.entries(filesObject));
-
-    performance.mark('init hashing:end');
-    performance.measure(
-      'init hashing',
-      'init hashing:start',
-      'init hashing:end'
-    );
-  }
-
-  hashFile(path: string): string {
-    // Import as needed. There is also an issue running unit tests in Nx repo if this is a top-level import.
-    const { hashFile } = require('../native');
-    return hashFile(path).hash;
-  }
-
-  clear(): void {
-    this.fileHashes = new Map<string, string>();
-    this.isInitialized = false;
-  }
-
-  async ensureInitialized() {
-    if (!this.isInitialized) {
-      await this.init();
-    }
-  }
-
-  async hashFiles(files: string[]): Promise<Map<string, string>> {
-    const r = new Map<string, string>();
-    for (let f of files) {
-      r.set(f, this.hashFile(f));
-    }
-    return r;
-  }
-
-  allFileData(): FileData[] {
-    const res = [];
-    this.fileHashes.forEach((hash, file) => {
-      res.push({
-        file,
-        hash,
-      });
-    });
-    res.sort((x, y) => x.file.localeCompare(y.file));
-    return res;
-  }
-
-  incrementalUpdate(
-    updatedFiles: Map<string, string>,
-    deletedFiles: string[] = []
-  ): void {
-    performance.mark('incremental hashing:start');
-
-    updatedFiles.forEach((hash, filename) => {
-      this.fileHashes.set(filename, hash);
-    });
-
-    for (const deletedFile of deletedFiles) {
-      this.fileHashes.delete(deletedFile);
-    }
-
-    performance.mark('incremental hashing:end');
-    performance.measure(
-      'incremental hashing',
-      'incremental hashing:start',
-      'incremental hashing:end'
-    );
-  }
-}
-
-export const fileHasher = new FileHasher();
-
 export function hashArray(content: string[]): string {
   // Import as needed. There is also an issue running unit tests in Nx repo if this is a top-level import.
   const { hashArray } = require('../native');
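Since the `FileHasher` singleton is deleted wholesale, callers migrate roughly one-to-one onto the new surface. A sketch of the mapping, assuming only names visible elsewhere in this diff (`hashFile` from the native binding, the rest from `utils/workspace-context`); the file paths are illustrative:

```ts
import { hashFile } from '../native';
import {
  getAllFileDataInContext,
  updateFilesInContext,
  resetWorkspaceContext,
} from '../utils/workspace-context';
import { workspaceRoot } from '../utils/workspace-root';

// fileHasher.allFileData()           -> getAllFileDataInContext(workspaceRoot)
const files = getAllFileDataInContext(workspaceRoot);

// fileHasher.hashFile(path).hash     -> hashFile(path), now string | null
const hash = hashFile('/tmp/some-file.txt'); // illustrative path

// fileHasher.incrementalUpdate(u, d) -> updateFilesInContext(u, d)
const newHashes = updateFilesInContext(['package.json'], []);

// fileHasher.clear() / init()        -> resetWorkspaceContext(), then lazy re-setup
resetWorkspaceContext();
```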
@@ -8,7 +8,6 @@ import {
   Hash,
   InProcessTaskHasher,
 } from './task-hasher';
-import { fileHasher } from './file-hasher';
 import { withEnvironmentVariables } from '../../internal-testing-utils/with-environment';

 jest.mock('../utils/workspace-root', () => {
@@ -114,8 +113,7 @@ describe('TaskHasher', () => {
       {} as any,
       {
         runtimeCacheInputs: ['echo runtime456'],
-      },
-      fileHasher
+      }
     );

     const hash = await hasher.hashTask(
@@ -178,8 +176,7 @@ describe('TaskHasher', () => {
         },
       },
       {} as any,
-      {},
-      fileHasher
+      {}
     );

     const hash = await hasher.hashTask(
@@ -261,8 +258,7 @@ describe('TaskHasher', () => {
           prod: ['!{projectRoot}/**/*.spec.ts'],
         },
       } as any,
-      {},
-      fileHasher
+      {}
     );

     const hash = await hasher.hashTask(
@@ -334,8 +330,7 @@ describe('TaskHasher', () => {
           prod: ['!{projectRoot}/**/*.spec.ts'],
         },
       } as any,
-      {},
-      fileHasher
+      {}
     );

     const taskGraph = {
@@ -437,8 +432,7 @@ describe('TaskHasher', () => {
           prod: ['!{projectRoot}/**/*.spec.ts'],
         },
       } as any,
-      {},
-      fileHasher
+      {}
     );

     const taskGraph = {
@@ -535,8 +529,7 @@ describe('TaskHasher', () => {
         },
       },
      } as any,
-      {},
-      fileHasher
+      {}
     );

     const hash = await hasher.hashTask(
@@ -594,8 +587,7 @@ describe('TaskHasher', () => {
      {
        runtimeCacheInputs: ['echo runtime123', 'echo runtime456'],
        selectivelyHashTsConfig: true,
-      },
-      fileHasher
+      }
     );

     const hash = await hasher.hashTask(
@@ -654,8 +646,7 @@ describe('TaskHasher', () => {
       },

       {} as any,
-      {},
-      fileHasher
+      {}
     );

     const taskGraph = {
@@ -725,8 +716,7 @@ describe('TaskHasher', () => {
       {} as any,
       {
         runtimeCacheInputs: ['boom'],
-      },
-      fileHasher
+      }
     );

     try {
@@ -794,8 +784,7 @@ describe('TaskHasher', () => {
       },

       {} as any,
-      {},
-      fileHasher
+      {}
     );

     const hash = await hasher.hashTask(
@@ -859,8 +848,7 @@ describe('TaskHasher', () => {
       },

       {} as any,
-      {},
-      fileHasher
+      {}
     );

     const hash = await hasher.hashTask(
@@ -915,8 +903,7 @@ describe('TaskHasher', () => {
         dependencies: {},
       },
       {} as any,
-      {},
-      fileHasher
+      {}
     );

     const hash = await hasher.hashTask(
@@ -999,8 +986,7 @@ describe('TaskHasher', () => {
       },

       {} as any,
-      {},
-      fileHasher
+      {}
     );
   }

@@ -1142,8 +1128,7 @@ describe('TaskHasher', () => {
         },
       },
       {} as any,
-      {},
-      fileHasher
+      {}
     );

     const hash = await hasher.hashTask(
@@ -1285,8 +1270,7 @@ describe('TaskHasher', () => {
       },

       {} as any,
-      {},
-      fileHasher
+      {}
     );

     const computeTaskHash = async (hasher, appName) => {
@@ -1356,8 +1340,7 @@ describe('TaskHasher', () => {
       },

       {} as any,
-      {},
-      fileHasher
+      {}
     );

     const hash = await hasher.hashTask(
@@ -1437,8 +1420,7 @@ describe('TaskHasher', () => {
       },

       {} as any,
-      {},
-      fileHasher
+      {}
     );

     const hash = await hasher.hashTask(
@@ -1516,8 +1498,7 @@ describe('TaskHasher', () => {
       },

       {} as any,
-      {},
-      fileHasher
+      {}
     );

     const hash = await hasher.hashTask(
@@ -1634,8 +1615,7 @@ describe('TaskHasher', () => {
         },
       },
      } as any,
-      {},
-      fileHasher
+      {}
     );

     await tempFs.createFiles({
@@ -1769,8 +1749,7 @@ describe('TaskHasher', () => {
        },
       },
      } as any,
-      {},
-      fileHasher
+      {}
     );

     await tempFs.createFiles({
@@ -14,13 +14,14 @@ import { hashTsConfig } from '../plugins/js/hasher/hasher';
 import { DaemonClient } from '../daemon/client/client';
 import { createProjectRootMappings } from '../project-graph/utils/find-project-for-path';
 import { findMatchingProjects } from '../utils/find-matching-projects';
-import { FileHasher, hashArray, hashObject } from './file-hasher';
+import { hashArray, hashObject } from './file-hasher';
 import { getOutputsForTargetAndConfiguration } from '../tasks-runner/utils';
 import { getHashEnv } from './set-hash-env';
 import { workspaceRoot } from '../utils/workspace-root';
 import { join, relative } from 'path';
 import { normalizePath } from '../utils/path';
 import { findAllProjectNodeDependencies } from '../utils/project-graph-utils';
+import { hashFile } from '../native';

 type ExpandedSelfInput =
   | { fileset: string }
@@ -102,8 +103,7 @@ export class InProcessTaskHasher implements TaskHasher {
     private readonly allWorkspaceFiles: FileData[],
     private readonly projectGraph: ProjectGraph,
     private readonly nxJson: NxJsonConfiguration,
-    private readonly options: any,
-    private readonly fileHasher: FileHasher
+    private readonly options: any
   ) {
     const legacyRuntimeInputs = (
       this.options && this.options.runtimeCacheInputs
@@ -130,7 +130,6 @@ export class InProcessTaskHasher implements TaskHasher {
       this.projectFileMap,
       this.allWorkspaceFiles,
       this.projectGraph,
-      this.fileHasher,
      { selectivelyHashTsConfig: this.options.selectivelyHashTsConfig ?? false }
     );
   }
@@ -205,7 +204,6 @@ class TaskHasherImpl {
     private readonly projectFileMap: ProjectFileMap,
     private readonly allWorkspaceFiles: FileData[],
     private readonly projectGraph: ProjectGraph,
-    private readonly fileHasher: FileHasher,
     private readonly options: { selectivelyHashTsConfig: boolean }
   ) {
     // External Dependencies are all calculated up front in a deterministic order
@@ -412,7 +410,7 @@ class TaskHasherImpl {
     );
     const hashDetails = {};
     const hashes: string[] = [];
-    for (const [file, hash] of await this.fileHasher.hashFiles(
+    for (const [file, hash] of this.hashFiles(
       filteredFiles.map((p) => join(workspaceRoot, p))
     )) {
       hashes.push(hash);
@@ -437,6 +435,14 @@ class TaskHasherImpl {
     return partialHashes;
   }

+  private hashFiles(files: string[]): Map<string, string> {
+    const r = new Map<string, string>();
+    for (let f of files) {
+      r.set(f, hashFile(f));
+    }
+    return r;
+  }
+
   private getExternalDependencyHash(externalNodeName: string) {
     const combinedHash = this.combinePartialHashes(
       this.externalDependencyHashes.get(externalNodeName)
@@ -1,4 +1,3 @@
-use crate::native::types::FileData;
 use crate::native::utils::path::Normalize;
 use crate::native::walker::nx_walker;
 use std::collections::HashMap;
@@ -12,14 +11,12 @@ pub fn hash_array(input: Vec<String>) -> String {
 }

 #[napi]
-pub fn hash_file(file: String) -> Option<FileData> {
-    let Ok(content) = std::fs::read(&file) else {
+pub fn hash_file(file: String) -> Option<String> {
+    let Ok(content) = std::fs::read(file) else {
         return None;
     };

-    let hash = xxh3::xxh3_64(&content).to_string();
-
-    Some(FileData { hash, file })
+    Some(xxh3::xxh3_64(&content).to_string())
 }

 #[napi]
@@ -72,6 +69,6 @@ mod tests {
         let test_file_path = temp_dir.display().to_string() + "/test.txt";
         let content = hash_file(test_file_path);

-        assert_eq!(content.unwrap().hash, "6193209363630369380");
+        assert_eq!(content.unwrap(), "6193209363630369380");
     }
 }
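This return-type change ripples into every TypeScript caller: `hashFile` now yields the hash string directly, or `null` for an unreadable file, instead of a `FileData` with a `.hash` field. A small sketch of defensive use:

```ts
import { hashFile } from '../native';

function tryHash(path: string): string | undefined {
  // hashFile returns null when the file cannot be read, for example when it
  // was deleted between a watcher event and this call.
  const hash = hashFile(path);
  return hash ?? undefined;
}
```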
packages/nx/src/native/index.d.ts (vendored, 28 lines changed)
@@ -16,7 +16,7 @@ export function getFilesForOutputs(directory: string, entries: Array<string>): A
 export function remove(src: string): void
 export function copy(src: string, dest: string): void
 export function hashArray(input: Array<string>): string
-export function hashFile(file: string): FileData | null
+export function hashFile(file: string): string | null
 export function hashFiles(workspaceRoot: string): Record<string, string>
 export function findImports(projectFileMap: Record<string, Array<string>>): Array<ImportResult>
 export interface ExternalNodeData {
@@ -69,10 +69,6 @@ export interface FileData {
   file: string
   hash: string
 }
-/**
- * Newly created files will have the `update` EventType as well.
- * This simplifies logic between OS's, IDEs and git operations
- */
 export const enum EventType {
   delete = 'delete',
   update = 'update',
@@ -87,21 +83,16 @@ export const enum WorkspaceErrors {
   ParseError = 'ParseError',
   Generic = 'Generic'
 }
-/** Get workspace config files based on provided globs */
-export function getProjectConfigurationFiles(workspaceRoot: string, globs: Array<string>): Array<string>
-/** Get workspace config files based on provided globs */
-export function getProjectConfigurations(workspaceRoot: string, globs: Array<string>, parseConfigurations: (arg0: Array<string>) => ConfigurationParserResult): ConfigurationParserResult
-export interface ConfigurationParserResult {
-  projectNodes: Record<string, object>
-  externalNodes: Record<string, object>
-}
 export interface NxWorkspaceFiles {
   projectFileMap: Record<string, Array<FileData>>
   globalFiles: Array<FileData>
   projectConfigurations: Record<string, object>
   externalNodes: Record<string, object>
 }
-export function getWorkspaceFilesNative(workspaceRoot: string, globs: Array<string>, parseConfigurations: (arg0: Array<string>) => ConfigurationParserResult): NxWorkspaceFiles
+export interface ConfigurationParserResult {
+  projectNodes: Record<string, object>
+  externalNodes: Record<string, object>
+}
 export class ImportResult {
   file: string
   sourceProject: string
@@ -118,3 +109,12 @@ export class Watcher {
   watch(callback: (err: string | null, events: WatchEvent[]) => void): void
   stop(): Promise<void>
 }
+export class WorkspaceContext {
+  workspaceRoot: string
+  constructor(workspaceRoot: string)
+  getWorkspaceFiles(globs: Array<string>, parseConfigurations: (arg0: Array<string>) => ConfigurationParserResult): NxWorkspaceFiles
+  getProjectConfigurationFiles(globs: Array<string>): Array<string>
+  getProjectConfigurations(globs: Array<string>, parseConfigurations: (arg0: Array<string>) => ConfigurationParserResult): ConfigurationParserResult
+  incrementalUpdate(updatedFiles: Array<string>, deletedFiles: Array<string>): Record<string, string>
+  allFileData(): Array<FileData>
+}
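The generated typings above give the full surface of the new class. A sketch of direct use from TypeScript against those declared signatures; the workspace path is illustrative:

```ts
import { WorkspaceContext } from '../native';

const context = new WorkspaceContext('/path/to/workspace'); // illustrative path

// File data is gathered on a background thread at construction time;
// the first accessor call blocks until the walk completes.
const allFiles = context.allFileData();

const configFiles = context.getProjectConfigurationFiles([
  'project.json',
  '**/project.json',
]);

// incrementalUpdate returns the fresh { path: hash } entries.
const hashes = context.incrementalUpdate(['apps/app1/project.json'], []);
```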
@@ -246,7 +246,7 @@ if (!nativeBinding) {
   throw new Error(`Failed to load native binding`)
 }

-const { expandOutputs, getFilesForOutputs, remove, copy, hashArray, hashFile, hashFiles, ImportResult, findImports, EventType, Watcher, WorkspaceErrors, getProjectConfigurationFiles, getProjectConfigurations, getWorkspaceFilesNative } = nativeBinding
+const { expandOutputs, getFilesForOutputs, remove, copy, hashArray, hashFile, hashFiles, ImportResult, findImports, EventType, Watcher, WorkspaceContext, WorkspaceErrors } = nativeBinding

 module.exports.expandOutputs = expandOutputs
 module.exports.getFilesForOutputs = getFilesForOutputs
@@ -259,7 +259,5 @@ module.exports.ImportResult = ImportResult
 module.exports.findImports = findImports
 module.exports.EventType = EventType
 module.exports.Watcher = Watcher
+module.exports.WorkspaceContext = WorkspaceContext
 module.exports.WorkspaceErrors = WorkspaceErrors
-module.exports.getProjectConfigurationFiles = getProjectConfigurationFiles
-module.exports.getProjectConfigurations = getProjectConfigurations
-module.exports.getWorkspaceFilesNative = getWorkspaceFilesNative
@@ -18,17 +18,23 @@ where
     ) -> std::fmt::Result {
         // Format values from the event's's metadata:
         let metadata = event.metadata();
+        let level = *metadata.level();

-        if metadata.level() != &Level::WARN && metadata.level() != &Level::TRACE {
-            write!(&mut writer, "\n{} {} ", ">".cyan(), "NX".bold().cyan())?;
-        }
-
-        if metadata.level() == &Level::TRACE {
-            write!(
-                &mut writer,
-                "{}: ",
-                format!("{}", metadata.level()).bold().red()
-            )?;
+        match level {
+            Level::TRACE | Level::DEBUG => {
+                write!(
+                    &mut writer,
+                    "{} {}: ",
+                    format!("{}", metadata.level()).bold().red(),
+                    metadata.target()
+                )?;
+            }
+            Level::WARN => {
+                write!(&mut writer, "\n{} {} ", ">".yellow(), "NX".bold().yellow())?;
+            }
+            _ => {
+                write!(&mut writer, "\n{} {} ", ">".cyan(), "NX".bold().cyan())?;
+            }
         }

         // Format all the spans in the event's span context.
@@ -57,6 +63,10 @@ where
         // Write fields on the event
         ctx.field_format().format_fields(writer.by_ref(), event)?;

+        if !(matches!(level, Level::TRACE)) && !(matches!(level, Level::DEBUG)) {
+            writeln!(&mut writer)?;
+        }
+
         writeln!(writer)
     }
 }
@@ -12,7 +12,7 @@ describe('hasher', () => {
     const tempFilePath = join(tempDirPath, 'temp.txt');
     await writeFile(tempFilePath, 'content');

-    expect(hashFile(tempFilePath).hash).toBe('6193209363630369380');
+    expect(hashFile(tempFilePath)).toBe('6193209363630369380');
   });

   it('should hash content', async () => {
@@ -1,4 +1,4 @@
-import { getProjectConfigurations, getWorkspaceFilesNative } from '../index';
+import { WorkspaceContext } from '../index';
 import { TempFs } from '../../utils/testing/temp-fs';
 import { NxJsonConfiguration } from '../../config/nx-json';
 import { dirname, join } from 'path';
@@ -17,7 +17,7 @@ describe('workspace files', () => {
     }
     return {
       projectNodes: res,
-      externalNodes: {}
+      externalNodes: {},
     };
   };
 }
@@ -54,11 +54,11 @@ describe('workspace files', () => {
       './libs/package-project/index.js': '',
       './nested/non-project/file.txt': '',
     });

     let globs = ['project.json', '**/project.json', 'libs/*/package.json'];

+    const context = new WorkspaceContext(fs.tempDir);
     let { projectFileMap, projectConfigurations, globalFiles } =
-      getWorkspaceFilesNative(
-        fs.tempDir,
+      context.getWorkspaceFiles(
         globs,
         createParseConfigurationsFunction(fs.tempDir)
       );
@@ -179,9 +179,11 @@ describe('workspace files', () => {
       './src/index.js': '',
       './jest.config.js': '',
     });

+    const context = new WorkspaceContext(fs.tempDir);
+
     const globs = ['project.json', '**/project.json', '**/package.json'];
-    const { globalFiles, projectFileMap } = getWorkspaceFilesNative(
-      fs.tempDir,
+    const { globalFiles, projectFileMap } = context.getWorkspaceFiles(
       globs,
       createParseConfigurationsFunction(fs.tempDir)
     );
@@ -235,35 +237,33 @@ describe('workspace files', () => {
       './libs/project1/index.js': '',
     });

+    const context = new WorkspaceContext(fs.tempDir);
     let globs = ['project.json', '**/project.json', '**/package.json'];

-    let nodes = getProjectConfigurations(
-      fs.tempDir,
-      globs,
-      (filenames) => {
-        const res = {};
-        for (const filename of filenames) {
-          const json = readJsonFile(join(fs.tempDir, filename));
-          res[json.name] = {
-            ...json,
-            root: dirname(filename),
-          };
-        }
-        return {
-          externalNodes: {}, projectNodes: res
-        }
-      }
-    );
+    let nodes = context.getProjectConfigurations(globs, (filenames) => {
+      const res = {};
+      for (const filename of filenames) {
+        const json = readJsonFile(join(fs.tempDir, filename));
+        res[json.name] = {
+          ...json,
+          root: dirname(filename),
+        };
+      }
+      return {
+        externalNodes: {},
+        projectNodes: res,
+      };
+    });
     expect(nodes.projectNodes).toEqual({
-      "project1": {
-        "name": "project1",
-        "root": "libs/project1",
-      },
-      "repo-name": expect.objectContaining({
-        "name": "repo-name",
-        "root": ".",
-      }),
-    });
+      project1: {
+        name: 'project1',
+        root: 'libs/project1',
+      },
+      'repo-name': expect.objectContaining({
+        name: 'repo-name',
+        root: '.',
+      }),
+    });
   });

   // describe('errors', () => {
@@ -8,8 +8,6 @@ use crate::native::watch::utils::transform_event;

 #[napi(string_enum)]
 #[derive(Debug)]
-/// Newly created files will have the `update` EventType as well.
-/// This simplifies logic between OS's, IDEs and git operations
 pub enum EventType {
     #[allow(non_camel_case_types)]
     delete,
packages/nx/src/native/workspace/config_files.rs (new file, 40 lines)
@@ -0,0 +1,40 @@
+use crate::native::utils::glob::build_glob_set;
+use crate::native::utils::path::Normalize;
+use crate::native::workspace::types::ConfigurationParserResult;
+
+use crate::native::workspace::errors::{InternalWorkspaceErrors, WorkspaceErrors};
+use rayon::prelude::*;
+use std::path::PathBuf;
+
+/// Get workspace config files based on provided globs
+pub(super) fn get_project_configuration_files(
+    globs: Vec<String>,
+    files: Option<&[(PathBuf, String)]>,
+) -> napi::Result<Vec<String>, WorkspaceErrors> {
+    let Some(files) = files else {
+        return Ok(Default::default())
+    };
+
+    let globs =
+        build_glob_set(&globs).map_err(|err| InternalWorkspaceErrors::Generic(err.to_string()))?;
+    Ok(files
+        .par_iter()
+        .map(|file| file.0.to_normalized_string())
+        .filter(|path| globs.is_match(path))
+        .collect())
+}
+
+/// Get workspace config files based on provided globs
+pub(super) fn get_project_configurations<ConfigurationParser>(
+    globs: Vec<String>,
+    files: Option<&[(PathBuf, String)]>,
+    parse_configurations: ConfigurationParser,
+) -> napi::Result<ConfigurationParserResult>
+where
+    ConfigurationParser: Fn(Vec<String>) -> napi::Result<ConfigurationParserResult>,
+{
+    let config_paths =
+        get_project_configuration_files(globs, files).map_err(anyhow::Error::from)?;
+
+    parse_configurations(config_paths)
+}
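On the TypeScript side this Rust module is reached through `getProjectConfigurations` on the context, whose parser callback must return a `ConfigurationParserResult`. A minimal sketch with a throwaway parser; reading each config as plain JSON is illustrative only, not the real plugin pipeline:

```ts
import { WorkspaceContext } from '../../native';
import { readFileSync } from 'fs';
import { join } from 'path';

const root = '/path/to/workspace'; // illustrative
const context = new WorkspaceContext(root);

const result = context.getProjectConfigurations(
  ['**/project.json'],
  (configFiles) => {
    const projectNodes: Record<string, object> = {};
    for (const file of configFiles) {
      // Real Nx runs configs through its plugin pipeline; plain JSON
      // parsing here just demonstrates the callback contract.
      const json = JSON.parse(readFileSync(join(root, file), 'utf-8'));
      projectNodes[json.name] = json;
    }
    return { projectNodes, externalNodes: {} };
  }
);
```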
packages/nx/src/native/workspace/context.rs (new file, 225 lines)
@@ -0,0 +1,225 @@
+use crate::native::logger::enable_logger;
+use std::collections::HashMap;
+
+use crate::native::types::FileData;
+use crate::native::utils::path::Normalize;
+use parking_lot::lock_api::MutexGuard;
+use parking_lot::{Condvar, Mutex, RawMutex};
+use rayon::prelude::*;
+use std::ops::Deref;
+use std::path::{Path, PathBuf};
+use std::sync::Arc;
+use std::thread;
+use tracing::{trace, warn};
+use xxhash_rust::xxh3;
+
+use crate::native::walker::nx_walker;
+use crate::native::workspace::errors::WorkspaceErrors;
+use crate::native::workspace::workspace_files::NxWorkspaceFiles;
+use crate::native::workspace::{config_files, workspace_files};
+
+use crate::native::workspace::types::ConfigurationParserResult;
+
+#[napi]
+pub struct WorkspaceContext {
+    pub workspace_root: String,
+    workspace_root_path: PathBuf,
+    files_worker: FilesWorker,
+}
+
+type Files = Vec<(PathBuf, String)>;
+struct FilesWorker(Option<Arc<(Mutex<Files>, Condvar)>>);
+
+impl FilesWorker {
+    fn gather_files(workspace_root: &Path) -> Self {
+        if !workspace_root.exists() {
+            warn!(
+                "workspace root does not exist: {}",
+                workspace_root.display()
+            );
+            return FilesWorker(None);
+        }
+
+        let files_lock = Arc::new((Mutex::new(Vec::new()), Condvar::new()));
+        let files_lock_clone = Arc::clone(&files_lock);
+        let workspace_root = workspace_root.to_owned();
+
+        thread::spawn(move || {
+            trace!("locking files");
+            let (lock, cvar) = &*files_lock_clone;
+            let mut workspace_files = lock.lock();
+            let files = nx_walker(workspace_root, |rec| {
+                let mut file_hashes: Vec<(PathBuf, String)> = vec![];
+                for (path, content) in rec {
+                    file_hashes.push((path, xxh3::xxh3_64(&content).to_string()));
+                }
+                file_hashes
+            });
+
+            workspace_files.extend(files);
+            workspace_files.par_sort();
+            let files_len = workspace_files.len();
+            trace!(?files_len, "files retrieved");
+
+            cvar.notify_all();
+        });
+
+        FilesWorker(Some(files_lock))
+    }
+
+    pub fn get_files(&self) -> Option<MutexGuard<'_, RawMutex, Files>> {
+        let Some(files_sync) = &self.0 else {
+            trace!("there were no files because the workspace root did not exist");
+            return None
+        };
+
+        let (files_lock, cvar) = &files_sync.deref();
+        let mut files = files_lock.lock();
+        let files_len = files.len();
+        if files_len == 0 {
+            trace!("waiting for files");
+            cvar.wait(&mut files);
+        }
+
+        trace!("files are available");
+        Some(files)
+    }
+
+    pub fn update_files(
+        &self,
+        workspace_root_path: &Path,
+        updated_files: Vec<&str>,
+        deleted_files: Vec<&str>,
+    ) -> HashMap<String, String> {
+        let Some(files_sync) = &self.0 else {
+            trace!("there were no files because the workspace root did not exist");
+            return HashMap::new();
+        };
+
+        let (files_lock, _) = &files_sync.deref();
+        let mut files = files_lock.lock();
+        let mut map: HashMap<PathBuf, String> = files.drain(..).collect();
+
+        for deleted_file in deleted_files {
+            map.remove(&PathBuf::from(deleted_file));
+        }
+
+        let updated_files_hashes: HashMap<String, String> = updated_files
+            .par_iter()
+            .filter_map(|path| {
+                let full_path = workspace_root_path.join(path);
+                let Ok(content) = std::fs::read(full_path) else {
+                    trace!("could not read file: {:?}", path);
+                    return None;
+                };
+                Some((path.to_string(), xxh3::xxh3_64(&content).to_string()))
+            })
+            .collect();
+
+        for (file, hash) in &updated_files_hashes {
+            map.entry(file.into())
+                .and_modify(|e| *e = hash.clone())
+                .or_insert(hash.clone());
+        }
+
+        *files = map.into_iter().collect();
+        files.par_sort();
+
+        updated_files_hashes
+    }
+}
+
+#[napi]
+impl WorkspaceContext {
+    #[napi(constructor)]
+    pub fn new(workspace_root: String) -> Self {
+        enable_logger();
+
+        trace!(?workspace_root);
+
+        let workspace_root_path = PathBuf::from(&workspace_root);
+
+        WorkspaceContext {
+            files_worker: FilesWorker::gather_files(&workspace_root_path),
+            workspace_root,
+            workspace_root_path,
+        }
+    }
+
+    #[napi]
+    pub fn get_workspace_files<ConfigurationParser>(
+        &self,
+        globs: Vec<String>,
+        parse_configurations: ConfigurationParser,
+    ) -> napi::Result<NxWorkspaceFiles, WorkspaceErrors>
+    where
+        ConfigurationParser: Fn(Vec<String>) -> napi::Result<ConfigurationParserResult>,
+    {
+        workspace_files::get_files(
+            globs,
+            parse_configurations,
+            self.files_worker
+                .get_files()
+                .as_deref()
+                .map(|files| files.as_slice()),
+        )
+    }
+
+    #[napi]
+    pub fn get_project_configuration_files(
+        &self,
+        globs: Vec<String>,
+    ) -> napi::Result<Vec<String>, WorkspaceErrors> {
+        config_files::get_project_configuration_files(
+            globs,
+            self.files_worker
+                .get_files()
+                .as_deref()
+                .map(|files| files.as_slice()),
+        )
+    }
+
+    #[napi]
+    pub fn get_project_configurations<ConfigurationParser>(
+        &self,
+        globs: Vec<String>,
+        parse_configurations: ConfigurationParser,
+    ) -> napi::Result<ConfigurationParserResult>
+    where
+        ConfigurationParser: Fn(Vec<String>) -> napi::Result<ConfigurationParserResult>,
+    {
+        config_files::get_project_configurations(
+            globs,
+            self.files_worker
+                .get_files()
+                .as_deref()
+                .map(|files| files.as_slice()),
+            parse_configurations,
+        )
+    }
+
+    #[napi]
+    pub fn incremental_update(
+        &self,
+        updated_files: Vec<&str>,
+        deleted_files: Vec<&str>,
+    ) -> HashMap<String, String> {
+        self.files_worker
+            .update_files(&self.workspace_root_path, updated_files, deleted_files)
+    }
+
+    #[napi]
+    pub fn all_file_data(&self) -> Vec<FileData> {
+        self.files_worker
+            .get_files()
+            .map_or_else(Vec::new, |files| {
+                files
+                    .iter()
+                    .map(|(path, content)| FileData {
+                        file: path.to_normalized_string(),
+                        hash: content.clone(),
+                    })
+                    .collect()
+            })
+    }
+}
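The `FilesWorker` above is the heart of the class: construction spawns a walker thread that hashes every file, and `get_files` blocks on a condvar until that first walk finishes. From TypeScript this means creating the context is cheap while the first query pays the wait. A sketch that makes the split visible; the path and the expectation of near-instant construction are illustrative:

```ts
import { performance } from 'perf_hooks';
import { WorkspaceContext } from '../native';

const start = performance.now();
const context = new WorkspaceContext('/path/to/workspace'); // illustrative path
console.log(`construct: ${(performance.now() - start).toFixed(1)}ms`);

const firstRead = performance.now();
context.allFileData(); // blocks until the background walk + hash completes
console.log(`first read: ${(performance.now() - firstRead).toFixed(1)}ms`);

context.allFileData(); // subsequent reads only take the mutex
```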
@@ -1,5 +1,3 @@
-use std::path::PathBuf;
-
 use napi::bindgen_prelude::*;
 use thiserror::Error;

@@ -23,19 +21,18 @@ impl AsRef<str> for WorkspaceErrors {
 #[derive(Debug, Error)]
 #[non_exhaustive]
 pub enum InternalWorkspaceErrors {
-    #[error("{file}")]
-    ParseError { file: PathBuf },
-    #[error("{msg}")]
-    Generic { msg: String },
+    #[error("{0}")]
+    ParseError(String),
+    #[error("{0}")]
+    Generic(String),
 }

 impl From<InternalWorkspaceErrors> for napi::Error<WorkspaceErrors> {
     fn from(value: InternalWorkspaceErrors) -> Self {
+        let msg = value.to_string();
         match value {
-            InternalWorkspaceErrors::ParseError { file } => {
-                Error::new(WorkspaceErrors::ParseError, file.display().to_string())
-            }
-            InternalWorkspaceErrors::Generic { msg } => Error::new(WorkspaceErrors::Generic, msg),
+            InternalWorkspaceErrors::ParseError(_) => Error::new(WorkspaceErrors::ParseError, msg),
+            InternalWorkspaceErrors::Generic(_) => Error::new(WorkspaceErrors::Generic, msg),
         }
     }
 }
@@ -1,48 +0,0 @@
-use crate::native::utils::glob::build_glob_set;
-use crate::native::utils::path::Normalize;
-use crate::native::walker::nx_walker;
-use crate::native::workspace::types::ConfigurationParserResult;
-
-use std::path::PathBuf;
-
-#[napi]
-/// Get workspace config files based on provided globs
-pub fn get_project_configuration_files(
-    workspace_root: String,
-    globs: Vec<String>,
-) -> napi::Result<Vec<String>> {
-    let globs = build_glob_set(&globs)?;
-    let config_paths: Vec<String> = nx_walker(workspace_root, move |rec| {
-        let mut config_paths: Vec<PathBuf> = Vec::new();
-        for (path, _) in rec {
-            if globs.is_match(&path) {
-                config_paths.push(path);
-            }
-        }
-
-        config_paths
-            .into_iter()
-            .map(|p| p.to_normalized_string())
-            .collect()
-    });
-
-    Ok(config_paths)
-}
-
-#[napi]
-/// Get workspace config files based on provided globs
-pub fn get_project_configurations<ConfigurationParser>(
-    workspace_root: String,
-    globs: Vec<String>,
-    parse_configurations: ConfigurationParser,
-) -> napi::Result<ConfigurationParserResult>
-where
-    ConfigurationParser: Fn(Vec<String>) -> napi::Result<ConfigurationParserResult>,
-{
-    let config_paths: Vec<String> = get_project_configuration_files(workspace_root, globs).unwrap();
-
-    parse_configurations(config_paths)
-}
-
-#[cfg(test)]
-mod test {}
@@ -1,4 +1,5 @@
+pub mod config_files;
+pub mod context;
 mod errors;
-pub mod get_config_files;
-pub mod get_nx_workspace_files;
 mod types;
+pub mod workspace_files;
@@ -12,4 +12,4 @@ pub enum FileLocation {
 pub struct ConfigurationParserResult {
     pub project_nodes: HashMap<String, JsObject>,
     pub external_nodes: HashMap<String, JsObject>,
-}
+}
@@ -1,20 +1,18 @@
 use napi::JsObject;
-use std::collections::{HashMap, HashSet};
+use std::collections::HashMap;
 use std::path::{Path, PathBuf};

 use rayon::prelude::*;
 use tracing::trace;
-use xxhash_rust::xxh3;

-use crate::native::logger::enable_logger;
 use crate::native::types::FileData;
-use crate::native::utils::glob::build_glob_set;
 use crate::native::utils::path::Normalize;
-use crate::native::walker::nx_walker;
-use crate::native::workspace::errors::WorkspaceErrors;
+use crate::native::workspace::config_files;
+use crate::native::workspace::errors::{InternalWorkspaceErrors, WorkspaceErrors};
 use crate::native::workspace::types::{ConfigurationParserResult, FileLocation};

 #[napi(object)]
+#[derive(Default)]
 pub struct NxWorkspaceFiles {
     pub project_file_map: HashMap<String, Vec<FileData>>,
     pub global_files: Vec<FileData>,
@@ -22,44 +20,41 @@ pub struct NxWorkspaceFiles {
     pub external_nodes: HashMap<String, JsObject>,
 }

-#[napi]
-pub fn get_workspace_files_native<ConfigurationParser>(
-    workspace_root: String,
+pub(super) fn get_files<ConfigurationParser>(
     globs: Vec<String>,
     parse_configurations: ConfigurationParser,
+    file_data: Option<&[(PathBuf, String)]>,
 ) -> napi::Result<NxWorkspaceFiles, WorkspaceErrors>
 where
     ConfigurationParser: Fn(Vec<String>) -> napi::Result<ConfigurationParserResult>,
 {
-    enable_logger();
+    let Some(file_data) = file_data else {
+        return Ok(Default::default())
+    };

-    trace!("{workspace_root}, {globs:?}");
-
-    let (projects, mut file_data) = get_file_data(&workspace_root, globs)
-        .map_err(|err| napi::Error::new(WorkspaceErrors::Generic, err.to_string()))?;
-
-    let projects_vec: Vec<String> = projects.iter().map(|p| p.to_normalized_string()).collect();
-
-    let parsed_graph_nodes = parse_configurations(projects_vec)
-        .map_err(|e| napi::Error::new(WorkspaceErrors::ParseError, e.to_string()))?;
+    trace!("{globs:?}");
+    let parsed_graph_nodes =
+        config_files::get_project_configurations(globs, Some(file_data), parse_configurations)
+            .map_err(|e| InternalWorkspaceErrors::ParseError(e.to_string()))?;

     let root_map = create_root_map(&parsed_graph_nodes.project_nodes);

     trace!(?root_map);

-    // Files need to be sorted each time because when we do hashArray in the TaskHasher.js, the order of the files should be deterministic
-    file_data.par_sort();
-
     let file_locations = file_data
         .into_par_iter()
-        .map(|file_data| {
-            let file_path = Path::new(&file_data.file);
+        .map(|(file_path, hash)| {
             let mut parent = file_path.parent().unwrap_or_else(|| Path::new("."));

             while root_map.get(parent).is_none() && parent != Path::new(".") {
                 parent = parent.parent().unwrap_or_else(|| Path::new("."));
             }

+            let file_data = FileData {
+                file: file_path.to_normalized_string(),
+                hash: hash.clone(),
+            };
+
             match root_map.get(parent) {
                 Some(project_name) => (FileLocation::Project(project_name.into()), file_data),
                 None => (FileLocation::Global, file_data),
@@ -110,23 +105,3 @@ fn create_root_map(
         })
         .collect()
 }
-
-type WorkspaceData = (HashSet<PathBuf>, Vec<FileData>);
-fn get_file_data(workspace_root: &str, globs: Vec<String>) -> anyhow::Result<WorkspaceData> {
-    let globs = build_glob_set(&globs)?;
-    let (projects, file_data) = nx_walker(workspace_root, move |rec| {
-        let mut projects: HashSet<PathBuf> = HashSet::new();
-        let mut file_hashes: Vec<FileData> = vec![];
-        for (path, content) in rec {
-            file_hashes.push(FileData {
-                file: path.to_normalized_string(),
-                hash: xxh3::xxh3_64(&content).to_string(),
-            });
-            if globs.is_match(&path) {
-                projects.insert(path);
-            }
-        }
-        (projects, file_hashes)
-    });
-    Ok((projects, file_data))
-}
@@ -6,8 +6,8 @@ import { buildExplicitPackageJsonDependencies } from './explicit-package-json-de
 import { ProjectGraphProjectNode } from '../../../../config/project-graph';
 import { ProjectGraphBuilder } from '../../../../project-graph/project-graph-builder';
 import { createProjectFileMap } from '../../../../project-graph/file-map-utils';
-import { fileHasher } from '../../../../hasher/file-hasher';
 import { CreateDependenciesContext } from '../../../../utils/nx-plugin';
+import { getAllFileDataInContext } from '../../../../utils/workspace-context';

 describe('explicit package json dependencies', () => {
   let ctx: CreateDependenciesContext;
@@ -51,8 +51,6 @@ describe('explicit package json dependencies', () => {
       }),
     });

-    await fileHasher.init();
-
     projects = {
       proj: {
         name: 'proj',
@@ -75,7 +73,7 @@ describe('explicit package json dependencies', () => {

     const projectFileMap = createProjectFileMap(
       projectsConfigurations as any,
-      fileHasher.allFileData()
+      getAllFileDataInContext(tempFs.tempDir)
     ).projectFileMap;

     const builder = new ProjectGraphBuilder(undefined, projectFileMap);
@@ -5,6 +5,7 @@ import { ProjectGraphBuilder } from '../../../../project-graph/project-graph-bui
 import { buildExplicitTypeScriptDependencies } from './explicit-project-dependencies';
 import { retrieveWorkspaceFiles } from '../../../../project-graph/utils/retrieve-workspace-files';
 import { CreateDependenciesContext } from '../../../../utils/nx-plugin';
+import { setupWorkspaceContext } from '../../../../utils/workspace-context';

 // projectName => tsconfig import path
 const dependencyProjectNamesToImportPaths = {
@@ -559,6 +560,8 @@ async function createContext(
     ...projectsFs,
   });

+  setupWorkspaceContext(tempFs.tempDir);
+
   const { projectFileMap, projectConfigurations } =
     await retrieveWorkspaceFiles(tempFs.tempDir, nxJson);
@@ -13,7 +13,8 @@ import {
 } from '../config/workspace-json-project-json';
 import { daemonClient } from '../daemon/client/client';
 import { readProjectsConfigurationFromProjectGraph } from './project-graph';
-import { fileHasher } from '../hasher/file-hasher';
+import { getAllFileDataInContext } from '../utils/workspace-context';
+import { workspaceRoot } from '../utils/workspace-root';

 export async function createProjectFileMapUsingProjectGraph(
   graph: ProjectGraph
@@ -24,8 +25,7 @@ export async function createProjectFileMapUsingProjectGraph(
   if (daemonClient.enabled()) {
     files = await daemonClient.getAllFileData();
   } else {
-    await fileHasher.ensureInitialized();
-    files = fileHasher.allFileData();
+    files = getAllFileDataInContext(workspaceRoot);
   }

   return createProjectFileMap(configs, files).projectFileMap;
@@ -27,6 +27,11 @@ import {
   NxPluginV2,
 } from '../../utils/nx-plugin';
 import { CreateProjectJsonProjectsPlugin } from '../../plugins/project-json/build-nodes/project-json';
+import {
+  getProjectConfigurationFilesFromContext,
+  getProjectConfigurationsFromContext,
+  getNxWorkspaceFilesFromContext,
+} from '../../utils/workspace-context';

 /**
  * Walks the workspace directory to create the `projectFileMap`, `ProjectConfigurations` and `allWorkspaceFiles`
@@ -38,9 +43,6 @@ export async function retrieveWorkspaceFiles(
   workspaceRoot: string,
   nxJson: NxJsonConfiguration
 ) {
-  const { getWorkspaceFilesNative } =
-    require('../../native') as typeof import('../../native');
-
   performance.mark('native-file-deps:start');
   const plugins = await loadNxPlugins(
     nxJson?.plugins ?? [],
@@ -58,19 +60,23 @@ export async function retrieveWorkspaceFiles(
   performance.mark('get-workspace-files:start');

   const { projectConfigurations, projectFileMap, globalFiles, externalNodes } =
-    getWorkspaceFilesNative(workspaceRoot, globs, (configs: string[]) => {
-      const projectConfigurations = createProjectConfigurations(
-        workspaceRoot,
-        nxJson,
-        configs,
-        plugins
-      );
+    getNxWorkspaceFilesFromContext(
+      workspaceRoot,
+      globs,
+      (configs: string[]) => {
+        const projectConfigurations = createProjectConfigurations(
+          workspaceRoot,
+          nxJson,
+          configs,
+          plugins
+        );

-      return {
-        projectNodes: projectConfigurations.projects,
-        externalNodes: projectConfigurations.externalNodes,
-      };
-    }) as NxWorkspaceFiles;
+        return {
+          projectNodes: projectConfigurations.projects,
+          externalNodes: projectConfigurations.externalNodes,
+        };
+      }
+    ) as NxWorkspaceFiles;
   performance.mark('get-workspace-files:end');
   performance.measure(
     'get-workspace-files',
@@ -165,21 +171,23 @@ function _retrieveProjectConfigurations(
   externalNodes: Record<string, ProjectGraphExternalNode>;
   projectNodes: Record<string, ProjectConfiguration>;
 } {
-  const { getProjectConfigurations } =
-    require('../../native') as typeof import('../../native');
-  return getProjectConfigurations(workspaceRoot, globs, (configs: string[]) => {
-    const projectConfigurations = createProjectConfigurations(
-      workspaceRoot,
-      nxJson,
-      configs,
-      plugins
-    );
+  return getProjectConfigurationsFromContext(
+    workspaceRoot,
+    globs,
+    (configs: string[]) => {
+      const projectConfigurations = createProjectConfigurations(
+        workspaceRoot,
+        nxJson,
+        configs,
+        plugins
+      );

-    return {
-      projectNodes: projectConfigurations.projects,
-      externalNodes: projectConfigurations.externalNodes,
-    };
-  }) as {
+      return {
+        projectNodes: projectConfigurations.projects,
+        externalNodes: projectConfigurations.externalNodes,
+      };
+    }
+  ) as {
     externalNodes: Record<string, ProjectGraphExternalNode>;
     projectNodes: Record<string, ProjectConfiguration>;
   };
@@ -193,17 +201,13 @@ export async function retrieveProjectConfigurationPaths(
     root,
     await loadNxPlugins(nxJson?.plugins ?? [], getNxRequirePaths(root), root)
   );
-  const { getProjectConfigurationFiles } =
-    require('../../native') as typeof import('../../native');
-  return getProjectConfigurationFiles(root, projectGlobPatterns);
+  return getProjectConfigurationFilesFromContext(root, projectGlobPatterns);
 }

 export function retrieveProjectConfigurationPathsWithoutPluginInference(
   root: string
 ): string[] {
-  const { getProjectConfigurationFiles } =
-    require('../../native') as typeof import('../../native');
-  return getProjectConfigurationFiles(
+  return getProjectConfigurationFilesFromContext(
     root,
     configurationGlobsWithoutPlugins(root)
   );
@@ -226,9 +230,7 @@ export function retrieveProjectConfigurationsWithoutPluginInference(
     return projectsWithoutPluginCache.get(cacheKey);
   }

-  const { getProjectConfigurations } =
-    require('../../native') as typeof import('../../native');
-  const projectConfigurations = getProjectConfigurations(
+  const projectConfigurations = getProjectConfigurationsFromContext(
     root,
     projectGlobPatterns,
     (configs: string[]) => {
@@ -266,7 +268,7 @@ function buildAllWorkspaceFiles(
   return fileData;
 }

-function createProjectConfigurations(
+export function createProjectConfigurations(
   workspaceRoot: string,
   nxJson: NxJsonConfiguration,
   configFiles: string[],
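The net effect of these hunks: every native call in this file now goes through the context wrappers, which take the workspace root as their first argument so the wrapper module can lazily (re)build the context. A condensed sketch of the call shape, with `buildNodes` as a hypothetical stand-in for the `createProjectConfigurations` plumbing shown above:

```ts
import { getNxWorkspaceFilesFromContext } from '../../utils/workspace-context';

declare function buildNodes(configs: string[]): {
  projectNodes: Record<string, object>;
  externalNodes: Record<string, object>;
}; // hypothetical stand-in

function retrieve(workspaceRoot: string, globs: string[]) {
  // Same callback contract as the old getWorkspaceFilesNative, minus the
  // implicit walk: the shared context supplies the already-hashed file list.
  return getNxWorkspaceFilesFromContext(workspaceRoot, globs, (configs) =>
    buildNodes(configs)
  );
}
```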
@@ -32,7 +32,6 @@ import {
 import { hashTasksThatDoNotDependOnOutputsOfOtherTasks } from '../hasher/hash-task';
 import { daemonClient } from '../daemon/client/client';
 import { StoreRunInformationLifeCycle } from './life-cycles/store-run-information-life-cycle';
-import { fileHasher } from '../hasher/file-hasher';
 import { getProjectFileMap } from '../project-graph/build-project-graph';
 import { performance } from 'perf_hooks';

@@ -241,8 +240,7 @@ export async function invokeTasksRunner({
       allWorkspaceFiles,
       projectGraph,
       nxJson,
-      runnerOptions,
-      fileHasher
+      runnerOptions
     );
   }
@@ -1,12 +1,12 @@
 import { FileData } from '../config/project-graph';
 import { daemonClient } from '../daemon/client/client';
-import { fileHasher } from '../hasher/file-hasher';
+import { getAllFileDataInContext } from './workspace-context';
+import { workspaceRoot } from './workspace-root';

 export function allFileData(): Promise<FileData[]> {
   if (daemonClient.enabled()) {
     return daemonClient.getAllFileData();
   } else {
-    fileHasher.ensureInitialized();
-    return Promise.resolve(fileHasher.allFileData());
+    return Promise.resolve(getAllFileDataInContext(workspaceRoot));
   }
 }
packages/nx/src/utils/workspace-context.ts (new file, 65 lines)
@@ -0,0 +1,65 @@
+import type { ConfigurationParserResult, WorkspaceContext } from '../native';
+import { performance } from 'perf_hooks';
+
+let workspaceContext: WorkspaceContext | undefined;
+
+export function setupWorkspaceContext(workspaceRoot: string) {
+  const { WorkspaceContext } =
+    require('../native') as typeof import('../native');
+  performance.mark('workspace-context');
+  workspaceContext = new WorkspaceContext(workspaceRoot);
+  performance.mark('workspace-context:end');
+  performance.measure(
+    'workspace context init',
+    'workspace-context',
+    'workspace-context:end'
+  );
+}
+
+export function getNxWorkspaceFilesFromContext(
+  workspaceRoot: string,
+  globs: string[],
+  parseConfigurations: (files: string[]) => ConfigurationParserResult
+) {
+  ensureContextAvailable(workspaceRoot);
+  return workspaceContext.getWorkspaceFiles(globs, parseConfigurations);
+}
+
+export function getProjectConfigurationFilesFromContext(
+  workspaceRoot: string,
+  globs: string[]
+) {
+  ensureContextAvailable(workspaceRoot);
+  return workspaceContext.getProjectConfigurationFiles(globs);
+}
+
+export function getProjectConfigurationsFromContext(
+  workspaceRoot: string,
+  globs: string[],
+  parseConfigurations: (files: string[]) => ConfigurationParserResult
+) {
+  ensureContextAvailable(workspaceRoot);
+  return workspaceContext.getProjectConfigurations(globs, parseConfigurations);
+}
+
+export function updateFilesInContext(
+  updatedFiles: string[],
+  deletedFiles: string[]
+) {
+  return workspaceContext?.incrementalUpdate(updatedFiles, deletedFiles);
+}
+
+export function getAllFileDataInContext(workspaceRoot: string) {
+  ensureContextAvailable(workspaceRoot);
+  return workspaceContext.allFileData();
+}
+
+function ensureContextAvailable(workspaceRoot: string) {
+  if (!workspaceContext || workspaceContext?.workspaceRoot !== workspaceRoot) {
+    setupWorkspaceContext(workspaceRoot);
+  }
+}
+
+export function resetWorkspaceContext() {
+  workspaceContext = undefined;
+}
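One subtlety worth calling out in this new module: `ensureContextAvailable` rebuilds the context whenever the requested root differs from the cached one, so callers never invoke `setupWorkspaceContext` directly unless they want the eager walk. A sketch of how a test might lean on that behavior; the paths are illustrative:

```ts
import {
  getAllFileDataInContext,
  resetWorkspaceContext,
} from './workspace-context';

// First call for a root lazily constructs the native WorkspaceContext...
const files = getAllFileDataInContext('/tmp/test-workspace'); // illustrative

// ...and asking for a different root swaps the context out transparently.
const otherFiles = getAllFileDataInContext('/tmp/other-workspace');

// Tests reset between cases so a stale context never leaks across roots.
resetWorkspaceContext();
```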