feat(core): lock graph creation when running in another process (#29408)

## Current Behavior
Running Nx in multiple processes at the same time with the daemon
disabled can cripple a system due to excess memory usage when creating
the graph. This is due to plugin workers being started per-parent
process when there is no daemon. This change introduces a file lock to
prevent simultaneous processing; the waiting processes read from the
cache once the first run completes.

Currently, running `nx show projects` 30 times in parallel looks
something like this:

30 processes exited within 37535ms

## Expected Behavior
30 processes exited within 6435ms

## Test Script
```js
//@ts-check

// Benchmark: spawn `iterations` parallel `nx show projects` invocations with
// the daemon disabled and report how long it takes for all of them to exit.
const { spawn } = require('child_process');

// Indices of child processes that have not yet exited.
const alive = new Set();

const start = Date.now();
const iterations = 30;

for (let i = 0; i < iterations; i++) {
  const cp = spawn('npx nx show projects', [], {
    shell: true,
    env: {
      ...process.env,
      NX_DAEMON: 'false',
      NX_VERBOSE_LOGGING: 'true',
    },
  });
  alive.add(i);
  cp.stderr.on('data', (data) => {
    console.error(`stderr [${i}]: ${data}`);
  });
  cp.on('exit', (code) => {
    console.log(`child process ${i} exited with code ${code}`);
    alive.delete(i);
  });
}

// Poll until every child has exited, then report total wall-clock time.
const timer = setInterval(() => {
  if (alive.size === 0) {
    clearInterval(timer);
    console.log(
      `${iterations} processes exited within ${Date.now() - start}ms`
    );
  }
}, 1);

```
This commit is contained in:
Craigory Coppola 2025-01-28 09:46:52 -05:00 committed by GitHub
parent cbbe14b8e5
commit 5721ea3c21
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
23 changed files with 629 additions and 64 deletions

1
Cargo.lock generated
View File

@ -1509,6 +1509,7 @@ dependencies = [
"swc_ecma_visit",
"tempfile",
"thiserror",
"tokio",
"tracing",
"tracing-subscriber",
"walkdir",

View File

@ -20,6 +20,7 @@ It only uses language primitives and immutable objects
### Classes
- [AggregateCreateNodesError](../../devkit/documents/AggregateCreateNodesError)
- [StaleProjectGraphCacheError](../../devkit/documents/StaleProjectGraphCacheError)
### Interfaces

View File

@ -0,0 +1,144 @@
# Class: StaleProjectGraphCacheError
## Hierarchy
- `Error`
**`StaleProjectGraphCacheError`**
## Table of contents
### Constructors
- [constructor](../../devkit/documents/StaleProjectGraphCacheError#constructor)
### Properties
- [cause](../../devkit/documents/StaleProjectGraphCacheError#cause): unknown
- [message](../../devkit/documents/StaleProjectGraphCacheError#message): string
- [name](../../devkit/documents/StaleProjectGraphCacheError#name): string
- [stack](../../devkit/documents/StaleProjectGraphCacheError#stack): string
- [prepareStackTrace](../../devkit/documents/StaleProjectGraphCacheError#preparestacktrace): Function
- [stackTraceLimit](../../devkit/documents/StaleProjectGraphCacheError#stacktracelimit): number
### Methods
- [captureStackTrace](../../devkit/documents/StaleProjectGraphCacheError#capturestacktrace)
## Constructors
### constructor
**new StaleProjectGraphCacheError**(): [`StaleProjectGraphCacheError`](../../devkit/documents/StaleProjectGraphCacheError)
#### Returns
[`StaleProjectGraphCacheError`](../../devkit/documents/StaleProjectGraphCacheError)
#### Overrides
Error.constructor
## Properties
### cause
`Optional` **cause**: `unknown`
#### Inherited from
Error.cause
---
### message
**message**: `string`
#### Inherited from
Error.message
---
### name
**name**: `string`
#### Inherited from
Error.name
---
### stack
`Optional` **stack**: `string`
#### Inherited from
Error.stack
---
### prepareStackTrace
`Static` `Optional` **prepareStackTrace**: (`err`: `Error`, `stackTraces`: `CallSite`[]) => `any`
Optional override for formatting stack traces
**`See`**
https://v8.dev/docs/stack-trace-api#customizing-stack-traces
#### Type declaration
▸ (`err`, `stackTraces`): `any`
##### Parameters
| Name | Type |
| :------------ | :----------- |
| `err` | `Error` |
| `stackTraces` | `CallSite`[] |
##### Returns
`any`
#### Inherited from
Error.prepareStackTrace
---
### stackTraceLimit
`Static` **stackTraceLimit**: `number`
#### Inherited from
Error.stackTraceLimit
## Methods
### captureStackTrace
**captureStackTrace**(`targetObject`, `constructorOpt?`): `void`
Create .stack property on a target object
#### Parameters
| Name | Type |
| :---------------- | :--------- |
| `targetObject` | `object` |
| `constructorOpt?` | `Function` |
#### Returns
`void`
#### Inherited from
Error.captureStackTrace

View File

@ -1,9 +1,15 @@
# Function: readCachedProjectGraph
**readCachedProjectGraph**(): [`ProjectGraph`](../../devkit/documents/ProjectGraph)
**readCachedProjectGraph**(`minimumComputedAt?`): [`ProjectGraph`](../../devkit/documents/ProjectGraph)
Synchronously reads the latest cached copy of the workspace's ProjectGraph.
#### Parameters
| Name | Type | Description |
| :------------------- | :------- | :----------------------------------------------------------------------------- |
| `minimumComputedAt?` | `number` | The minimum timestamp that the cached ProjectGraph must have been computed at. |
#### Returns
[`ProjectGraph`](../../devkit/documents/ProjectGraph)

View File

@ -20,6 +20,7 @@ It only uses language primitives and immutable objects
### Classes
- [AggregateCreateNodesError](../../devkit/documents/AggregateCreateNodesError)
- [StaleProjectGraphCacheError](../../devkit/documents/StaleProjectGraphCacheError)
### Interfaces

View File

@ -40,7 +40,7 @@ export function parseTargetString(
targetString: string,
projectGraphOrCtx?: ProjectGraph | ExecutorContext
): Target {
let projectGraph =
let projectGraph: ProjectGraph =
projectGraphOrCtx && 'projectGraph' in projectGraphOrCtx
? projectGraphOrCtx.projectGraph
: (projectGraphOrCtx as ProjectGraph);

View File

@ -74,3 +74,5 @@ assert_fs = "1.0.10"
# This is only used for unit tests
swc_ecma_dep_graph = "0.109.1"
tempfile = "3.13.0"
# We only explicitly use tokio for async tests
tokio = "1.38.0"

View File

@ -299,7 +299,12 @@ async function processFilesAndCreateAndSerializeProjectGraph(
};
}
}
writeCache(
g.projectFileMapCache,
g.projectGraph,
projectConfigurationsResult.sourceMaps,
errors
);
if (errors.length > 0) {
return {
error: new DaemonProjectGraphError(
@ -316,7 +321,6 @@ async function processFilesAndCreateAndSerializeProjectGraph(
serializedSourceMaps: null,
};
} else {
writeCache(g.projectFileMapCache, g.projectGraph);
return g;
}
} catch (err) {

View File

@ -63,7 +63,10 @@ export type {
PostTasksExecutionContext,
} from './project-graph/plugins';
export { AggregateCreateNodesError } from './project-graph/error-types';
export {
AggregateCreateNodesError,
StaleProjectGraphCacheError,
} from './project-graph/error-types';
export { createNodesFromFiles } from './project-graph/plugins';

View File

@ -1,5 +1,4 @@
use crate::native::db::connection::NxDbConnection;
use fs4::fs_std::FileExt;
use rusqlite::{Connection, OpenFlags};
use std::fs::{remove_file, File};
use std::path::{Path, PathBuf};
@ -12,9 +11,7 @@ pub(super) struct LockFile {
pub(super) fn unlock_file(lock_file: &LockFile) {
if lock_file.path.exists() {
lock_file
.file
.unlock()
fs4::fs_std::FileExt::unlock(&lock_file.file)
.and_then(|_| remove_file(&lock_file.path))
.ok();
}
@ -26,8 +23,7 @@ pub(super) fn create_lock_file(db_path: &Path) -> anyhow::Result<LockFile> {
.map_err(|e| anyhow::anyhow!("Unable to create db lock file: {:?}", e))?;
trace!("Getting lock on db lock file");
lock_file
.lock_exclusive()
fs4::fs_std::FileExt::lock_exclusive(&lock_file)
.inspect(|_| trace!("Got lock on db lock file"))
.map_err(|e| anyhow::anyhow!("Unable to lock the db lock file: {:?}", e))?;
Ok(LockFile {
@ -77,7 +73,10 @@ pub(super) fn initialize_db(nx_version: String, db_path: &Path) -> anyhow::Resul
Ok(c)
}
Err(reason) => {
trace!("Unable to connect to existing database because: {:?}", reason);
trace!(
"Unable to connect to existing database because: {:?}",
reason
);
trace!("Removing existing incompatible database");
remove_file(db_path)?;

View File

@ -13,6 +13,15 @@ export declare class ChildProcess {
onOutput(callback: (message: string) => void): void
}
export declare class FileLock {
locked: boolean
constructor(lockFilePath: string)
unlock(): void
check(): boolean
wait(): Promise<void>
lock(): void
}
export declare class HashPlanner {
constructor(nxJson: NxJson, projectGraph: ExternalObject<ProjectGraph>)
getPlans(taskIds: Array<string>, taskGraph: TaskGraph): Record<string, string[]>

View File

@ -362,6 +362,7 @@ if (!nativeBinding) {
}
module.exports.ChildProcess = nativeBinding.ChildProcess
module.exports.FileLock = nativeBinding.FileLock
module.exports.HashPlanner = nativeBinding.HashPlanner
module.exports.ImportResult = nativeBinding.ImportResult
module.exports.NxCache = nativeBinding.NxCache

View File

@ -85,15 +85,18 @@ function __napi_rs_initialize_modules(__napiInstance) {
__napiInstance.exports['__napi_register__ExternalDependenciesInput_struct_36']?.()
__napiInstance.exports['__napi_register__DepsOutputsInput_struct_37']?.()
__napiInstance.exports['__napi_register__NxJson_struct_38']?.()
__napiInstance.exports['__napi_register__WorkspaceContext_struct_39']?.()
__napiInstance.exports['__napi_register__WorkspaceContext_impl_48']?.()
__napiInstance.exports['__napi_register__WorkspaceErrors_49']?.()
__napiInstance.exports['__napi_register__NxWorkspaceFiles_struct_50']?.()
__napiInstance.exports['__napi_register__NxWorkspaceFilesExternals_struct_51']?.()
__napiInstance.exports['__napi_register__UpdatedWorkspaceFiles_struct_52']?.()
__napiInstance.exports['__napi_register__FileMap_struct_53']?.()
__napiInstance.exports['__napi_register____test_only_transfer_file_map_54']?.()
__napiInstance.exports['__napi_register__FileLock_struct_39']?.()
__napiInstance.exports['__napi_register__FileLock_impl_41']?.()
__napiInstance.exports['__napi_register__WorkspaceContext_struct_42']?.()
__napiInstance.exports['__napi_register__WorkspaceContext_impl_51']?.()
__napiInstance.exports['__napi_register__WorkspaceErrors_52']?.()
__napiInstance.exports['__napi_register__NxWorkspaceFiles_struct_53']?.()
__napiInstance.exports['__napi_register__NxWorkspaceFilesExternals_struct_54']?.()
__napiInstance.exports['__napi_register__UpdatedWorkspaceFiles_struct_55']?.()
__napiInstance.exports['__napi_register__FileMap_struct_56']?.()
__napiInstance.exports['__napi_register____test_only_transfer_file_map_57']?.()
}
export const FileLock = __napiModule.exports.FileLock
export const HashPlanner = __napiModule.exports.HashPlanner
export const ImportResult = __napiModule.exports.ImportResult
export const TaskHasher = __napiModule.exports.TaskHasher

View File

@ -116,15 +116,18 @@ function __napi_rs_initialize_modules(__napiInstance) {
__napiInstance.exports['__napi_register__ExternalDependenciesInput_struct_36']?.()
__napiInstance.exports['__napi_register__DepsOutputsInput_struct_37']?.()
__napiInstance.exports['__napi_register__NxJson_struct_38']?.()
__napiInstance.exports['__napi_register__WorkspaceContext_struct_39']?.()
__napiInstance.exports['__napi_register__WorkspaceContext_impl_48']?.()
__napiInstance.exports['__napi_register__WorkspaceErrors_49']?.()
__napiInstance.exports['__napi_register__NxWorkspaceFiles_struct_50']?.()
__napiInstance.exports['__napi_register__NxWorkspaceFilesExternals_struct_51']?.()
__napiInstance.exports['__napi_register__UpdatedWorkspaceFiles_struct_52']?.()
__napiInstance.exports['__napi_register__FileMap_struct_53']?.()
__napiInstance.exports['__napi_register____test_only_transfer_file_map_54']?.()
__napiInstance.exports['__napi_register__FileLock_struct_39']?.()
__napiInstance.exports['__napi_register__FileLock_impl_41']?.()
__napiInstance.exports['__napi_register__WorkspaceContext_struct_42']?.()
__napiInstance.exports['__napi_register__WorkspaceContext_impl_51']?.()
__napiInstance.exports['__napi_register__WorkspaceErrors_52']?.()
__napiInstance.exports['__napi_register__NxWorkspaceFiles_struct_53']?.()
__napiInstance.exports['__napi_register__NxWorkspaceFilesExternals_struct_54']?.()
__napiInstance.exports['__napi_register__UpdatedWorkspaceFiles_struct_55']?.()
__napiInstance.exports['__napi_register__FileMap_struct_56']?.()
__napiInstance.exports['__napi_register____test_only_transfer_file_map_57']?.()
}
module.exports.FileLock = __napiModule.exports.FileLock
module.exports.HashPlanner = __napiModule.exports.HashPlanner
module.exports.ImportResult = __napiModule.exports.ImportResult
module.exports.TaskHasher = __napiModule.exports.TaskHasher

View File

@ -0,0 +1,20 @@
// Fixture used by file-lock.spec.ts: the first process to start takes the
// lock and holds it for 5 seconds; any later process waits for its release.
const { FileLock } = require('../../native-bindings.js');
const ora = require('ora');
const { join } = require('path');
const { tmpdir } = require('os');

const lockPath = join(tmpdir(), 'nx-unit-tests', 'file-lock-fixture');

async function main() {
  const lock = new FileLock(lockPath);
  if (!lock.locked) {
    // Nobody holds the lock yet: take it, simulate some work, release it.
    await lock.lock();
    await new Promise((resolve) => setTimeout(resolve, 5000));
    console.log('ran with lock');
    await lock.unlock();
  } else {
    // Another process holds the lock: block until it is released.
    const spinner = ora('Waiting for lock').start();
    await lock.wait();
    spinner.stop();
    console.log('waited for lock');
  }
}

main();

View File

@ -0,0 +1,44 @@
import { fork } from 'child_process';
import { join } from 'path';
describe('file-lock', () => {
  it('should block the second call until the first one is done', async () => {
    const combinedOutputs: string[] = [];

    // Both children run the same fixture; whichever starts first takes the
    // lock, the other waits for it.
    const forkFixture = (label: string) =>
      fork(join(__dirname, './__fixtures__/file-lock.fixture.js'), {
        env: {
          LABEL: label,
          NX_NATIVE_LOGGING: 'trace',
        },
        stdio: 'pipe',
        execArgv: ['--require', 'ts-node/register'],
      });

    const a = forkFixture('a');
    // Gives a bit of time to make the outputs of the tests more predictable...
    // if both start at the same time, it's hard to guarantee that a will get
    // the lock before b.
    await new Promise((r) => setTimeout(r, 500));
    const b = forkFixture('b');

    a.stdout.on('data', (data) => {
      combinedOutputs.push('A: ' + data.toString().trim());
    });
    b.stdout.on('data', (data) => {
      combinedOutputs.push('B: ' + data.toString().trim());
    });
    a.stderr.pipe(process.stderr);
    b.stderr.pipe(process.stderr);

    await Promise.all([a, b].map((p) => new Promise((r) => p.once('exit', r))));

    expect(combinedOutputs).toContain('A: ran with lock');
    expect(combinedOutputs).toContain('B: waited for lock');
  });
});

View File

@ -0,0 +1,156 @@
use napi::bindgen_prelude::*;
use std::{
fs::{self, OpenOptions},
path::Path,
};
use tracing::trace;
#[cfg(not(target_arch = "wasm32"))]
use fs4::fs_std::FileExt;
#[napi]
pub struct FileLock {
    /// Whether the lock file was held by another process when this instance
    /// was created; kept in sync by `lock`/`unlock`/`check`/`wait`.
    #[napi]
    pub locked: bool,
    // Open handle to the lock file; the OS-level lock is taken/released on it.
    file: fs::File,
    // Path to the lock file; retained so `wait` can reopen it in the future
    // spawned on the async runtime.
    lock_file_path: String,
}
/// Advisory file lock exposed to JS. Intended usage:
///
/// const lock = new FileLock('lockfile.lock');
/// if (lock.locked) {
///     await lock.wait();
///     readFromCache();
/// } else {
///     lock.lock();
///     // ... do some work
///     writeToCache();
///     lock.unlock();
/// }
#[napi]
#[cfg(not(target_arch = "wasm32"))]
impl FileLock {
    /// Opens (creating if necessary) the lock file and probes whether some
    /// other process currently holds it; the result is stored in `locked`.
    ///
    /// NOTE(review): the probe below is try-lock-then-unlock, so `locked` is
    /// only a snapshot — another process may grab the lock immediately after
    /// the probe. Callers are expected to tolerate that race.
    #[napi(constructor)]
    pub fn new(lock_file_path: String) -> anyhow::Result<Self> {
        // Creates the directory where the lock file will be stored
        fs::create_dir_all(Path::new(&lock_file_path).parent().unwrap())?;
        // Opens the lock file
        let file = OpenOptions::new()
            .read(true)
            .write(true)
            .create(true)
            .open(&lock_file_path)?;
        trace!("Locking file {}", lock_file_path);
        // Check if the file is locked: try_lock_exclusive succeeds only when
        // no other process holds the lock.
        let file_lock: std::result::Result<(), std::io::Error> = file.try_lock_exclusive();
        if file_lock.is_ok() {
            // Checking if the file is locked, locks it, so unlock it.
            fs4::fs_std::FileExt::unlock(&file)?;
        }
        Ok(Self {
            file: file,
            locked: file_lock.is_err(),
            lock_file_path,
        })
    }

    /// Releases the OS lock held on the file and clears `locked`.
    #[napi]
    pub fn unlock(&mut self) -> Result<()> {
        fs4::fs_std::FileExt::unlock(&self.file)?;
        self.locked = false;
        Ok(())
    }

    /// Re-probes the lock state (same try-lock-then-unlock pattern as the
    /// constructor), refreshes `locked`, and returns it.
    #[napi]
    pub fn check(&mut self) -> Result<bool> {
        // Check if the file is locked
        let file_lock: std::result::Result<(), std::io::Error> = self.file.try_lock_exclusive();
        if file_lock.is_ok() {
            // Checking if the file is locked, locks it, so unlock it.
            fs4::fs_std::FileExt::unlock(&self.file)?;
        }
        self.locked = file_lock.is_err();
        Ok(self.locked)
    }

    /// Returns a JS Promise that resolves once the current holder releases
    /// the lock. The wait is implemented by taking (and immediately
    /// releasing) a shared lock on a fresh handle inside a spawned future,
    /// which blocks until the exclusive holder unlocks.
    ///
    /// NOTE(review): `self.locked` is optimistically cleared here, before the
    /// future actually resolves — callers re-check via `check()` if needed.
    #[napi(ts_return_type = "Promise<void>")]
    pub fn wait(&mut self, env: Env) -> napi::Result<napi::JsObject> {
        if self.locked {
            let lock_file_path = self.lock_file_path.clone();
            self.locked = false;
            env.spawn_future(async move {
                let file = OpenOptions::new()
                    .read(true)
                    .write(true)
                    .create(true)
                    .open(&lock_file_path)?;
                fs4::fs_std::FileExt::lock_shared(&file)?;
                fs4::fs_std::FileExt::unlock(&file)?;
                Ok(())
            })
        } else {
            // Not locked: resolve immediately.
            env.spawn_future(async move { Ok(()) })
        }
    }

    /// Takes an exclusive OS lock on the file, blocking until it is
    /// available, and sets `locked`.
    #[napi]
    pub fn lock(&mut self) -> napi::Result<()> {
        self.file.lock_exclusive()?;
        self.locked = true;
        Ok(())
    }
}
#[napi]
#[cfg(target_arch = "wasm32")]
impl FileLock {
    /// OS file locking is unavailable on WASM targets, so constructing a
    /// `FileLock` there always fails. The parameter is intentionally unused;
    /// it is kept so the napi-generated signature matches the native build.
    #[napi(constructor)]
    pub fn new(lock_file_path: String) -> anyhow::Result<Self> {
        anyhow::bail!("FileLock is not supported on WASM")
    }
}
// TODO: Fix the tests
#[cfg(test)]
mod test {
    use super::*;
    use assert_fs::prelude::*;
    use assert_fs::TempDir;

    #[test]
    fn test_new_lock() {
        // A fresh FileLock starts unlocked; locking flips the flag and
        // creates the file on disk; unlocking flips it back.
        let tmp = TempDir::new().unwrap();
        let lock_path = tmp.child("test_lock_file");
        let path_str = lock_path
            .path()
            .to_path_buf()
            .into_os_string()
            .into_string()
            .unwrap();
        let mut lock = FileLock::new(path_str).unwrap();
        assert!(!lock.locked);
        let _ = lock.lock();
        assert!(lock.locked);
        assert!(lock_path.exists());
        let _ = lock.unlock();
        assert!(!lock.locked);
    }

    #[test]
    fn test_drop() {
        // Dropping a FileLock releases the OS lock, so a second FileLock on
        // the same path observes it as unlocked.
        let tmp = TempDir::new().unwrap();
        let lock_path = tmp.child("test_lock_file");
        let path_str = lock_path
            .path()
            .to_path_buf()
            .into_os_string()
            .into_string()
            .unwrap();
        {
            let mut held = FileLock::new(path_str.clone()).unwrap();
            let _ = held.lock();
        }
        let reopened = FileLock::new(path_str).unwrap();
        assert!(!reopened.locked);
    }
}

View File

@ -11,5 +11,6 @@ pub use normalize_trait::Normalize;
#[cfg_attr(target_arch = "wasm32", path = "atomics/wasm.rs")]
pub mod atomics;
pub mod ci;
pub mod file_lock;
pub use atomics::*;

View File

@ -6,22 +6,31 @@ import { ProjectConfiguration } from '../config/workspace-json-project-json';
import { ProjectGraph } from '../config/project-graph';
import { CreateNodesFunctionV2 } from './plugins/public-api';
export type ProjectGraphErrorTypes =
| AggregateCreateNodesError
| MergeNodesError
| CreateMetadataError
| ProjectsWithNoNameError
| MultipleProjectsWithSameNameError
| ProcessDependenciesError
| WorkspaceValidityError;
export class StaleProjectGraphCacheError extends Error {
constructor() {
super(
'The project graph cache was stale. Ensure that it has been recently created before using `readCachedProjectGraph`.'
);
}
}
export class ProjectGraphError extends Error {
readonly #partialProjectGraph: ProjectGraph;
readonly #partialSourceMaps: ConfigurationSourceMaps;
constructor(
private readonly errors: Array<
| AggregateCreateNodesError
| MergeNodesError
| ProjectsWithNoNameError
| MultipleProjectsWithSameNameError
| ProcessDependenciesError
| CreateMetadataError
| WorkspaceValidityError
>,
private readonly errors: Array<ProjectGraphErrorTypes>,
partialProjectGraph: ProjectGraph,
partialSourceMaps: ConfigurationSourceMaps
partialSourceMaps: ConfigurationSourceMaps | null
) {
const messageFragments = ['Failed to process project graph.'];
const mergeNodesErrors = [];

View File

@ -18,6 +18,12 @@ import {
} from '../utils/fileutils';
import { PackageJson } from '../utils/package-json';
import { nxVersion } from '../utils/versions';
import { ConfigurationSourceMaps } from './utils/project-configuration-utils';
import {
ProjectGraphError,
ProjectGraphErrorTypes,
StaleProjectGraphCacheError,
} from './error-types';
export interface FileMapCache {
version: string;
@ -34,6 +40,8 @@ export const nxProjectGraph = join(
);
export const nxFileMap = join(workspaceDataDirectory, 'file-map.json');
export const nxSourceMaps = join(workspaceDataDirectory, 'source-maps.json');
export function ensureCacheDirectory(): void {
try {
if (!existsSync(workspaceDataDirectory)) {
@ -77,27 +85,93 @@ export function readFileMapCache(): null | FileMapCache {
return data ?? null;
}
export function readProjectGraphCache(): null | ProjectGraph {
export function readProjectGraphCache(
minimumComputedAt?: number
): null | ProjectGraph {
performance.mark('read project-graph:start');
ensureCacheDirectory();
let data = null;
try {
if (fileExists(nxProjectGraph)) {
data = readJsonFile(nxProjectGraph);
const {
computedAt,
errors,
...projectGraphCache
}: ProjectGraph & {
errors?: Error[];
computedAt?: number;
} = readJsonFile(nxProjectGraph);
if (
minimumComputedAt &&
(!computedAt || computedAt < minimumComputedAt)
) {
throw new StaleProjectGraphCacheError();
}
if (errors && errors.length > 0) {
if (!minimumComputedAt) {
// If you didn't pass minimum computed at, we do not know if
// the errors on the cached graph would be relevant to what you
// are running. Prior to adding error handling here, the graph
// would not have been written to the cache. As such, this matches
// existing behavior of the public API.
return null;
}
throw new ProjectGraphError(
errors,
projectGraphCache,
readSourceMapsCache()
);
}
return projectGraphCache;
} else {
return null;
}
} catch (error) {
if (
error instanceof StaleProjectGraphCacheError ||
error instanceof ProjectGraphError
) {
throw error;
}
console.log(
`Error reading '${nxProjectGraph}'. Continue the process without the cache.`
);
console.log(error);
return null;
} finally {
performance.mark('read project-graph:end');
performance.measure(
'read cache',
'read project-graph:start',
'read project-graph:end'
);
}
}
export function readSourceMapsCache(): null | ConfigurationSourceMaps {
performance.mark('read source-maps:start');
ensureCacheDirectory();
let data = null;
try {
if (fileExists(nxSourceMaps)) {
data = readJsonFile(nxSourceMaps);
}
} catch (error) {
console.log(
`Error reading '${nxSourceMaps}'. Continue the process without the cache.`
);
console.log(error);
}
performance.mark('read project-graph:end');
performance.mark('read source-maps:end');
performance.measure(
'read cache',
'read project-graph:start',
'read project-graph:end'
'read source-maps:start',
'read source-maps:end'
);
return data ?? null;
}
@ -123,7 +197,9 @@ export function createProjectFileMapCache(
export function writeCache(
cache: FileMapCache,
projectGraph: ProjectGraph
projectGraph: ProjectGraph,
sourceMaps: ConfigurationSourceMaps,
errors: ProjectGraphErrorTypes[]
): void {
performance.mark('write cache:start');
let retry = 1;
@ -137,13 +213,21 @@ export function writeCache(
const unique = (Math.random().toString(16) + '0000000').slice(2, 10);
const tmpProjectGraphPath = `${nxProjectGraph}~${unique}`;
const tmpFileMapPath = `${nxFileMap}~${unique}`;
const tmpSourceMapPath = `${nxSourceMaps}~${unique}`;
try {
writeJsonFile(tmpProjectGraphPath, projectGraph);
writeJsonFile(tmpProjectGraphPath, {
...projectGraph,
errors,
computedAt: Date.now(),
});
renameSync(tmpProjectGraphPath, nxProjectGraph);
writeJsonFile(tmpFileMapPath, cache);
renameSync(tmpFileMapPath, nxFileMap);
writeJsonFile(tmpSourceMapPath, sourceMaps);
renameSync(tmpSourceMapPath, nxSourceMaps);
done = true;
} catch (err: any) {
if (err instanceof Error) {

View File

@ -21,10 +21,12 @@ import {
isAggregateProjectGraphError,
ProjectConfigurationsError,
ProjectGraphError,
StaleProjectGraphCacheError,
} from './error-types';
import {
readFileMapCache,
readProjectGraphCache,
readSourceMapsCache,
writeCache,
} from './nx-deps-cache';
import { ConfigurationResult } from './utils/project-configuration-utils';
@ -34,13 +36,21 @@ import {
} from './utils/retrieve-workspace-files';
import { getPlugins } from './plugins/get-plugins';
import { logger } from '../utils/logger';
import { FileLock, IS_WASM } from '../native';
import { join } from 'path';
import { workspaceDataDirectory } from '../utils/cache-directory';
import { DelayedSpinner } from '../utils/delayed-spinner';
/**
* Synchronously reads the latest cached copy of the workspace's ProjectGraph.
*
* @param {number} [minimumComputedAt] - The minimum timestamp that the cached ProjectGraph must have been computed at.
* @throws {Error} if there is no cached ProjectGraph to read from
*/
export function readCachedProjectGraph(): ProjectGraph {
const projectGraphCache: ProjectGraph = readProjectGraphCache();
export function readCachedProjectGraph(
minimumComputedAt?: number
): ProjectGraph {
const projectGraphCache = readProjectGraphCache(minimumComputedAt);
if (!projectGraphCache) {
const angularSpecificError = fileExists(`${workspaceRoot}/angular.json`)
? stripIndents`
@ -163,12 +173,13 @@ export async function buildProjectGraphAndSourceMapsWithoutDaemon() {
...(projectGraphError?.errors ?? []),
];
if (cacheEnabled) {
writeCache(projectFileMapCache, projectGraph, sourceMaps, errors);
}
if (errors.length > 0) {
throw new ProjectGraphError(errors, projectGraph, sourceMaps);
} else {
if (cacheEnabled) {
writeCache(projectFileMapCache, projectGraph);
}
return { projectGraph, sourceMaps };
}
}
@ -203,6 +214,20 @@ export function handleProjectGraphError(opts: { exitOnError: boolean }, e) {
}
}
async function readCachedGraphAndHydrateFileMap(minimumComputedAt?: number) {
const graph = readCachedProjectGraph(minimumComputedAt);
const projectRootMap = Object.fromEntries(
Object.entries(graph.nodes).map(([project, { data }]) => [
data.root,
project,
])
);
const { allWorkspaceFiles, fileMap, rustReferences } =
await retrieveWorkspaceFiles(workspaceRoot, projectRootMap);
hydrateFileMap(fileMap, allWorkspaceFiles, rustReferences);
return graph;
}
/**
* Computes and returns a ProjectGraph.
*
@ -232,19 +257,13 @@ export async function createProjectGraphAsync(
): Promise<ProjectGraph> {
if (process.env.NX_FORCE_REUSE_CACHED_GRAPH === 'true') {
try {
const graph = readCachedProjectGraph();
const projectRootMap = Object.fromEntries(
Object.entries(graph.nodes).map(([project, { data }]) => [
data.root,
project,
])
);
const { allWorkspaceFiles, fileMap, rustReferences } =
await retrieveWorkspaceFiles(workspaceRoot, projectRootMap);
hydrateFileMap(fileMap, allWorkspaceFiles, rustReferences);
return graph;
// If no cached graph is found, we will fall through to the normal flow
const graph = await readCachedGraphAndHydrateFileMap();
return graph;
} catch (e) {
if (e instanceof ProjectGraphError) {
throw e;
}
logger.verbose('Unable to use cached project graph', e);
}
}
@ -264,6 +283,57 @@ export async function createProjectGraphAndSourceMapsAsync(
performance.mark('create-project-graph-async:start');
if (!daemonClient.enabled()) {
const lock = !IS_WASM
? new FileLock(join(workspaceDataDirectory, 'project-graph.lock'))
: null;
let locked = lock?.locked;
while (locked) {
logger.verbose(
'Waiting for graph construction in another process to complete'
);
const spinner = new DelayedSpinner(
'Waiting for graph construction in another process to complete'
);
const start = Date.now();
await lock.wait();
spinner.cleanup();
// Note: This will currently throw if any of the caches are missing...
// It would be nice if one of the processes that was waiting for the lock
// could pick up the slack and build the graph if it's missing, but
// we wouldn't want either of the below to happen:
// - All of the waiting processes to build the graph
// - Even one of the processes building the graph on a legitimate error
try {
// Ensuring that computedAt was after this process started
// waiting for the graph to complete means that the graph
// was computed by the process that was already working.
const graph = await readCachedGraphAndHydrateFileMap(start);
const sourceMaps = readSourceMapsCache();
if (!sourceMaps) {
throw new Error(
'The project graph was computed in another process, but the source maps are missing.'
);
}
return {
projectGraph: graph,
sourceMaps,
};
} catch (e) {
// If the error is that the cached graph is stale after unlock,
// the process that was working on the graph must have been canceled,
// so we will fall through to the normal flow to ensure
// its created by one of the processes that was waiting
if (!(e instanceof StaleProjectGraphCacheError)) {
throw e;
}
}
locked = lock.check();
}
lock?.lock();
try {
const res = await buildProjectGraphAndSourceMapsWithoutDaemon();
performance.measure(
@ -290,6 +360,8 @@ export async function createProjectGraphAndSourceMapsAsync(
return res;
} catch (e) {
handleProjectGraphError(opts, e);
} finally {
lock.unlock();
}
} else {
try {

View File

@ -26,7 +26,7 @@ export function projectHasTargetAndConfiguration(
export function getSourceDirOfDependentProjects(
projectName: string,
projectGraph = readCachedProjectGraph()
projectGraph: ProjectGraph = readCachedProjectGraph()
): [projectDirs: string[], warnings: string[]] {
if (!projectGraph.nodes[projectName]) {
throw new Error(
@ -58,7 +58,7 @@ export function getSourceDirOfDependentProjects(
*/
export function findAllProjectNodeDependencies(
parentNodeName: string,
projectGraph = readCachedProjectGraph(),
projectGraph: ProjectGraph = readCachedProjectGraph(),
includeExternalDependencies = false
): string[] {
const dependencyNodeNames = new Set<string>();

View File

@ -11,6 +11,8 @@
"**/*.spec.tsx",
"**/*.spec.js",
"**/*.spec.jsx",
"**/*.fixture.js",
"**/*.fixture.ts",
"**/*.d.ts",
"./src/internal-testing-utils/**/*.ts",
"jest.config.ts"