fix(core): run discrete tasks using batches if possible (#30991)
## Current Behavior

Discrete tasks are not run with batches.

## Expected Behavior

Discrete tasks are run as batches if possible.

## Related Issue(s)

Fixes #
This commit is contained in:
parent 110614da07
commit cd2e35d402
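The change is easiest to see as a small pattern: drain whatever batches the orchestrator can form, remember which tasks those batches covered, and fall back to running the remaining tasks one by one. The sketch below is a minimal model of that flow; every name in it (`Scheduler`, `Batch`, `runWithBatches`) is illustrative and not an Nx API — the real implementation is in the diff that follows.

```ts
// Minimal sketch of the batching strategy, with made-up types.
interface Batch<T> {
  tasks: T[];
}

interface Scheduler<T, R> {
  nextBatch(): Batch<T> | null; // next batch that can run together, if any
  runBatch(batch: Batch<T>): Promise<R[]>;
  runTask(task: T): Promise<R>;
}

async function runWithBatches<T extends { id: string }, R>(
  allTasks: T[],
  scheduler: Scheduler<T, R>
): Promise<R[]> {
  const batched = new Set<string>();
  const pending: Array<Promise<R[]>> = [];

  // 1. Drain batches first and remember which tasks they covered.
  for (let batch = scheduler.nextBatch(); batch; batch = scheduler.nextBatch()) {
    for (const task of batch.tasks) {
      batched.add(task.id);
    }
    pending.push(scheduler.runBatch(batch));
  }

  // 2. Any task not covered by a batch still runs individually, wrapped so
  //    every pending entry resolves to an array of results.
  for (const task of allTasks) {
    if (!batched.has(task.id)) {
      pending.push(scheduler.runTask(task).then((r) => [r]));
    }
  }

  return (await Promise.all(pending)).flat();
}
```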
@@ -157,7 +157,7 @@ async function createOrchestrator(
 
   await orchestrator.init();
 
-  orchestrator.processTasks(tasks.map((task) => task.id));
+  orchestrator.processAllScheduledTasks();
 
   return orchestrator;
 }
@@ -168,7 +168,7 @@ export async function runDiscreteTasks(
   taskGraphForHashing: TaskGraph,
   nxJson: NxJsonConfiguration,
   lifeCycle: LifeCycle
-) {
+): Promise<Array<Promise<TaskResult[]>>> {
   const orchestrator = await createOrchestrator(
     tasks,
     projectGraph,
@@ -176,9 +176,36 @@ export async function runDiscreteTasks(
     nxJson,
     lifeCycle
   );
-  return tasks.map((task, index) =>
-    orchestrator.applyFromCacheOrRunTask(true, task, index)
-  );
+
+  let groupId = 0;
+  let nextBatch = orchestrator.nextBatch();
+  let batchResults: Array<Promise<TaskResult[]>> = [];
+  /**
+   * Set of task ids that were part of batches
+   */
+  const batchTasks = new Set<string>();
+
+  while (nextBatch) {
+    for (const task in nextBatch.taskGraph.tasks) {
+      batchTasks.add(task);
+    }
+
+    batchResults.push(
+      orchestrator.applyFromCacheOrRunBatch(true, nextBatch, groupId++)
+    );
+    nextBatch = orchestrator.nextBatch();
+  }
+
+  const taskResults = tasks
+    // Filter out tasks which were not part of batches
+    .filter((task) => !batchTasks.has(task.id))
+    .map((task) =>
+      orchestrator
+        .applyFromCacheOrRunTask(true, task, groupId++)
+        .then((r) => [r])
+    );
+
+  return [...batchResults, ...taskResults];
 }
 
 export async function runContinuousTasks(
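With this change `runDiscreteTasks` resolves to an array of promises in which each entry yields one or more `TaskResult`s: a batch entry resolves to the whole batch's results, while a leftover single task is wrapped via `.then((r) => [r])` so both kinds have the same shape. A caller can therefore treat every entry uniformly; the helper below is a hypothetical consumer-side sketch, not part of this PR.

```ts
// Hypothetical helper for consuming the new return shape (assumption, not in this PR).
async function collectResults<R>(entries: Array<Promise<R[]>>): Promise<R[]> {
  // Every entry resolves to an array (a batch's results, or a single result
  // wrapped in a one-element array), so awaiting and flattening is enough.
  const perEntry = await Promise.all(entries);
  return perEntry.flat();
}

// e.g. const results = await collectResults(await runDiscreteTasks(...));
```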
@@ -25,7 +25,7 @@ import { Cache, DbCache, dbCacheEnabled, getCache } from './cache';
 import { DefaultTasksRunnerOptions } from './default-tasks-runner';
 import { ForkedProcessTaskRunner } from './forked-process-task-runner';
 import { isTuiEnabled } from './is-tui-enabled';
-import { TaskMetadata } from './life-cycle';
+import { TaskMetadata, TaskResult } from './life-cycle';
 import { PseudoTtyProcess } from './pseudo-terminal';
 import { NoopChildProcess } from './running-tasks/noop-child-process';
 import { RunningTask } from './running-tasks/running-task';
@@ -108,7 +108,9 @@ export class TaskOrchestrator {
     // Init the ForkedProcessTaskRunner, TasksSchedule, and Cache
     await Promise.all([
       this.forkedProcessTaskRunner.init(),
-      this.tasksSchedule.init(),
+      this.tasksSchedule.init().then(() => {
+        return this.tasksSchedule.scheduleNextTasks();
+      }),
       'init' in this.cache ? this.cache.init() : null,
     ]);
   }
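Folding `scheduleNextTasks()` into `init()` means the schedule is populated even for callers that drive the orchestrator externally and never call `run()`, which is exactly what `runDiscreteTasks` does above. A rough sketch of that driving pattern, using only the methods shown in this diff (the surrounding wiring is assumed):

```ts
// Assumed wiring; only init()/processAllScheduledTasks()/nextBatch()/
// applyFromCacheOrRunBatch() come from this diff.
const orchestrator = await createOrchestrator(/* tasks, graphs, nxJson, lifeCycle as above */);
// init() now also performs the initial scheduling, so batches are available immediately.

let groupId = 0;
let batch = orchestrator.nextBatch();
while (batch) {
  // Restore each batch from cache or run it, then ask for the next one.
  await orchestrator.applyFromCacheOrRunBatch(true, batch, groupId++);
  batch = orchestrator.nextBatch();
}
```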
@@ -116,9 +118,6 @@ export class TaskOrchestrator {
   async run() {
     await this.init();
 
-    // initial scheduling
-    await this.tasksSchedule.scheduleNextTasks();
-
     performance.mark('task-execution:start');
 
     const threadCount = getThreadCount(this.options, this.taskGraph);
@@ -160,6 +159,10 @@ export class TaskOrchestrator {
     return this.completedTasks;
   }
 
+  public nextBatch() {
+    return this.tasksSchedule.nextBatch();
+  }
+
   private async executeNextBatchOfTasksUsingTaskSchedule() {
     // completed all the tasks
     if (!this.tasksSchedule.hasTasks() || this.bailed) {
@@ -171,7 +174,7 @@ export class TaskOrchestrator {
       this.options.skipNxCache === undefined;
 
     this.processAllScheduledTasks();
-    const batch = this.tasksSchedule.nextBatch();
+    const batch = this.nextBatch();
     if (batch) {
       const groupId = this.closeGroup();
 
@@ -203,7 +206,7 @@ export class TaskOrchestrator {
     );
   }
 
-  processTasks(taskIds: string[]) {
+  private processTasks(taskIds: string[]) {
     for (const taskId of taskIds) {
       // Task is already handled or being handled
       if (!this.processedTasks.has(taskId)) {
@@ -251,7 +254,7 @@ export class TaskOrchestrator {
     );
   }
 
-  private processAllScheduledTasks() {
+  public processAllScheduledTasks() {
     const { scheduledTasks, scheduledBatches } =
       this.tasksSchedule.getAllScheduledTasks();
 
@@ -264,13 +267,7 @@ export class TaskOrchestrator {
   // endregion Processing Scheduled Tasks
 
   // region Applying Cache
-  private async applyCachedResults(tasks: Task[]): Promise<
-    {
-      task: Task;
-      code: number;
-      status: 'local-cache' | 'local-cache-kept-existing' | 'remote-cache';
-    }[]
-  > {
+  private async applyCachedResults(tasks: Task[]): Promise<TaskResult[]> {
     const cacheableTasks = tasks.filter((t) =>
       isCacheableTask(t, this.options)
     );
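The inline result shapes used by `applyCachedResults` (and, further down, `applyFromCacheOrRunBatch`) are replaced by the shared `TaskResult` type now imported from `./life-cycle`. Based on the fields this diff reads and writes (`task`, `code`, `status`, `terminalOutput`), it presumably looks roughly like the following; this is an approximation for orientation, not the actual declaration:

```ts
// Approximate shape only, inferred from the fields used in this diff; the real
// declarations live in './life-cycle' and the Nx task-graph types.
type TaskStatus =
  | 'success'
  | 'failure'
  | 'skipped'
  | 'local-cache'
  | 'local-cache-kept-existing'
  | 'remote-cache'; // (possibly others)

interface TaskResult {
  task: { id: string; startTime?: number; endTime?: number }; // stand-in for the full Task type
  status: TaskStatus;
  code: number; // exit code: 0 on success, non-zero on failure
  terminalOutput?: string;
}
```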
@@ -319,11 +316,11 @@ export class TaskOrchestrator {
   // endregion Applying Cache
 
   // region Batch
-  private async applyFromCacheOrRunBatch(
+  public async applyFromCacheOrRunBatch(
     doNotSkipCache: boolean,
     batch: Batch,
     groupId: number
-  ) {
+  ): Promise<TaskResult[]> {
     const applyFromCacheOrRunBatchStart = performance.mark(
       'TaskOrchestrator-apply-from-cache-or-run-batch:start'
     );
@@ -335,11 +332,9 @@ export class TaskOrchestrator {
 
     await this.preRunSteps(tasks, { groupId });
 
-    let results: {
-      task: Task;
-      status: TaskStatus;
-      terminalOutput?: string;
-    }[] = doNotSkipCache ? await this.applyCachedResults(tasks) : [];
+    let results: TaskResult[] = doNotSkipCache
+      ? await this.applyCachedResults(tasks)
+      : [];
 
     // Run tasks that were not cached
     if (results.length !== taskEntries.length) {
@@ -389,9 +384,13 @@ export class TaskOrchestrator {
       applyFromCacheOrRunBatchStart.name,
       applyFromCacheOrRunBatchEnd.name
     );
+    return results;
   }
 
-  private async runBatch(batch: Batch, env: NodeJS.ProcessEnv) {
+  private async runBatch(
+    batch: Batch,
+    env: NodeJS.ProcessEnv
+  ): Promise<TaskResult[]> {
     const runBatchStart = performance.mark('TaskOrchestrator-run-batch:start');
     try {
       const batchProcess =
@@ -405,6 +404,7 @@ export class TaskOrchestrator {
       const batchResultEntries = Object.entries(results);
       return batchResultEntries.map(([taskId, result]) => ({
         ...result,
+        code: result.success ? 0 : 1,
         task: {
           ...this.taskGraph.tasks[taskId],
           startTime: result.startTime,
@@ -416,6 +416,7 @@ export class TaskOrchestrator {
     } catch (e) {
       return batch.taskGraph.roots.map((rootTaskId) => ({
         task: this.taskGraph.tasks[rootTaskId],
+        code: 1,
         status: 'failure' as TaskStatus,
       }));
     } finally {
@@ -435,7 +436,7 @@ export class TaskOrchestrator {
     doNotSkipCache: boolean,
     task: Task,
     groupId: number
-  ) {
+  ): Promise<TaskResult> {
     // Wait for task to be processed
     const taskSpecificEnv = await this.processedTasks.get(task.id);
 