feat(core): add ability to use globs as outputs (#10894)

* feat(core): add ability to use globs as outputs

* fix(repo): adjust the way outputs are tracked

* docs(core): improve outputs documentation
Jason Jean 2022-07-14 18:31:12 -04:00 committed by GitHub
parent 61e1931ded
commit 63b74d2a56
5 changed files with 548 additions and 84 deletions
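
At a glance, the feature lets a target's `outputs` contain globs, so only the matched files are cached rather than a whole directory. A minimal sketch of such a target definition (the library name, glob, and command are illustrative; `nx:run-commands` is the executor used in the e2e test further down):

```typescript
// Hypothetical target definition in project-configuration terms:
// only the .js files matched by the glob are cached, not the whole dist folder.
const buildTarget = {
  executor: 'nx:run-commands',
  outputs: ['dist/libs/mylib/**/*.js'], // glob output enabled by this commit
  options: {
    commands: ['tsc -p libs/mylib'], // illustrative build command
  },
};
```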


@ -120,12 +120,49 @@ sources (non-test sources) of its dependencies. In other words, it treats test s
### outputs

-`"outputs": ["dist/libs/mylib"]` tells Nx where the `build` target is going to create file artifacts. The provided value is actually the default, so we can omit it in this case. `"outputs": []` tells Nx that the `test` target doesn't create any artifacts on disk.
+Targets may define outputs to tell Nx where the target is going to create file artifacts that Nx should cache. `"outputs": ["dist/libs/mylib"]` tells Nx where the `build` target is going to create file artifacts.

This configuration is usually not needed. Nx comes with reasonable defaults (imported in `nx.json`) which implement the configuration above.
#### Basic Example
Usually, a target writes to a specific directory or a file. The following instructs Nx to cache `dist/libs/mylib` and `build/libs/mylib/main.js`:
```json
{
"build": {
"outputs": ["dist/libs/mylib", "build/libs/mylib/main.js"]
}
}
```
#### Specifying Globs
Sometimes, multiple targets might write to the same directory. When possible it is recommended to direct these targets into separate directories.
```json
{
"build-js": {
"outputs": ["dist/libs/mylib/js"]
},
"build-css": {
"outputs": ["dist/libs/mylib/css"]
}
}
```
But if the above is not possible, globs can be specified as outputs to only cache a set of files rather than the whole directory.
```json
{
"build-js": {
"outputs": ["dist/libs/mylib/**/*.js"]
},
"build-css": {
"outputs": ["dist/libs/mylib/**/*.css"]
}
}
```
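
Under the hood (see the `cache.ts` changes later in this commit), each `outputs` entry is expanded before files are copied into or restored from the cache: an entry that exists on disk is used as-is, and anything else is treated as a glob. A minimal sketch of that expansion, assuming the same `fast-glob` and `fs-extra` helpers the diff uses (the standalone function name here is illustrative):

```typescript
import * as fastGlob from 'fast-glob';
import { pathExists } from 'fs-extra';
import { join } from 'path';

// Expand declared outputs against a root directory: keep literal paths that
// exist, and expand everything else as a glob relative to that root.
async function expandOutputs(outputs: string[], cwd: string): Promise<string[]> {
  const expanded = await Promise.all(
    outputs.map(async (entry) =>
      (await pathExists(join(cwd, entry))) ? [entry] : fastGlob(entry, { cwd })
    )
  );
  return expanded.flat();
}
```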
### dependsOn


@ -271,9 +271,85 @@ sources (non-test sources) of its dependencies. In other words, it treats test s
### Outputs

-`"outputs": ["dist/libs/mylib"]` tells Nx where the `build` target is going to create file artifacts. The provided value is actually the default, so we can omit it in this case. `"outputs": []` tells Nx that the `test` target doesn't create any artifacts on disk.
+Targets may define outputs to tell Nx where the target is going to create file artifacts that Nx should cache. `"outputs": ["dist/libs/mylib"]` tells Nx where the `build` target is going to create file artifacts.

#### Basic Example
Usually, a target writes to a specific directory or a file. The following instructs Nx to cache `dist/libs/mylib` and `build/libs/mylib/main.js`:
```json
{
"build": {
...,
"outputs": ["dist/libs/mylib", "build/libs/mylib/main.js"],
"options": {
...
},
}
}
```
#### Referencing Options
Most commonly, targets have an option for an output file or directory. Rather than duplicating the information as seen above, options can be referenced using the below syntax:
> When the `outputPath` option is changed, Nx will start caching the new path as well.
```json
{
"build": {
...,
"outputs": ["{options.outputPath}"],
"options": {
"outputPath": "dist/libs/mylib"
}
}
}
```
#### Specifying Globs
Sometimes, multiple targets might write to the same directory. When possible it is recommended to direct these targets into separate directories.
```json
{
"build-js": {
...,
"outputs": ["dist/libs/mylib/js"],
"options": {
"outputPath": "dist/libs/mylib/js"
}
},
"build-css": {
...,
"outputs": ["dist/libs/mylib/css"],
"options": {
"outputPath": "dist/libs/mylib/css"
}
}
}
```
But if the above is not possible, globs can be specified as outputs to only cache a set of files rather than the whole directory.
```json
{
"build-js": {
...,
"outputs": ["dist/libs/mylib/**/*.js"],
"options": {
"outputPath": "dist/libs/mylib"
}
},
"build-css": {
...,
"outputs": ["dist/libs/mylib/**/*.css"],
"options": {
"outputPath": "dist/libs/mylib"
}
}
}
```
### dependsOn


@ -99,7 +99,7 @@ describe('cache', () => {
expect(outputWithBothLintTasksCached).toContain(
'read the output from the cache'
);
-expectCached(outputWithBothLintTasksCached, [
+expectMatchedOutput(outputWithBothLintTasksCached, [
myapp1,
myapp2,
`${myapp1}-e2e`,
@ -164,6 +164,80 @@ describe('cache', () => {
updateFile('nx.json', (c) => originalNxJson);
}, 120000);
it('should support using globs as outputs', async () => {
const mylib = uniq('mylib');
runCLI(`generate @nrwl/workspace:library ${mylib}`);
updateProjectConfig(mylib, (c) => {
c.targets.build = {
executor: 'nx:run-commands',
outputs: ['dist/*.txt'],
options: {
commands: [
'rm -rf dist',
'mkdir dist',
'echo a > dist/a.txt',
'echo b > dist/b.txt',
'echo c > dist/c.txt',
'echo d > dist/d.txt',
'echo e > dist/e.txt',
'echo f > dist/f.txt',
],
parallel: false,
},
};
return c;
});
// Run without cache
const runWithoutCache = runCLI(`build ${mylib}`);
expect(runWithoutCache).not.toContain('read the output from the cache');
// Rerun without touching anything
const rerunWithUntouchedOutputs = runCLI(`build ${mylib}`);
expect(rerunWithUntouchedOutputs).toContain(
'existing outputs match the cache'
);
const outputsWithUntouchedOutputs = listFiles('dist');
expect(outputsWithUntouchedOutputs).toContain('a.txt');
expect(outputsWithUntouchedOutputs).toContain('b.txt');
expect(outputsWithUntouchedOutputs).toContain('c.txt');
expect(outputsWithUntouchedOutputs).toContain('d.txt');
expect(outputsWithUntouchedOutputs).toContain('e.txt');
expect(outputsWithUntouchedOutputs).toContain('f.txt');
// Create a file in the dist that does not match output glob
updateFile('dist/c.ts', '');
// Rerun
const rerunWithNewUnrelatedFile = runCLI(`build ${mylib}`);
expect(rerunWithNewUnrelatedFile).toContain(
'existing outputs match the cache'
);
const outputsAfterAddingUntouchedFileAndRerunning = listFiles('dist');
expect(outputsAfterAddingUntouchedFileAndRerunning).toContain('a.txt');
expect(outputsAfterAddingUntouchedFileAndRerunning).toContain('b.txt');
expect(outputsAfterAddingUntouchedFileAndRerunning).toContain('c.txt');
expect(outputsAfterAddingUntouchedFileAndRerunning).toContain('d.txt');
expect(outputsAfterAddingUntouchedFileAndRerunning).toContain('e.txt');
expect(outputsAfterAddingUntouchedFileAndRerunning).toContain('f.txt');
expect(outputsAfterAddingUntouchedFileAndRerunning).toContain('c.ts');
// Clear Dist
rmDist();
// Rerun
const rerunWithoutOutputs = runCLI(`build ${mylib}`);
expect(rerunWithoutOutputs).toContain('read the output from the cache');
const outputsWithoutOutputs = listFiles('dist');
expect(outputsWithoutOutputs).toContain('a.txt');
expect(outputsWithoutOutputs).toContain('b.txt');
expect(outputsWithoutOutputs).toContain('c.txt');
expect(outputsWithoutOutputs).toContain('d.txt');
expect(outputsWithoutOutputs).toContain('e.txt');
expect(outputsWithoutOutputs).toContain('f.txt');
expect(outputsWithoutOutputs).not.toContain('c.ts');
});
it('should use consider filesets when hashing', async () => {
const parent = uniq('parent');
const child1 = uniq('child1');


@ -0,0 +1,112 @@
import { Cache, collapseExpandedOutputs } from './cache';
describe('Cache', () => {
describe('collapseExpandedOutputs', () => {
it('should handle no outputs', async () => {
const outputs = [];
const res = collapseExpandedOutputs(outputs);
expect(res).toEqual([]);
});
it('should keep files as is', async () => {
const outputs = ['dist/apps/app1/0.js'];
const res = collapseExpandedOutputs(outputs);
expect(res).toEqual(['dist/apps/app1/0.js']);
});
it('should keep directories as is', async () => {
const outputs = ['dist/apps/app1'];
const res = collapseExpandedOutputs(outputs);
expect(res).toEqual(['dist/apps/app1']);
});
it('should keep short lists of directories as is', async () => {
const outputs = ['test-results/apps/app1', 'coverage/apps/app1'];
const res = collapseExpandedOutputs(outputs);
expect(res).toEqual(['test-results/apps/app1', 'coverage/apps/app1']);
});
it('should keep short lists of files as is', async () => {
const outputs = [
'test-results/apps/app1/results.xml',
'coverage/apps/app1/coverage.html',
];
const res = collapseExpandedOutputs(outputs);
expect(res).toEqual([
'test-results/apps/app1/results.xml',
'coverage/apps/app1/coverage.html',
]);
});
it('should collapse long lists of directories', async () => {
const outputs = [
'dist/apps/app1/a',
'dist/apps/app1/b',
'dist/apps/app1/c',
'dist/apps/app1/d',
'dist/apps/app1/e',
'dist/apps/app1/f',
];
const res = collapseExpandedOutputs(outputs);
expect(res).toEqual(['dist/apps/app1']);
});
it('should collapse long lists of directories + files', async () => {
const outputs = [
'coverage/apps/app1',
'dist/apps/app1/a.txt',
'dist/apps/app1/b.txt',
'dist/apps/app1/c.txt',
'dist/apps/app1/d.txt',
'dist/apps/app1/e.txt',
'dist/apps/app1/f.txt',
];
const res = collapseExpandedOutputs(outputs);
expect(res).toEqual(['coverage/apps/app1', 'dist/apps/app1']);
});
it('should keep long lists of top-level directories', async () => {
const outputs = ['a', 'b', 'c', 'd', 'e', 'f'];
const res = collapseExpandedOutputs(outputs);
expect(res).toEqual(['a', 'b', 'c', 'd', 'e', 'f']);
});
it('should collapse long lists of files', async () => {
const outputs = [
'dist/apps/app1/a.js',
'dist/apps/app1/b.js',
'dist/apps/app1/c.js',
'dist/apps/app1/d.js',
'dist/apps/app1/e.js',
'dist/apps/app1/f.js',
];
const res = collapseExpandedOutputs(outputs);
expect(res).toEqual(['dist/apps/app1']);
});
it('should collapse long lists of files in nested directories', async () => {
const outputs = [];
// Create dist/apps/app1/n/m.js + dist/apps/app1/n/m.d.ts
for (let i = 0; i < 6; i++) {
outputs.push(`dist/apps/app1/${i}.js`);
outputs.push(`dist/apps/app1/${i}.d.ts`);
for (let j = 0; j < 6; j++) {
outputs.push(`dist/apps/app1/${i}/${j}.js`);
outputs.push(`dist/apps/app1/${i}/${j}.d.ts`);
}
}
const res = collapseExpandedOutputs(outputs);
expect(res).toEqual(['dist/apps/app1']);
});
});
});


@ -1,22 +1,23 @@
import { workspaceRoot } from '../utils/workspace-root';
import {
  copy,
+  lstat,
  mkdir,
  mkdirSync,
+  pathExists,
+  readdir,
  readFile,
  remove,
  unlink,
  writeFile,
-  pathExists,
-  lstat,
-  readdir,
} from 'fs-extra';
-import { dirname, join, resolve, sep } from 'path';
+import { dirname, join, relative, resolve, sep } from 'path';
import { DefaultTasksRunnerOptions } from './default-tasks-runner';
-import { spawn, execFile } from 'child_process';
+import { execFile, spawn } from 'child_process';
import { cacheDir } from '../utils/cache-directory';
import { platform } from 'os';
import { Task } from '../config/task-graph';
+import * as fastGlob from 'fast-glob';

export type CachedResult = {
  terminalOutput: string;
@ -94,12 +95,16 @@ export class Cache {
      );

      await mkdir(join(td, 'outputs'));
+      const expandedOutputs = await this.expandOutputsInWorkspace(outputs);
+      const collapsedOutputs = collapseExpandedOutputs(expandedOutputs);

      await Promise.all(
-        outputs.map(async (f) => {
+        expandedOutputs.map(async (f) => {
          const src = join(this.root, f);
          if (await pathExists(src)) {
-            const cached = join(td, 'outputs', f);
            const isFile = (await lstat(src)).isFile();
+            const cached = join(td, 'outputs', f);
            const directory = isFile ? dirname(cached) : cached;
            await mkdir(directory, { recursive: true });
            await this.copy(src, cached);
@ -117,7 +122,7 @@ export class Cache {
        await this.options.remoteCache.store(task.hash, this.cachePath);
      }
-      await this.recordOutputsHash(outputs, task.hash);
+      await this.recordOutputsHash(collapsedOutputs, task.hash);

      if (terminalOutput) {
        const outputPath = this.temporaryOutputPath(task);
@ -132,9 +137,16 @@ export class Cache {
    outputs: string[]
  ) {
    return this.tryAndRetry(async () => {
-      await this.removeRecordedOutputsHashes(outputs);
+      const expandedOutputs = await this.expandOutputsInCache(
+        outputs,
+        cachedResult
+      );
+      const collapsedOutputs = collapseExpandedOutputs(expandedOutputs);
+      await this.removeRecordedOutputsHashes(collapsedOutputs);
      await Promise.all(
-        outputs.map(async (f) => {
+        expandedOutputs.map(async (f) => {
          const cached = join(cachedResult.outputsPath, f);
          if (await pathExists(cached)) {
            const isFile = (await lstat(cached)).isFile();
@ -147,7 +159,8 @@ }
        })
      );
-      await this.recordOutputsHash(outputs, hash);
+
+      await this.recordOutputsHash(collapsedOutputs, hash);
    });
  }
@ -156,59 +169,94 @@ export class Cache {
  }

  async removeRecordedOutputsHashes(outputs: string[]): Promise<void> {
-    for (const output of outputs) {
-      const hashFile = this.getFileNameWithLatestRecordedHashForOutput(output);
-      try {
-        await unlink(hashFile);
-      } catch {}
-    }
+    await Promise.all(
+      outputs.map(async (output) => {
+        const hashFile =
+          this.getFileNameWithLatestRecordedHashForOutput(output);
+        try {
+          await unlink(hashFile);
+        } catch {}
+      })
+    );
  }

  async shouldCopyOutputsFromCache(
    taskWithCachedResult: TaskWithCachedResult,
    outputs: string[]
  ): Promise<boolean> {
-    return (
-      (await this.areLatestOutputsHashesDifferentThanTaskHash(
-        outputs,
-        taskWithCachedResult.task.hash
-      )) ||
-      (await this.isAnyOutputMissing(
-        taskWithCachedResult.cachedResult,
-        outputs
-      ))
-    );
+    const [outputsInCache, outputsInWorkspace] = await Promise.all([
+      this.expandOutputsInCache(outputs, taskWithCachedResult.cachedResult),
+      this.expandOutputsInWorkspace(outputs),
+    ]);
+
+    const collapsedOutputsInCache = collapseExpandedOutputs(outputsInCache);
+
+    const [latestHashesDifferent, outputMissing] = await Promise.all([
+      this.areLatestOutputsHashesDifferentThanTaskHash(
+        collapsedOutputsInCache,
+        taskWithCachedResult
+      ),
+      this.haveOutputsBeenAddedOrRemoved(
+        taskWithCachedResult,
+        outputsInCache,
+        outputsInWorkspace
+      ),
+    ]);
+
+    return latestHashesDifferent || outputMissing;
  }

-  private copy(src: string, directory: string): Promise<void> {
-    if (this.useFsExtraToCopyAndRemove) {
-      return copy(src, directory);
-    }
+  private async expandOutputsInWorkspace(outputs: string[]) {
+    return this._expandOutputs(outputs, workspaceRoot);
+  }
+
+  private async expandOutputsInCache(
+    outputs: string[],
+    cachedResult: CachedResult
+  ) {
+    return this._expandOutputs(outputs, cachedResult.outputsPath);
+  }
+
+  private async _expandOutputs(outputs: string[], cwd: string) {
+    return (
+      await Promise.all(
+        outputs.map(async (entry) => {
+          if (await pathExists(join(cwd, entry))) {
+            return entry;
+          }
+          return fastGlob(entry, { cwd });
+        })
+      )
+    ).flat();
+  }
+
+  private async copy(src: string, destination: string): Promise<void> {
+    if (this.useFsExtraToCopyAndRemove) {
+      return copy(src, destination);
+    }
    return new Promise((res, rej) => {
-      execFile('cp', ['-a', src, dirname(directory)], (error) => {
+      execFile('cp', ['-a', src, dirname(destination)], (error) => {
        if (!error) {
          res();
        } else {
          this.useFsExtraToCopyAndRemove = true;
-          copy(src, directory).then(res, rej);
+          copy(src, destination).then(res, rej);
        }
      });
    });
  }

-  private remove(folder: string): Promise<void> {
+  private async remove(path: string): Promise<void> {
    if (this.useFsExtraToCopyAndRemove) {
-      return remove(folder);
+      return remove(path);
    }
    return new Promise<void>((res, rej) => {
-      execFile('rm', ['-rf', folder], (error) => {
+      execFile('rm', ['-rf', path], (error) => {
        if (!error) {
          res();
        } else {
          this.useFsExtraToCopyAndRemove = true;
-          remove(folder).then(res, rej);
+          remove(path).then(res, rej);
        }
      });
    });
@ -218,22 +266,56 @@ export class Cache {
    outputs: string[],
    hash: string
  ): Promise<void> {
-    for (const output of outputs) {
-      const hashFile = this.getFileNameWithLatestRecordedHashForOutput(output);
-      try {
-        await mkdir(dirname(hashFile), { recursive: true });
-        await writeFile(hashFile, hash);
-      } catch {}
-    }
+    await mkdir(this.latestOutputsHashesDir, { recursive: true });
+
+    await Promise.all(
+      outputs.map(async (output) => {
+        const hashFile =
+          this.getFileNameWithLatestRecordedHashForOutput(output);
+        try {
+          await writeFile(hashFile, hash);
+        } catch {}
+      })
+    );
  }

  private async areLatestOutputsHashesDifferentThanTaskHash(
    outputs: string[],
-    hash: string
+    { task }: TaskWithCachedResult
  ) {
-    for (let output of outputs) {
-      if ((await this.getLatestRecordedHashForTask(output)) !== hash)
-        return true;
+    const latestExistingOutputHashes = (
+      await readdir(this.latestOutputsHashesDir)
+    ).map((m) => m.substring(0, m.length - 5));
+
+    // Purposely blocking
+    for (const output of outputs) {
+      const latestOutputFilename = this.getLatestOutputHashFilename(output);
+      const conflicts = latestExistingOutputHashes.filter((w) => {
+        // This is the exact same output
+        return (
+          w !== latestOutputFilename &&
+          // This is a child of the output
+          (latestOutputFilename.startsWith(w) ||
+            // This is a parent of the output
+            w.startsWith(latestOutputFilename))
+        );
+      });
+      if (conflicts.length > 0) {
+        // Clean up the conflicts
+        await Promise.all(
+          conflicts.map((conflict) =>
+            unlink(join(this.latestOutputsHashesDir, conflict + '.hash'))
+          )
+        );
+        return true;
+      }
+      const hash = await this.getLatestRecordedHashForTask(output);
+      if (!!hash && hash !== task.hash) {
+        return true;
+      }
    }
    return false;
  }
@ -251,48 +333,83 @@ export class Cache {
    }
  }

-  private async isAnyOutputMissing(
-    cachedResult: CachedResult,
-    outputs: string[]
+  private async haveOutputsBeenAddedOrRemoved(
+    result: TaskWithCachedResult,
+    cachedOutputs: string[],
+    workspaceFiles: string[]
  ): Promise<boolean> {
-    for (let output of outputs) {
-      const cacheOutputPath = join(cachedResult.outputsPath, output);
-      const rootOutputPath = join(this.root, output);
-      if (
-        (await pathExists(cacheOutputPath)) &&
-        (await lstat(cacheOutputPath)).isFile()
-      ) {
-        return (
-          (await pathExists(join(cachedResult.outputsPath, output))) &&
-          !(await pathExists(join(this.root, output)))
-        );
-      }
-      const haveDifferentAmountOfFiles =
-        (await pathExists(cacheOutputPath)) &&
-        (await pathExists(rootOutputPath)) &&
-        (await readdir(cacheOutputPath)).length !==
-          (await readdir(rootOutputPath)).length;
-      if (
-        ((await pathExists(cacheOutputPath)) &&
-          !(await pathExists(rootOutputPath))) ||
-        haveDifferentAmountOfFiles
-      ) {
-        return true;
-      }
-    }
+    const workspaceSet = new Set(workspaceFiles);
+
+    for (const path of cachedOutputs) {
+      if (!(await pathExists(join(workspaceRoot, path)))) {
+        return true;
+      }
+      const isFile = (await lstat(path)).isFile();
+      if (!workspaceSet.has(path)) {
+        return true;
+      }
+      if (!isFile) {
+        const [cachedFiles, workspaceFiles] = await Promise.all([
+          this.getFilesInDirectory(join(result.cachedResult.outputsPath, path)),
+          this.getFilesInDirectory(join(workspaceRoot, path)),
+        ]);
+        if (workspaceFiles.size !== cachedFiles.size) {
+          return true;
+        }
+        for (const file of cachedFiles) {
+          if (!workspaceFiles.has(file)) {
+            return true;
+          }
+          workspaceFiles.delete(file);
+        }
+        if (workspaceFiles.size !== 0) {
+          return true;
+        }
+      }
+      workspaceSet.delete(path);
+    }
+
+    if (workspaceSet.size !== 0) {
+      return true;
+    }
    return false;
  }

+  private async getFilesInDirectory(path: string): Promise<Set<string>> {
+    const paths = new Set<string>();
+    await this.visitDirectory(path, (entry) => {
+      paths.add(relative(path, entry));
+    });
+    return paths;
+  }
+
+  private async visitDirectory(path: string, visitor: (path: string) => void) {
+    const children = await readdir(join(path), {
+      withFileTypes: true,
+    });
+
+    await Promise.all(
+      children.map(async (child) => {
+        if (child.isFile()) {
+          visitor(join(path, child.name));
+        } else {
+          await this.visitDirectory(join(path, child.name), visitor);
+        }
+      })
+    );
+  }
+
  private getFileNameWithLatestRecordedHashForOutput(output: string): string {
    return join(
      this.latestOutputsHashesDir,
-      `${output.split(sep).join('-')}.hash`
+      `${this.getLatestOutputHashFilename(output)}.hash`
    );
  }

+  private getLatestOutputHashFilename(output: string) {
+    return output.split(sep).join('-');
+  }
+
  private async getFromLocalDir(task: Task) {
    const tdCommit = join(this.cachePath, `${task.hash}.commit`);
    const td = join(this.cachePath, task.hash);
@ -352,3 +469,51 @@ export class Cache {
    return _try();
  }
}
/**
* Heuristic to prevent writing too many hash files
*/
const MAX_OUTPUTS_TO_CHECK_HASHES = 5;
/**
* Collapse Expanded Outputs back into a smaller set of directories/files to track
* Note: DO NOT USE, Only exported for unit testing
* */
export function collapseExpandedOutputs(expandedOutputs: string[]) {
const tree: Set<string>[] = [];
// Create a Tree of directories/files
for (const output of expandedOutputs) {
const pathParts = [];
pathParts.unshift(output);
let dir = dirname(output);
while (dir !== dirname(dir)) {
pathParts.unshift(dir);
dir = dirname(dir);
}
for (let i = 0; i < pathParts.length; i++) {
tree[i] ??= new Set<string>();
tree[i].add(pathParts[i]);
}
}
// Find a level in the tree that has too many outputs
if (tree.length === 0) {
return [];
}
let j = 0;
let level = tree[j];
for (j = 0; j < tree.length; j++) {
level = tree[j];
if (level.size > MAX_OUTPUTS_TO_CHECK_HASHES) {
break;
}
}
// Return the level before the level with too many outputs
// If the first level has too many outputs, return that one.
return Array.from(tree[Math.max(0, j - 1)]);
}
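
For a concrete sense of the collapsing behavior verified by the unit tests above, a short usage sketch (the paths are hypothetical):

```typescript
import { collapseExpandedOutputs } from './cache';

// More than MAX_OUTPUTS_TO_CHECK_HASHES (5) sibling entries collapse to their
// parent directory, so only one hash file is tracked for it.
collapseExpandedOutputs([
  'dist/apps/app1/a.js',
  'dist/apps/app1/b.js',
  'dist/apps/app1/c.js',
  'dist/apps/app1/d.js',
  'dist/apps/app1/e.js',
  'dist/apps/app1/f.js',
]);
// => ['dist/apps/app1']

// Short lists are kept as-is.
collapseExpandedOutputs(['coverage/apps/app1', 'test-results/apps/app1']);
// => ['coverage/apps/app1', 'test-results/apps/app1']
```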