
Commit aaca7c4

Batch reading content files to prevent too many open files error (#12079)
* Refactor
* Refactor
* Batch content file reads in Node into groups of 500

  We shouldn’t need to do this for our Rust code because it uses Rayon’s default thread pool for parallelism. That thread pool defaults to roughly one thread per available core unless overridden, which is generally much, much lower than 500, and it can be explicitly overridden via an env var to work around potential open-file-descriptor issues if anyone ever runs into them.

* Fix sequential/parallel flip
* Update changelog
1 parent 64c7d0e commit aaca7c4
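
To illustrate the batching approach described in the commit message (a standalone sketch, not Tailwind's internal code; `readAllInBatches` is a hypothetical helper name): reading files in fixed-size batches and awaiting each batch before starting the next caps the number of simultaneously open file descriptors at the batch size, which is what avoids the `too many open files` (EMFILE) error.

```js
const fs = require('fs/promises')

const BATCH_SIZE = 500 // same cap the commit uses on the Node side

// Hypothetical helper, for illustration only: read `files` in batches of
// `batchSize`, so at most `batchSize` descriptors are open at any moment.
async function readAllInBatches(files, batchSize = BATCH_SIZE) {
  let contents = []
  for (let i = 0; i < files.length; i += batchSize) {
    let batch = files.slice(i, i + batchSize)
    // Open the whole batch concurrently, then wait for it to settle (and the
    // descriptors to close) before moving on to the next batch.
    contents.push(...(await Promise.all(batch.map((file) => fs.readFile(file, 'utf8')))))
  }
  return contents
}

// Usage sketch: let blobs = await readAllInBatches(['a.html', 'b.html'])
```

Without the batching, mapping `Promise.all` over every changed file opens them all at once, which is what could exceed the OS file-descriptor limit on large projects.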

3 files changed, +39 -34 lines changed


CHANGELOG.md (+1)

@@ -18,6 +18,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Fix incorrectly generated CSS when using square brackets inside arbitrary properties ([#11709](https://github.com/tailwindlabs/tailwindcss/pull/11709))
 - Make `content` optional for presets in TypeScript types ([#11730](https://github.com/tailwindlabs/tailwindcss/pull/11730))
 - Handle variable colors that have variable fallback values ([#12049](https://github.com/tailwindlabs/tailwindcss/pull/12049))
+- Batch reading content files to prevent `too many open files` error ([#12079](https://github.com/tailwindlabs/tailwindcss/pull/12079))
 
 ### Added
 

oxide/crates/core/src/lib.rs (+2 -2)

@@ -342,8 +342,8 @@ pub fn parse_candidate_strings(input: Vec<ChangedContent>, options: u8) -> Vec<S
 
     match (IO::from(options), Parsing::from(options)) {
         (IO::Sequential, Parsing::Sequential) => parse_all_blobs_sync(read_all_files_sync(input)),
-        (IO::Sequential, Parsing::Parallel) => parse_all_blobs_sync(read_all_files(input)),
-        (IO::Parallel, Parsing::Sequential) => parse_all_blobs(read_all_files_sync(input)),
+        (IO::Sequential, Parsing::Parallel) => parse_all_blobs(read_all_files_sync(input)),
+        (IO::Parallel, Parsing::Sequential) => parse_all_blobs_sync(read_all_files(input)),
         (IO::Parallel, Parsing::Parallel) => parse_all_blobs(read_all_files(input)),
     }
 }
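
For context on the "Fix sequential/parallel flip" bullet: `parse_candidate_strings` dispatches on an `options` byte that encodes both an IO strategy and a parsing strategy, and before this change the two mixed match arms were crossed, so requesting sequential IO with parallel parsing actually ran the parallel reader with the sequential parser (and vice versa). Below is a minimal JavaScript sketch of the corrected pairing, using made-up flag values and stub readers/parsers purely for illustration (the real flag layout and functions live in the oxide crate, not here).

```js
// Illustrative flag values only; the real bit layout is defined in the oxide crate.
const IO = { Sequential: 0b0001, Parallel: 0b0010 }
const Parsing = { Sequential: 0b0100, Parallel: 0b1000 }

// Stand-in readers/parsers so the sketch runs; each just tags its strategy.
const readAllFilesSync = (files) => files.map((f) => `read-sequential:${f}`)
const readAllFiles = (files) => files.map((f) => `read-parallel:${f}`)
const parseAllBlobsSync = (blobs) => blobs.map((b) => `parse-sequential(${b})`)
const parseAllBlobs = (blobs) => blobs.map((b) => `parse-parallel(${b})`)

// Corrected pairing: the IO flag picks the reader, the Parsing flag picks the
// parser. The pre-fix code swapped these for the two mixed combinations.
function parseCandidateStringsSketch(input, options) {
  let blobs = options & IO.Parallel ? readAllFiles(input) : readAllFilesSync(input)
  return options & Parsing.Parallel ? parseAllBlobs(blobs) : parseAllBlobsSync(blobs)
}

console.log(parseCandidateStringsSketch(['a.html'], IO.Sequential | Parsing.Parallel))
// -> [ 'parse-parallel(read-sequential:a.html)' ]
```

This mirrors the call site in `expandTailwindAtRules.js` below, which passes `IO.Parallel | Parsing.Parallel`.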

src/lib/expandTailwindAtRules.js (+36 -32)

@@ -135,43 +135,47 @@ export default function expandTailwindAtRules(context) {
 
     env.DEBUG && console.time('Reading changed files')
 
-    if (flagEnabled(context.tailwindConfig, 'oxideParser')) {
-      let rustParserContent = []
-      let regexParserContent = []
-
-      for (let item of context.changedContent) {
-        let transformer = getTransformer(context.tailwindConfig, item.extension)
-        let extractor = getExtractor(context, item.extension)
-
-        if (transformer === builtInTransformers.DEFAULT && extractor?.DEFAULT_EXTRACTOR === true) {
-          rustParserContent.push(item)
-        } else {
-          regexParserContent.push([item, { transformer, extractor }])
-        }
+    /** @type {[item: {file?: string, content?: string}, meta: {transformer: any, extractor: any}][]} */
+    let regexParserContent = []
+
+    /** @type {{file?: string, content?: string}[]} */
+    let rustParserContent = []
+
+    for (let item of context.changedContent) {
+      let transformer = getTransformer(context.tailwindConfig, item.extension)
+      let extractor = getExtractor(context, item.extension)
+
+      if (
+        flagEnabled(context.tailwindConfig, 'oxideParser') &&
+        transformer === builtInTransformers.DEFAULT &&
+        extractor?.DEFAULT_EXTRACTOR === true
+      ) {
+        rustParserContent.push(item)
+      } else {
+        regexParserContent.push([item, { transformer, extractor }])
       }
+    }
 
-      if (rustParserContent.length > 0) {
-        for (let candidate of parseCandidateStrings(
-          rustParserContent,
-          IO.Parallel | Parsing.Parallel
-        )) {
-          candidates.add(candidate)
-        }
+    // Read files using our newer, faster parser when:
+    // - Oxide is enabled; AND
+    // - The file is using default transfomers and extractors
+    if (rustParserContent.length > 0) {
+      for (let candidate of parseCandidateStrings(
+        rustParserContent,
+        IO.Parallel | Parsing.Parallel
+      )) {
+        candidates.add(candidate)
       }
+    }
+
+    // Otherwise, read any files in node and parse with regexes
+    const BATCH_SIZE = 500
+
+    for (let i = 0; i < regexParserContent.length; i += BATCH_SIZE) {
+      let batch = regexParserContent.slice(i, i + BATCH_SIZE)
 
-      if (regexParserContent.length > 0) {
-        await Promise.all(
-          regexParserContent.map(async ([{ file, content }, { transformer, extractor }]) => {
-            content = file ? await fs.promises.readFile(file, 'utf8') : content
-            getClassCandidates(transformer(content), extractor, candidates, seen)
-          })
-        )
-      }
-    } else {
       await Promise.all(
-        context.changedContent.map(async ({ file, content, extension }) => {
-          let transformer = getTransformer(context.tailwindConfig, extension)
-          let extractor = getExtractor(context, extension)
+        batch.map(async ([{ file, content }, { transformer, extractor }]) => {
           content = file ? await fs.promises.readFile(file, 'utf8') : content
           getClassCandidates(transformer(content), extractor, candidates, seen)
         })
