@@ -7,6 +7,13 @@ import { createBrotliCompress, createGzip } from "node:zlib";
 import * as logger from "./logger.js";
 
+interface CompressionOptions {
+  dir: string;
+  extensions: Array<string>;
+  enabled?: boolean;
+  batchSize?: number;
+}
+
 async function* walkDir(dir: string, extensions: Array<string>): AsyncGenerator<string> {
   const entries = await readdir(dir, { withFileTypes: true });
   for (const entry of entries) {
@@ -23,44 +30,70 @@ const filterFile = (file: string, extensions: Array<string>): boolean => {
   return extensions.some((ext) => extname(file) === ext);
 };
 
-export const gzip = async (dir: string, extensions: Array<string>, enabled?: boolean): Promise<void> => {
+// const compress = async <T>(name: string, compressor: () => T, opts: CompressionOptions): Promise<void> => {};
+
+export const gzip = async (
+  dir: string,
+  extensions: Array<string>,
+  enabled?: boolean,
+  batchSize = 10,
+): Promise<void> => {
   if (!enabled) {
     logger.warn("gzip compression disabled, skipping...");
     return;
   }
 
   const start = hrtime.bigint();
-
-  let counter = 0;
+  const files = [];
   for await (const file of walkDir(dir, extensions)) {
-    counter += 1;
-    const source = createReadStream(file);
-    const destination = createWriteStream(`${file}.gz`);
-    const gzip = createGzip({ level: 9 });
-    await stream.pipeline(source, gzip, destination);
+    files.push(file);
+  }
+
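+  // compress the collected files in fixed-size batches so at most `batchSize` pipelines run concurrently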
+  for (let i = 0; i < files.length; i += batchSize) {
+    const batch = files.slice(i, i + batchSize);
+    await Promise.all(
+      batch.map(async (path) => {
+        const source = createReadStream(path);
+        const destination = createWriteStream(`${path}.gz`);
+        const gzip = createGzip({ level: 9 });
+        await stream.pipeline(source, gzip, destination);
+      }),
+    );
   }
 
   const end = hrtime.bigint();
-  logger.success(`finished gzip of ${counter} files in ${(end - start) / BigInt(1000000)}ms`);
+  logger.success(`finished gzip of ${files.length} files in ${(end - start) / BigInt(1000000)}ms`);
 };
 
-export const brotli = async (dir: string, extensions: Array<string>, enabled?: boolean): Promise<void> => {
+export const brotli = async (
+  dir: string,
+  extensions: Array<string>,
+  enabled?: boolean,
+  batchSize = 10,
+): Promise<void> => {
   if (!enabled) {
     logger.warn("brotli compression disabled, skipping...");
     return;
   }
 
   const start = hrtime.bigint();
-
-  let counter = 0;
+  const files = [];
   for await (const file of walkDir(dir, extensions)) {
-    counter += 1;
-    const source = createReadStream(file);
-    const destination = createWriteStream(`${file}.br`);
-    const brotli = createBrotliCompress();
-    await stream.pipeline(source, brotli, destination);
+    files.push(file);
+  }
+
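+  // same batching approach as in gzip above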
+  for (let i = 0; i < files.length; i += batchSize) {
+    const batch = files.slice(i, i + batchSize);
+    await Promise.all(
+      batch.map(async (path) => {
+        const source = createReadStream(path);
+        const destination = createWriteStream(`${path}.br`);
+        const brotli = createBrotliCompress();
+        await stream.pipeline(source, brotli, destination);
+      }),
+    );
   }
 
   const end = hrtime.bigint();
-  logger.success(`finished brotli of ${counter} files in ${(end - start) / BigInt(1000000)}ms`);
+  logger.success(`finished brotli of ${files.length} files in ${(end - start) / BigInt(1000000)}ms`);
 };
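
The commented-out `compress` stub and the new `CompressionOptions` interface hint that the duplicated gzip/brotli bodies could eventually collapse into one shared helper. Below is a minimal sketch of that refactor, not the PR's final API: it reuses the module's existing `walkDir` and `CompressionOptions`, restates the Node imports the file presumably already has, and the options-object signature and `Transform`-factory parameter are illustrative choices.

import { createReadStream, createWriteStream } from "node:fs";
import { hrtime } from "node:process";
import * as stream from "node:stream/promises";
import type { Transform } from "node:stream";
import { createBrotliCompress, createGzip } from "node:zlib";

import * as logger from "./logger.js";

// `walkDir` and `CompressionOptions` are assumed to be the ones defined in this file.
const compress = async (
  name: string,
  extension: string,
  compressor: () => Transform,
  opts: CompressionOptions,
): Promise<void> => {
  if (!opts.enabled) {
    logger.warn(`${name} compression disabled, skipping...`);
    return;
  }

  const start = hrtime.bigint();

  // collect the file list first so it can be processed in bounded batches
  const files: string[] = [];
  for await (const file of walkDir(opts.dir, opts.extensions)) {
    files.push(file);
  }

  const batchSize = opts.batchSize ?? 10;
  for (let i = 0; i < files.length; i += batchSize) {
    await Promise.all(
      files.slice(i, i + batchSize).map((path) =>
        stream.pipeline(
          createReadStream(path),
          compressor(), // fresh Transform per file; zlib streams are single-use
          createWriteStream(`${path}${extension}`),
        ),
      ),
    );
  }

  const end = hrtime.bigint();
  logger.success(`finished ${name} of ${files.length} files in ${(end - start) / BigInt(1000000)}ms`);
};

// the exported functions then become thin wrappers
export const gzip = (opts: CompressionOptions): Promise<void> =>
  compress("gzip", ".gz", () => createGzip({ level: 9 }), opts);

export const brotli = (opts: CompressionOptions): Promise<void> =>
  compress("brotli", ".br", () => createBrotliCompress(), opts);

An options object keeps the call sites self-documenting, but positional wrappers matching the currently exported signatures would work just as well.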