source.dussan.org Git - jquery.git/commitdiff
Build: migrate more uses of fs.promises; use node: protocol
author Timmy Willison <timmywil@users.noreply.github.com>
Sun, 10 Mar 2024 16:19:15 +0000 (12:19 -0400)
committer Timmy Willison <timmywil@users.noreply.github.com>
Mon, 11 Mar 2024 17:29:23 +0000 (13:29 -0400)
Ref gh-5440

12 files changed:
build/release.js
build/release/authors.js
build/release/cdn.js
build/release/dist.js
build/tasks/compare_size.mjs
build/tasks/lib/isCleanWorkingDir.js
build/tasks/minify.js
build/tasks/node_smoke_tests.js
build/tasks/npmcopy.js
build/tasks/promises_aplus_tests.js
build/tasks/qunit-fixture.js
test/runner/createTestServer.js

index d49422129fc7f4dadcf8115215eac06ce66b61be..bdfb0bc3b9f36a6646ac2b99bd96c61375b01130 100644 (file)
@@ -1,6 +1,6 @@
 "use strict";
 
-var fs = require( "fs" );
+const fs = require( "node:fs" );
 
 module.exports = function( Release ) {
 
index 9ecd9f642255cde9974521bce9ee01e27807bdb0..bf72b8af76e657b6bf2bc0efabe684e6962b175c 100644 (file)
@@ -1,6 +1,6 @@
 "use strict";
 
-const fs = require( "node:fs" );
+const fs = require( "node:fs/promises" );
 const util = require( "node:util" );
 const exec = util.promisify( require( "node:child_process" ).exec );
 const rnewline = /\r?\n/;
@@ -40,7 +40,7 @@ function cloneSizzle() {
 }
 
 async function getLastAuthor() {
-       const authorsTxt = await fs.promises.readFile( "AUTHORS.txt", "utf8" );
+       const authorsTxt = await fs.readFile( "AUTHORS.txt", "utf8" );
        return authorsTxt.trim().split( rnewline ).pop();
 }
 
@@ -93,7 +93,7 @@ async function updateAuthors() {
        const authors = await getAuthors();
 
        const authorsTxt = "Authors ordered by first contribution.\n\n" + authors.join( "\n" ) + "\n";
-       await fs.promises.writeFile( "AUTHORS.txt", authorsTxt );
+       await fs.writeFile( "AUTHORS.txt", authorsTxt );
 
        console.log( "AUTHORS.txt updated" );
 }
index a75ad730346a03e486e5ae7fd989c2a9cc0cf7c4..6102996617cd0d73b539d54d1f2162d4bc2ef65b 100644 (file)
@@ -1,9 +1,9 @@
 "use strict";
 
-const fs = require( "fs" );
+const fs = require( "node:fs" );
 const shell = require( "shelljs" );
-const path = require( "path" );
-const os = require( "os" );
+const path = require( "node:path" );
+const os = require( "node:os" );
 
 const cdnFolderContainer = "dist/cdn";
 const cdnFolderVersioned = `${ cdnFolderContainer }/versioned`;
@@ -49,7 +49,7 @@ function makeReleaseCopies( Release ) {
        ].forEach( ( { filesMap, cdnFolder } ) => {
                shell.mkdir( "-p", cdnFolder );
 
-               Object.keys( filesMap ).forEach( key => {
+               Object.keys( filesMap ).forEach( ( key ) => {
                        let text;
                        const builtFile = filesMap[ key ];
                        const unpathedFile = key.replace( /@VER/g, Release.newVersion );
@@ -60,28 +60,33 @@ function makeReleaseCopies( Release ) {
                                // Map files need to reference the new uncompressed name;
                                // assume that all files reside in the same directory.
                                // "file":"jquery.min.js" ... "sources":["jquery.js"]
-                               text = fs.readFileSync( builtFile, "utf8" )
-                                       .replace( /"file":"([^"]+)"/,
-                                               "\"file\":\"" + unpathedFile.replace( /\.min\.map/, ".min.js\"" ) )
-                                       .replace( /"sources":\["([^"]+)"\]/,
-                                               "\"sources\":[\"" + unpathedFile.replace( /\.min\.map/, ".js" ) + "\"]" );
+                               text = fs
+                                       .readFileSync( builtFile, "utf8" )
+                                       .replace(
+                                               /"file":"([^"]+)"/,
+                                               `"file":"${ unpathedFile.replace( /\.min\.map/, ".min.js" ) }"`
+                                       )
+                                       .replace(
+                                               /"sources":\["([^"]+)"\]/,
+                                               `"sources":["${ unpathedFile.replace( /\.min\.map/, ".js" ) }"]`
+                                       );
                                fs.writeFileSync( releaseFile, text );
                        } else if ( builtFile !== releaseFile ) {
                                shell.cp( "-f", builtFile, releaseFile );
                        }
                } );
-
        } );
 }
 
 async function makeArchives( Release ) {
-
        Release.chdir( Release.dir.repo );
 
        async function makeArchive( { cdn, filesMap, cdnFolder } ) {
                return new Promise( ( resolve, reject ) => {
                        if ( Release.preRelease ) {
-                               console.log( "Skipping archive creation for " + cdn + "; this is a beta release." );
+                               console.log(
+                                       `Skipping archive creation for ${ cdn }; this is a beta release.`
+                               );
                                resolve();
                                return;
                        }
@@ -99,7 +104,7 @@ async function makeArchives( Release ) {
 
                        output.on( "close", resolve );
 
-                       output.on( "error", err => {
+                       output.on( "error", ( err ) => {
                                reject( err );
                        } );
 
@@ -107,33 +112,37 @@ async function makeArchives( Release ) {
 
                        let finalFilesMap = Object.create( null );
                        for ( const [ releaseFile, builtFile ] of Object.entries( filesMap ) ) {
-                               finalFilesMap[ releaseFile.replace( rver, Release.newVersion ) ] = builtFile;
+                               finalFilesMap[ releaseFile.replace( rver, Release.newVersion ) ] =
+                                       builtFile;
                        }
 
-                       const files = Object
-                               .keys( filesMap )
-                               .map( item => `${ cdnFolder }/${
-                                       item.replace( rver, Release.newVersion )
-                               }` );
+                       const files = Object.keys( filesMap ).map(
+                               ( item ) => `${ cdnFolder }/${ item.replace( rver, Release.newVersion ) }`
+                       );
 
                        if ( os.platform() === "win32" ) {
                                sum = [];
                                for ( i = 0; i < files.length; i++ ) {
                                        result = Release.exec(
-                                               "certutil -hashfile " + files[ i ] + " MD5", "Error retrieving md5sum"
+                                               "certutil -hashfile " + files[ i ] + " MD5",
+                                               "Error retrieving md5sum"
                                        );
                                        sum.push( rmd5.exec( result )[ 0 ] + " " + files[ i ] );
                                }
                                sum = sum.join( "\n" );
                        } else {
-                               sum = Release.exec( "md5 -r " + files.join( " " ), "Error retrieving md5sum" );
+                               sum = Release.exec(
+                                       "md5 -r " + files.join( " " ),
+                                       "Error retrieving md5sum"
+                               );
                        }
                        fs.writeFileSync( md5file, sum );
                        files.push( md5file );
 
-                       files.forEach( file => {
-                               archiver.append( fs.createReadStream( file ),
-                                       { name: path.basename( file ) } );
+                       files.forEach( ( file ) => {
+                               archiver.append( fs.createReadStream( file ), {
+                                       name: path.basename( file )
+                               } );
                        } );
 
                        archiver.finalize();
index 9851262a295d4f62a4b7fc2575b1bd1b4ee40227..e71dd3b4dbb9bf6688dea443f6cf94617b469095 100644 (file)
@@ -2,7 +2,7 @@
 
 module.exports = function( Release, files, complete ) {
 
-       const fs = require( "fs" ).promises;
+       const fs = require( "node:fs/promises" );
        const shell = require( "shelljs" );
        const inquirer = require( "inquirer" );
        const pkg = require( `${ Release.dir.repo }/package.json` );
index c8e55bb42bf8e3646414a658fc1d59b8d77581c0..ea239fb27fb2261764abcb0ee2de5bb1f59189ed 100644 (file)
@@ -1,5 +1,5 @@
 import chalk from "chalk";
-import fs from "node:fs";
+import fs from "node:fs/promises";
 import { promisify } from "node:util";
 import zlib from "node:zlib";
 import { exec as nodeExec } from "node:child_process";
@@ -34,7 +34,7 @@ function getBranchHeader( branch, commit ) {
 async function getCache( loc ) {
        let cache;
        try {
-               const contents = await fs.promises.readFile( loc, "utf8" );
+               const contents = await fs.readFile( loc, "utf8" );
                cache = JSON.parse( contents );
        } catch ( err ) {
                return {};
@@ -60,7 +60,7 @@ function cacheResults( results ) {
 }
 
 function saveCache( loc, cache ) {
-       return fs.promises.writeFile( loc, JSON.stringify( cache ) );
+       return fs.writeFile( loc, JSON.stringify( cache ) );
 }
 
 function compareSizes( existing, current, padLength ) {
@@ -104,7 +104,7 @@ export async function compareSize( { cache = ".sizecache.json", files } = {} ) {
        const results = await Promise.all(
                files.map( async function( filename ) {
 
-                       let contents = await fs.promises.readFile( filename, "utf8" );
+                       let contents = await fs.readFile( filename, "utf8" );
 
                        // Remove the short SHA and .dirty from comparisons.
                        // The short SHA so commits can be compared against each other
index 16c87fd9d3320bc1a3674a718f873544c12811db..3ad8f89bcff325e203a865c61df85971931bc820 100644 (file)
@@ -1,7 +1,7 @@
 "use strict";
 
-const util = require( "util" );
-const exec = util.promisify( require( "child_process" ).exec );
+const util = require( "node:util" );
+const exec = util.promisify( require( "node:child_process" ).exec );
 
 module.exports = async function isCleanWorkingDir() {
        const { stdout } = await exec( "git status --untracked-files=no --porcelain" );
index 9aacc31e82018b2d54331df71a922ec530c3601e..8c536c1ef36d28b73af0794aa57e8dd3e62f2ad4 100644 (file)
@@ -1,15 +1,15 @@
 "use strict";
 
 const swc = require( "@swc/core" );
-const fs = require( "fs" );
-const path = require( "path" );
+const fs = require( "node:fs/promises" );
+const path = require( "node:path" );
 const processForDist = require( "./dist" );
 const getTimestamp = require( "./lib/getTimestamp" );
 
 const rjs = /\.js$/;
 
 module.exports = async function minify( { filename, dir, esm } ) {
-       const contents = await fs.promises.readFile( path.join( dir, filename ), "utf8" );
+       const contents = await fs.readFile( path.join( dir, filename ), "utf8" );
        const version = /jQuery JavaScript Library ([^\n]+)/.exec( contents )[ 1 ];
 
        const { code, map: incompleteMap } = await swc.minify(
@@ -48,11 +48,11 @@ module.exports = async function minify( { filename, dir, esm } ) {
        } );
 
        await Promise.all( [
-               fs.promises.writeFile(
+               fs.writeFile(
                        path.join( dir, minFilename ),
                        code
                ),
-               fs.promises.writeFile(
+               fs.writeFile(
                        path.join( dir, mapFilename ),
                        map
                )
index 433a005d5c09394b941fdc4dd1f8bcfe0b44a7aa..6f99b9981df7808b22806740230df73b85cbf84c 100644 (file)
@@ -1,8 +1,8 @@
 "use strict";
 
-const fs = require( "fs" );
-const util = require( "util" );
-const exec = util.promisify( require( "child_process" ).exec );
+const fs = require( "node:fs/promises" );
+const util = require( "node:util" );
+const exec = util.promisify( require( "node:child_process" ).exec );
 const verifyNodeVersion = require( "./lib/verifyNodeVersion" );
 
 const allowedLibraryTypes = [ "regular", "factory" ];
@@ -26,7 +26,7 @@ async function runTests( { libraryType, sourceType, module } ) {
                } ${ sourceType } "${ module }"` );
        }
        const dir = `./test/node_smoke_tests/${ sourceType }/${ libraryType }`;
-       const files = await fs.promises.readdir( dir, { withFileTypes: true } );
+       const files = await fs.readdir( dir, { withFileTypes: true } );
        const testFiles = files.filter( ( testFilePath ) => testFilePath.isFile() );
 
        if ( !testFiles.length ) {
index 750f26d8a79cc0107d68b51f6a06dfed06eb2582..93c0658b951ea6ed213de05bebebd08eada215c9 100644 (file)
@@ -1,7 +1,7 @@
 "use strict";
 
-const fs = require( "fs" );
-const path = require( "path" );
+const fs = require( "node:fs/promises" );
+const path = require( "node:path" );
 
 const projectDir = path.resolve( __dirname, "..", ".." );
 
@@ -26,15 +26,15 @@ const files = {
 };
 
 async function npmcopy() {
-       await fs.promises.mkdir( path.resolve( projectDir, "external" ), {
+       await fs.mkdir( path.resolve( projectDir, "external" ), {
                recursive: true
        } );
        for ( const [ dest, source ] of Object.entries( files ) ) {
                const from = path.resolve( projectDir, "node_modules", source );
                const to = path.resolve( projectDir, "external", dest );
                const toDir = path.dirname( to );
-               await fs.promises.mkdir( toDir, { recursive: true } );
-               await fs.promises.copyFile( from, to );
+               await fs.mkdir( toDir, { recursive: true } );
+               await fs.copyFile( from, to );
                console.log( `${ source } → ${ dest }` );
        }
 }
index d917b5848720a9d67699bb5891339fcd221bbd38..6360de6c4279e61b3ae48b532250fb9f1aaa0179 100644 (file)
@@ -1,9 +1,9 @@
 "use strict";
 
-const { spawn } = require( "child_process" );
+const { spawn } = require( "node:child_process" );
 const verifyNodeVersion = require( "./lib/verifyNodeVersion" );
-const path = require( "path" );
-const os = require( "os" );
+const path = require( "node:path" );
+const os = require( "node:os" );
 
 if ( !verifyNodeVersion() ) {
        return;
index 3059bb92906d688769a5b85d518889f51facad8a..dbb789b608ce54c68259bdea223c621b5f34b607 100644 (file)
@@ -1,10 +1,10 @@
 "use strict";
 
-const fs = require( "fs" );
+const fs = require( "node:fs/promises" );
 
 async function generateFixture() {
-       const fixture = await fs.promises.readFile( "./test/data/qunit-fixture.html", "utf8" );
-       await fs.promises.writeFile(
+       const fixture = await fs.readFile( "./test/data/qunit-fixture.html", "utf8" );
+       await fs.writeFile(
                "./test/data/qunit-fixture.js",
                "// Generated by build/tasks/qunit-fixture.js\n" +
                "QUnit.config.fixture = " +
index c591917e5e42d6078dfc0fed385edf7a13140b5c..8021db40fd056140669624b24570bc436a598e70 100644 (file)
@@ -1,7 +1,7 @@
 import bodyParser from "body-parser";
 import express from "express";
 import bodyParserErrorHandler from "express-body-parser-error-handler";
-import { readFile } from "fs/promises";
+import { readFile } from "node:fs/promises";
 import mockServer from "../middleware-mockserver.cjs";
 
 export async function createTestServer( report ) {