"use strict";
-var fs = require( "fs" );
+const fs = require( "node:fs" );
module.exports = function( Release ) {
"use strict";
-const fs = require( "node:fs" );
+const fs = require( "node:fs/promises" );
const util = require( "node:util" );
const exec = util.promisify( require( "node:child_process" ).exec );
const rnewline = /\r?\n/;
}
// Returns the last entry of AUTHORS.txt, i.e. the most recently added
// author (the file is ordered by first contribution — see the header
// written elsewhere in this script).
// NOTE(review): diff hunk — the "-" line is the old fs.promises call,
// the "+" line uses the node:fs/promises import; behavior is unchanged.
// Relies on module-scope `fs` and the `rnewline` ( /\r?\n/ ) regex.
async function getLastAuthor() {
- const authorsTxt = await fs.promises.readFile( "AUTHORS.txt", "utf8" );
+ const authorsTxt = await fs.readFile( "AUTHORS.txt", "utf8" );
return authorsTxt.trim().split( rnewline ).pop();
}
const authors = await getAuthors();
const authorsTxt = "Authors ordered by first contribution.\n\n" + authors.join( "\n" ) + "\n";
- await fs.promises.writeFile( "AUTHORS.txt", authorsTxt );
+ await fs.writeFile( "AUTHORS.txt", authorsTxt );
console.log( "AUTHORS.txt updated" );
}
"use strict";
-const fs = require( "fs" );
+const fs = require( "node:fs" );
const shell = require( "shelljs" );
-const path = require( "path" );
-const os = require( "os" );
+const path = require( "node:path" );
+const os = require( "node:os" );
const cdnFolderContainer = "dist/cdn";
const cdnFolderVersioned = `${ cdnFolderContainer }/versioned`;
].forEach( ( { filesMap, cdnFolder } ) => {
shell.mkdir( "-p", cdnFolder );
- Object.keys( filesMap ).forEach( key => {
+ Object.keys( filesMap ).forEach( ( key ) => {
let text;
const builtFile = filesMap[ key ];
const unpathedFile = key.replace( /@VER/g, Release.newVersion );
// Map files need to reference the new uncompressed name;
// assume that all files reside in the same directory.
// "file":"jquery.min.js" ... "sources":["jquery.js"]
- text = fs.readFileSync( builtFile, "utf8" )
- .replace( /"file":"([^"]+)"/,
- "\"file\":\"" + unpathedFile.replace( /\.min\.map/, ".min.js\"" ) )
- .replace( /"sources":\["([^"]+)"\]/,
- "\"sources\":[\"" + unpathedFile.replace( /\.min\.map/, ".js" ) + "\"]" );
+ text = fs
+ .readFileSync( builtFile, "utf8" )
+ .replace(
+ /"file":"([^"]+)"/,
+ `"file":"${ unpathedFile.replace( /\.min\.map/, ".min.js" ) }"`
+ )
+ .replace(
+ /"sources":\["([^"]+)"\]/,
+ `"sources":["${ unpathedFile.replace( /\.min\.map/, ".js" ) }"]`
+ );
fs.writeFileSync( releaseFile, text );
} else if ( builtFile !== releaseFile ) {
shell.cp( "-f", builtFile, releaseFile );
}
} );
-
} );
}
async function makeArchives( Release ) {
-
Release.chdir( Release.dir.repo );
async function makeArchive( { cdn, filesMap, cdnFolder } ) {
return new Promise( ( resolve, reject ) => {
if ( Release.preRelease ) {
- console.log( "Skipping archive creation for " + cdn + "; this is a beta release." );
+ console.log(
+ `Skipping archive creation for ${ cdn }; this is a beta release.`
+ );
resolve();
return;
}
output.on( "close", resolve );
- output.on( "error", err => {
+ output.on( "error", ( err ) => {
reject( err );
} );
let finalFilesMap = Object.create( null );
for ( const [ releaseFile, builtFile ] of Object.entries( filesMap ) ) {
- finalFilesMap[ releaseFile.replace( rver, Release.newVersion ) ] = builtFile;
+ finalFilesMap[ releaseFile.replace( rver, Release.newVersion ) ] =
+ builtFile;
}
- const files = Object
- .keys( filesMap )
- .map( item => `${ cdnFolder }/${
- item.replace( rver, Release.newVersion )
- }` );
+ const files = Object.keys( filesMap ).map(
+ ( item ) => `${ cdnFolder }/${ item.replace( rver, Release.newVersion ) }`
+ );
if ( os.platform() === "win32" ) {
sum = [];
for ( i = 0; i < files.length; i++ ) {
result = Release.exec(
- "certutil -hashfile " + files[ i ] + " MD5", "Error retrieving md5sum"
+ "certutil -hashfile " + files[ i ] + " MD5",
+ "Error retrieving md5sum"
);
sum.push( rmd5.exec( result )[ 0 ] + " " + files[ i ] );
}
sum = sum.join( "\n" );
} else {
- sum = Release.exec( "md5 -r " + files.join( " " ), "Error retrieving md5sum" );
+ sum = Release.exec(
+ "md5 -r " + files.join( " " ),
+ "Error retrieving md5sum"
+ );
}
fs.writeFileSync( md5file, sum );
files.push( md5file );
- files.forEach( file => {
- archiver.append( fs.createReadStream( file ),
- { name: path.basename( file ) } );
+ files.forEach( ( file ) => {
+ archiver.append( fs.createReadStream( file ), {
+ name: path.basename( file )
+ } );
} );
archiver.finalize();
module.exports = function( Release, files, complete ) {
- const fs = require( "fs" ).promises;
+ const fs = require( "node:fs/promises" );
const shell = require( "shelljs" );
const inquirer = require( "inquirer" );
const pkg = require( `${ Release.dir.repo }/package.json` );
import chalk from "chalk";
-import fs from "node:fs";
+import fs from "node:fs/promises";
import { promisify } from "node:util";
import zlib from "node:zlib";
import { exec as nodeExec } from "node:child_process";
async function getCache( loc ) {
let cache;
try {
- const contents = await fs.promises.readFile( loc, "utf8" );
+ const contents = await fs.readFile( loc, "utf8" );
cache = JSON.parse( contents );
} catch ( err ) {
return {};
}
// Serializes the size cache to JSON and writes it to `loc`; returns the
// write promise so callers can await completion.
// NOTE(review): diff hunk — only the fs API surface changes
// ( fs.promises.writeFile → node:fs/promises writeFile ); same behavior.
function saveCache( loc, cache ) {
- return fs.promises.writeFile( loc, JSON.stringify( cache ) );
+ return fs.writeFile( loc, JSON.stringify( cache ) );
}
function compareSizes( existing, current, padLength ) {
const results = await Promise.all(
files.map( async function( filename ) {
- let contents = await fs.promises.readFile( filename, "utf8" );
+ let contents = await fs.readFile( filename, "utf8" );
// Remove the short SHA and .dirty from comparisons.
// The short SHA so commits can be compared against each other
"use strict";
-const util = require( "util" );
-const exec = util.promisify( require( "child_process" ).exec );
+const util = require( "node:util" );
+const exec = util.promisify( require( "node:child_process" ).exec );
module.exports = async function isCleanWorkingDir() {
const { stdout } = await exec( "git status --untracked-files=no --porcelain" );
"use strict";
const swc = require( "@swc/core" );
-const fs = require( "fs" );
-const path = require( "path" );
+const fs = require( "node:fs/promises" );
+const path = require( "node:path" );
const processForDist = require( "./dist" );
const getTimestamp = require( "./lib/getTimestamp" );
const rjs = /\.js$/;
module.exports = async function minify( { filename, dir, esm } ) {
- const contents = await fs.promises.readFile( path.join( dir, filename ), "utf8" );
+ const contents = await fs.readFile( path.join( dir, filename ), "utf8" );
const version = /jQuery JavaScript Library ([^\n]+)/.exec( contents )[ 1 ];
const { code, map: incompleteMap } = await swc.minify(
} );
await Promise.all( [
- fs.promises.writeFile(
+ fs.writeFile(
path.join( dir, minFilename ),
code
),
- fs.promises.writeFile(
+ fs.writeFile(
path.join( dir, mapFilename ),
map
)
"use strict";
-const fs = require( "fs" );
-const util = require( "util" );
-const exec = util.promisify( require( "child_process" ).exec );
+const fs = require( "node:fs/promises" );
+const util = require( "node:util" );
+const exec = util.promisify( require( "node:child_process" ).exec );
const verifyNodeVersion = require( "./lib/verifyNodeVersion" );
const allowedLibraryTypes = [ "regular", "factory" ];
} ${ sourceType } "${ module }"` );
}
const dir = `./test/node_smoke_tests/${ sourceType }/${ libraryType }`;
- const files = await fs.promises.readdir( dir, { withFileTypes: true } );
+ const files = await fs.readdir( dir, { withFileTypes: true } );
const testFiles = files.filter( ( testFilePath ) => testFilePath.isFile() );
if ( !testFiles.length ) {
"use strict";
-const fs = require( "fs" );
-const path = require( "path" );
+const fs = require( "node:fs/promises" );
+const path = require( "node:path" );
const projectDir = path.resolve( __dirname, "..", ".." );
};
// Copies vendored files from node_modules into the project's external/
// directory according to the module-scope `files` map ( dest → source ),
// creating target directories as needed and logging each copy.
// NOTE(review): diff hunk — each "-"/"+" pair swaps fs.promises.* for the
// node:fs/promises import; runtime behavior is unchanged.
// Copies run sequentially ( awaited inside the loop ), preserving the
// original ordering of log output.
async function npmcopy() {
- await fs.promises.mkdir( path.resolve( projectDir, "external" ), {
+ await fs.mkdir( path.resolve( projectDir, "external" ), {
recursive: true
} );
for ( const [ dest, source ] of Object.entries( files ) ) {
const from = path.resolve( projectDir, "node_modules", source );
const to = path.resolve( projectDir, "external", dest );
const toDir = path.dirname( to );
- await fs.promises.mkdir( toDir, { recursive: true } );
- await fs.promises.copyFile( from, to );
+ await fs.mkdir( toDir, { recursive: true } );
+ await fs.copyFile( from, to );
console.log( `${ source } → ${ dest }` );
}
}
"use strict";
-const { spawn } = require( "child_process" );
+const { spawn } = require( "node:child_process" );
const verifyNodeVersion = require( "./lib/verifyNodeVersion" );
-const path = require( "path" );
-const os = require( "os" );
+const path = require( "node:path" );
+const os = require( "node:os" );
if ( !verifyNodeVersion() ) {
return;
"use strict";
-const fs = require( "fs" );
+const fs = require( "node:fs/promises" );
async function generateFixture() {
- const fixture = await fs.promises.readFile( "./test/data/qunit-fixture.html", "utf8" );
- await fs.promises.writeFile(
+ const fixture = await fs.readFile( "./test/data/qunit-fixture.html", "utf8" );
+ await fs.writeFile(
"./test/data/qunit-fixture.js",
"// Generated by build/tasks/qunit-fixture.js\n" +
"QUnit.config.fixture = " +
import bodyParser from "body-parser";
import express from "express";
import bodyParserErrorHandler from "express-body-parser-error-handler";
-import { readFile } from "fs/promises";
+import { readFile } from "node:fs/promises";
import mockServer from "../middleware-mockserver.cjs";
export async function createTestServer( report ) {