// NOTE(review): this file is diff-style text ("-"/"+" markers) spanning several
// release scripts, not plain JavaScript. This hunk migrates bare require()
// specifiers to the "node:" prefix and switches to the promise-based fs API
// (node:fs/promises). Comments only are added here; diff lines are untouched.
"use strict";
-var fs = require( "fs" );
+const fs = require( "node:fs" );
module.exports = function( Release ) {
"use strict";
-const fs = require( "node:fs" );
+const fs = require( "node:fs/promises" );
const util = require( "node:util" );
const exec = util.promisify( require( "node:child_process" ).exec );
// Matches LF or CRLF when splitting file contents into lines.
const rnewline = /\r?\n/;
}
// Reads AUTHORS.txt and returns its final line — the most recently added
// author. Assumes `fs` is node:fs/promises (per the hunk below) and that
// `rnewline` (declared elsewhere in this file) splits on LF/CRLF.
async function getLastAuthor() {
-	const authorsTxt = await fs.promises.readFile( "AUTHORS.txt", "utf8" );
+	const authorsTxt = await fs.readFile( "AUTHORS.txt", "utf8" );
	return authorsTxt.trim().split( rnewline ).pop();
}
const authors = await getAuthors();
const authorsTxt = "Authors ordered by first contribution.\n\n" + authors.join( "\n" ) + "\n";
- await fs.promises.writeFile( "AUTHORS.txt", authorsTxt );
+ await fs.writeFile( "AUTHORS.txt", authorsTxt );
console.log( "AUTHORS.txt updated" );
}
// Hunk: same "node:"-specifier migration for the CDN distribution script, plus
// a formatting-only change putting one file name per line in the CDN lists.
// "VER" is a placeholder — presumably replaced with the release version when
// the files are copied (see the VER/newVersion handling later in this file).
"use strict";
-var
-	fs = require( "fs" ),
+var fs = require( "node:fs" ),
	shell = require( "shelljs" ),
-	path = require( "path" ),
-	os = require( "os" ),
-
+	path = require( "node:path" ),
+	os = require( "node:os" ),
	cdnFolder = "dist/cdn",
-
	// Release file name (VER placeholder) → built file it is produced from.
	releaseFiles = {
		"jquery-VER.js": "dist/jquery.js",
		"jquery-VER.min.js": "dist/jquery.min.js",
		"jquery-VER.slim.min.js": "dist/jquery.slim.min.js",
		"jquery-VER.slim.min.map": "dist/jquery.slim.min.map"
	},
-
	// File names for the Google CDN (unversioned).
	googleFilesCDN = [
-		"jquery.js", "jquery.min.js", "jquery.min.map",
-		"jquery.slim.js", "jquery.slim.min.js", "jquery.slim.min.map"
+		"jquery.js",
+		"jquery.min.js",
+		"jquery.min.map",
+		"jquery.slim.js",
+		"jquery.slim.min.js",
+		"jquery.slim.min.map"
	],
-
	// File names for the Microsoft CDN (versioned).
	msFilesCDN = [
-		"jquery-VER.js", "jquery-VER.min.js", "jquery-VER.min.map",
-		"jquery-VER.slim.js", "jquery-VER.slim.min.js", "jquery-VER.slim.min.map"
+		"jquery-VER.js",
+		"jquery-VER.min.js",
+		"jquery-VER.min.map",
+		"jquery-VER.slim.js",
+		"jquery-VER.slim.min.js",
+		"jquery-VER.slim.min.map"
	];
/**
// Map files need to reference the new uncompressed name;
// assume that all files reside in the same directory.
// "file":"jquery.min.js" ... "sources":["jquery.js"]
- text = fs.readFileSync( builtFile, "utf8" )
- .replace( /"file":"([^"]+)"/,
- "\"file\":\"" + unpathedFile.replace( /\.min\.map/, ".min.js\"" ) )
- .replace( /"sources":\["([^"]+)"\]/,
- "\"sources\":[\"" + unpathedFile.replace( /\.min\.map/, ".js" ) + "\"]" );
+ text = fs
+ .readFileSync( builtFile, "utf8" )
+ .replace(
+ /"file":"([^"]+)"/,
+ `"file":"${ unpathedFile.replace( /\.min\.map/, ".min.js" ) }"`
+ )
+ .replace(
+ /"sources":\["([^"]+)"\]/,
+ `"sources":["${ unpathedFile.replace( /\.min\.map/, ".js" ) }"]`
+ );
fs.writeFileSync( releaseFile, text );
} else if ( builtFile !== releaseFile ) {
shell.cp( "-f", builtFile, releaseFile );
}
function makeArchives( Release, callback ) {
-
Release.chdir( Release.dir.repo );
function makeArchive( cdn, files, callback ) {
if ( Release.preRelease ) {
- console.log( "Skipping archive creation for " + cdn + "; this is a beta release." );
+ console.log(
+ `Skipping archive creation for ${ cdn }; this is a beta release.`
+ );
callback();
return;
}
console.log( "Creating production archive for " + cdn );
- var i, sum, result,
+ var i,
+ sum,
+ result,
archiver = require( "archiver" )( "zip" ),
md5file = cdnFolder + "/" + cdn + "-md5.txt",
output = fs.createWriteStream(
archiver.pipe( output );
files = files.map( function( item ) {
- return "dist" + ( rver.test( item ) ? "/cdn" : "" ) + "/" +
- item.replace( rver, Release.newVersion );
+ return (
+ "dist" +
+ ( rver.test( item ) ? "/cdn" : "" ) +
+ "/" +
+ item.replace( rver, Release.newVersion )
+ );
} );
if ( os.platform() === "win32" ) {
sum = [];
for ( i = 0; i < files.length; i++ ) {
result = Release.exec(
- "certutil -hashfile " + files[ i ] + " MD5", "Error retrieving md5sum"
+ "certutil -hashfile " + files[ i ] + " MD5",
+ "Error retrieving md5sum"
);
sum.push( rmd5.exec( result )[ 0 ] + " " + files[ i ] );
}
sum = sum.join( "\n" );
} else {
- sum = Release.exec( "md5 -r " + files.join( " " ), "Error retrieving md5sum" );
+ sum = Release.exec(
+ "md5 -r " + files.join( " " ),
+ "Error retrieving md5sum"
+ );
}
fs.writeFileSync( md5file, sum );
files.push( md5file );
files.forEach( function( file ) {
- archiver.append( fs.createReadStream( file ),
- { name: path.basename( file ) } );
+ archiver.append( fs.createReadStream( file ), { name: path.basename( file ) } );
} );
archiver.finalize();
module.exports = function( Release, files, complete ) {
- const fs = require( "fs" ).promises;
+ const fs = require( "node:fs/promises" );
const shell = require( "shelljs" );
const inquirer = require( "inquirer" );
const pkg = require( `${ Release.dir.repo }/package.json` );
import chalk from "chalk";
-import fs from "node:fs";
+import fs from "node:fs/promises";
import { promisify } from "node:util";
import zlib from "node:zlib";
import { exec as nodeExec } from "node:child_process";
async function getCache( loc ) {
let cache;
try {
- const contents = await fs.promises.readFile( loc, "utf8" );
+ const contents = await fs.readFile( loc, "utf8" );
cache = JSON.parse( contents );
} catch ( err ) {
return {};
}
// Persists `cache` to the file at `loc` as JSON — companion to getCache above.
// Returns the fs.writeFile promise; callers must await or chain it.
function saveCache( loc, cache ) {
-	return fs.promises.writeFile( loc, JSON.stringify( cache ) );
+	return fs.writeFile( loc, JSON.stringify( cache ) );
}
function compareSizes( existing, current, padLength ) {
const results = await Promise.all(
files.map( async function( filename ) {
- let contents = await fs.promises.readFile( filename, "utf8" );
+ let contents = await fs.readFile( filename, "utf8" );
// Remove the short SHA and .dirty from comparisons.
// The short SHA so commits can be compared against each other
"use strict";
-const util = require( "util" );
-const exec = util.promisify( require( "child_process" ).exec );
+const util = require( "node:util" );
+const exec = util.promisify( require( "node:child_process" ).exec );
module.exports = async function isCleanWorkingDir() {
const { stdout } = await exec( "git status --untracked-files=no --porcelain" );
"use strict";
const UglifyJS = require( "uglify-js" );
-const fs = require( "fs" );
-const path = require( "path" );
+const fs = require( "node:fs/promises" );
+const path = require( "node:path" );
const processForDist = require( "./dist" );
const getTimestamp = require( "./lib/getTimestamp" );
module.exports = async function minify( { dir, filename } ) {
const filepath = path.join( dir, filename );
- const contents = await fs.promises.readFile( filepath, "utf8" );
+ const contents = await fs.readFile( filepath, "utf8" );
const version = /jQuery JavaScript Library ([^\n]+)/.exec( contents )[ 1 ];
const banner = `/*! jQuery ${ version }` +
" | (c) OpenJS Foundation and other contributors" +
} );
await Promise.all( [
- fs.promises.writeFile(
+ fs.writeFile(
path.join( dir, minFilename ),
code
),
- fs.promises.writeFile(
+ fs.writeFile(
path.join( dir, mapFilename ),
map
)
"use strict";
-const fs = require( "fs" );
-const util = require( "util" );
-const exec = util.promisify( require( "child_process" ).exec );
+const fs = require( "node:fs/promises" );
+const util = require( "node:util" );
+const exec = util.promisify( require( "node:child_process" ).exec );
const verifyNodeVersion = require( "./lib/verifyNodeVersion" );
if ( !verifyNodeVersion() ) {
async function runTests( { module } ) {
const dir = "./test/node_smoke_tests";
- const files = await fs.promises.readdir( dir, { withFileTypes: true } );
+ const files = await fs.readdir( dir, { withFileTypes: true } );
const testFiles = files.filter( ( testFilePath ) => testFilePath.isFile() );
if ( !testFiles.length ) {
"use strict";
-const fs = require( "fs" );
-const path = require( "path" );
+const fs = require( "node:fs/promises" );
+const path = require( "node:path" );
const projectDir = path.resolve( __dirname, "..", ".." );
};
// Copies vendored files from node_modules into external/, creating target
// directories as needed. `files` (dest → source map) and `projectDir` are
// declared earlier in this file; `fs` is node:fs/promises per the hunk below.
async function npmcopy() {
-	await fs.promises.mkdir( path.resolve( projectDir, "external" ), {
+	await fs.mkdir( path.resolve( projectDir, "external" ), {
		recursive: true
	} );
	for ( const [ dest, source ] of Object.entries( files ) ) {
		const from = path.resolve( projectDir, "node_modules", source );
		const to = path.resolve( projectDir, "external", dest );
		const toDir = path.dirname( to );
		// Ensure the destination's parent directory exists before copying.
-		await fs.promises.mkdir( toDir, { recursive: true } );
-		await fs.promises.copyFile( from, to );
+		await fs.mkdir( toDir, { recursive: true } );
+		await fs.copyFile( from, to );
		console.log( `${ source } → ${ dest }` );
	}
}
"use strict";
-const { spawn } = require( "child_process" );
+const { spawn } = require( "node:child_process" );
const verifyNodeVersion = require( "./lib/verifyNodeVersion" );
-const path = require( "path" );
-const os = require( "os" );
+const path = require( "node:path" );
+const os = require( "node:os" );
if ( !verifyNodeVersion() ) {
return;
"use strict";
-const fs = require( "fs" );
+const fs = require( "node:fs/promises" );
async function generateFixture() {
- const fixture = await fs.promises.readFile( "./test/data/qunit-fixture.html", "utf8" );
- await fs.promises.writeFile(
+ const fixture = await fs.readFile( "./test/data/qunit-fixture.html", "utf8" );
+ await fs.writeFile(
"./test/data/qunit-fixture.js",
"// Generated by build/tasks/qunit-fixture.js\n" +
"QUnit.config.fixture = " +
import bodyParser from "body-parser";
import express from "express";
import bodyParserErrorHandler from "express-body-parser-error-handler";
-import { readFile } from "fs/promises";
+import { readFile } from "node:fs/promises";
import mockServer from "../middleware-mockserver.js";
export async function createTestServer( report ) {