feat(esm): convert to esm (#2569)

for #2543

BREAKING CHANGE: semantic-release is now ESM-only. Since it is used through its own executable, the impact on consuming projects should be minimal

BREAKING CHANGE: references to plugin files in configs need to include the file extension because they are executed in an ESM context
This commit is contained in:
Matt Travi 2022-11-11 09:24:06 -06:00 committed by GitHub
parent 4012f75386
commit 9eab1adb9d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
67 changed files with 3001 additions and 1761 deletions

View File

@ -1,20 +1,22 @@
#!/usr/bin/env node #!/usr/bin/env node
// Bad news: We have to write plain ES5 in this file
// Good news: It's the only file of the entire project
/* eslint-disable no-var */ /* eslint-disable no-var */
var semver = require('semver'); import semver from 'semver';
var execa = require('execa'); import { execa } from 'execa';
var findVersions = require('find-versions'); import findVersions from 'find-versions';
var pkg = require('../package.json'); import cli from '../cli.js';
import {createRequire} from 'node:module';
var MIN_GIT_VERSION = '2.7.1'; const require = createRequire(import.meta.url);
const { engines } = require('../package.json');
const { satisfies, lt } = semver;
if (!semver.satisfies(process.version, pkg.engines.node)) { const MIN_GIT_VERSION = '2.7.1';
if (!satisfies(process.version, engines.node)) {
console.error( console.error(
`[semantic-release]: node version ${pkg.engines.node} is required. Found ${process.version}. `[semantic-release]: node version ${engines.node} is required. Found ${process.version}.
See https://github.com/semantic-release/semantic-release/blob/master/docs/support/node-version.md for more details and solutions.` See https://github.com/semantic-release/semantic-release/blob/master/docs/support/node-version.md for more details and solutions.`
); );
@ -23,8 +25,8 @@ See https://github.com/semantic-release/semantic-release/blob/master/docs/suppor
execa('git', ['--version']) execa('git', ['--version'])
.then(({stdout}) => { .then(({stdout}) => {
var gitVersion = findVersions(stdout)[0]; const gitVersion = findVersions(stdout)[0];
if (semver.lt(gitVersion, MIN_GIT_VERSION)) { if (lt(gitVersion, MIN_GIT_VERSION)) {
console.error(`[semantic-release]: Git version ${MIN_GIT_VERSION} is required. Found ${gitVersion}.`); console.error(`[semantic-release]: Git version ${MIN_GIT_VERSION} is required. Found ${gitVersion}.`);
process.exit(1); process.exit(1);
} }
@ -36,7 +38,7 @@ execa('git', ['--version'])
}); });
// Node 10+ from this point on // Node 10+ from this point on
require('../cli')() cli()
.then((exitCode) => { .then((exitCode) => {
process.exitCode = exitCode; process.exitCode = exitCode;
}) })

22
cli.js
View File

@ -1,6 +1,7 @@
const {argv, env, stderr} = require('process'); // eslint-disable-line node/prefer-global/process import util from 'node:util';
const util = require('util'); import yargs from 'yargs';
const hideSensitive = require('./lib/hide-sensitive'); import {hideBin} from 'yargs/helpers';
import hideSensitive from './lib/hide-sensitive.js';
const stringList = { const stringList = {
type: 'string', type: 'string',
@ -11,8 +12,8 @@ const stringList = {
: values.reduce((values, value) => values.concat(value.split(',').map((value) => value.trim())), []), : values.reduce((values, value) => values.concat(value.split(',').map((value) => value.trim())), []),
}; };
module.exports = async () => { export default async () => {
const cli = require('yargs') const cli = yargs(hideBin(process.argv))
.command('$0', 'Run automated package publishing', (yargs) => { .command('$0', 'Run automated package publishing', (yargs) => {
yargs.demandCommand(0, 0).usage(`Run automated package publishing yargs.demandCommand(0, 0).usage(`Run automated package publishing
@ -36,12 +37,11 @@ Usage:
.option('debug', {describe: 'Output debugging information', type: 'boolean', group: 'Options'}) .option('debug', {describe: 'Output debugging information', type: 'boolean', group: 'Options'})
.option('d', {alias: 'dry-run', describe: 'Skip publishing', type: 'boolean', group: 'Options'}) .option('d', {alias: 'dry-run', describe: 'Skip publishing', type: 'boolean', group: 'Options'})
.option('h', {alias: 'help', group: 'Options'}) .option('h', {alias: 'help', group: 'Options'})
.option('v', {alias: 'version', group: 'Options'})
.strict(false) .strict(false)
.exitProcess(false); .exitProcess(false);
try { try {
const {help, version, ...options} = cli.parse(argv.slice(2)); const {help, version, ...options} = cli.parse(process.argv.slice(2));
if (Boolean(help) || Boolean(version)) { if (Boolean(help) || Boolean(version)) {
return 0; return 0;
@ -49,16 +49,16 @@ Usage:
if (options.debug) { if (options.debug) {
// Debug must be enabled before other requires in order to work // Debug must be enabled before other requires in order to work
require('debug').enable('semantic-release:*'); (await import('debug')).default.enable('semantic-release:*');
} }
await require('.')(options); await (await import('./index.js')).default(options);
return 0; return 0;
} catch (error) { } catch (error) {
if (error.name !== 'YError') { if (error.name !== 'YError') {
stderr.write(hideSensitive(env)(util.inspect(error, {colors: true}))); process.stderr.write(hideSensitive(process.env)(util.inspect(error, {colors: true})));
} }
return 1; return 1;
} }
}; }

View File

@ -1,24 +1,27 @@
const {pick} = require('lodash'); import {createRequire} from 'node:module';
const marked = require('marked'); import {pick} from 'lodash-es';
const envCi = require('env-ci'); import * as marked from 'marked';
const hookStd = require('hook-std'); import envCi from 'env-ci';
const semver = require('semver'); import {hookStdout} from 'hook-std';
const AggregateError = require('aggregate-error'); import semver from 'semver';
import AggregateError from 'aggregate-error';
import hideSensitive from './lib/hide-sensitive.js';
import getConfig from './lib/get-config.js';
import verify from './lib/verify.js';
import getNextVersion from './lib/get-next-version.js';
import getCommits from './lib/get-commits.js';
import getLastRelease from './lib/get-last-release.js';
import getReleaseToAdd from './lib/get-release-to-add.js';
import {extractErrors, makeTag} from './lib/utils.js';
import getGitAuthUrl from './lib/get-git-auth-url.js';
import getBranches from './lib/branches/index.js';
import getLogger from './lib/get-logger.js';
import {addNote, getGitHead, getTagHead, isBranchUpToDate, push, pushNotes, tag, verifyAuth} from './lib/git.js';
import getError from './lib/get-error.js';
import {COMMIT_EMAIL, COMMIT_NAME} from './lib/definitions/constants.js';
const require = createRequire(import.meta.url);
const pkg = require('./package.json'); const pkg = require('./package.json');
const hideSensitive = require('./lib/hide-sensitive');
const getConfig = require('./lib/get-config');
const verify = require('./lib/verify');
const getNextVersion = require('./lib/get-next-version');
const getCommits = require('./lib/get-commits');
const getLastRelease = require('./lib/get-last-release');
const getReleaseToAdd = require('./lib/get-release-to-add');
const {extractErrors, makeTag} = require('./lib/utils');
const getGitAuthUrl = require('./lib/get-git-auth-url');
const getBranches = require('./lib/branches');
const getLogger = require('./lib/get-logger');
const {verifyAuth, isBranchUpToDate, getGitHead, tag, push, pushNotes, getTagHead, addNote} = require('./lib/git');
const getError = require('./lib/get-error');
const {COMMIT_NAME, COMMIT_EMAIL} = require('./lib/definitions/constants');
let markedOptionsSet = false; let markedOptionsSet = false;
async function terminalOutput(text) { async function terminalOutput(text) {
@ -41,7 +44,7 @@ async function run(context, plugins) {
logger.warn('This run was not triggered in a known CI environment, running in dry-run mode.'); logger.warn('This run was not triggered in a known CI environment, running in dry-run mode.');
options.dryRun = true; options.dryRun = true;
} else { } else {
// When running on CI, set the commits author and commiter info and prevent the `git` CLI to prompt for username/password. See #703. // When running on CI, set the commits author and committer info and prevent the `git` CLI to prompt for username/password. See #703.
Object.assign(env, { Object.assign(env, {
GIT_AUTHOR_NAME: COMMIT_NAME, GIT_AUTHOR_NAME: COMMIT_NAME,
GIT_AUTHOR_EMAIL: COMMIT_EMAIL, GIT_AUTHOR_EMAIL: COMMIT_EMAIL,
@ -247,8 +250,8 @@ async function callFail(context, plugins, err) {
} }
} }
module.exports = async (cliOptions = {}, {cwd = process.cwd(), env = process.env, stdout, stderr} = {}) => { export default async (cliOptions = {}, {cwd = process.cwd(), env = process.env, stdout, stderr} = {}) => {
const {unhook} = hookStd( const {unhook} = hookStdout(
{silent: false, streams: [process.stdout, process.stderr, stdout, stderr].filter(Boolean)}, {silent: false, streams: [process.stdout, process.stderr, stdout, stderr].filter(Boolean)},
hideSensitive(env) hideSensitive(env)
); );
@ -278,4 +281,4 @@ module.exports = async (cliOptions = {}, {cwd = process.cwd(), env = process.env
unhook(); unhook();
throw error; throw error;
} }
}; }

View File

@ -1,8 +1,8 @@
const {isString, remove, omit, mapValues, template} = require('lodash'); import {isString, mapValues, omit, remove, template} from 'lodash-es';
const micromatch = require('micromatch'); import micromatch from 'micromatch';
const {getBranches} = require('../git'); import {getBranches} from '../git.js';
module.exports = async (repositoryUrl, {cwd}, branches) => { export default async (repositoryUrl, {cwd}, branches) => {
const gitBranches = await getBranches(repositoryUrl, {cwd}); const gitBranches = await getBranches(repositoryUrl, {cwd});
return branches.reduce( return branches.reduce(
@ -15,4 +15,4 @@ module.exports = async (repositoryUrl, {cwd}, branches) => {
], ],
[] []
); );
}; }

View File

@ -1,10 +1,13 @@
const {template, escapeRegExp} = require('lodash'); import {escapeRegExp, template} from 'lodash-es';
const semver = require('semver'); import semver from 'semver';
const pReduce = require('p-reduce'); import pReduce from 'p-reduce';
const debug = require('debug')('semantic-release:get-tags'); import debugTags from 'debug';
const {getTags, getNote} = require('../../lib/git'); import {getNote, getTags} from '../../lib/git.js';
module.exports = async ({cwd, env, options: {tagFormat}}, branches) => { const debug = debugTags('semantic-release:get-tags');
export default async ({cwd, env, options: {tagFormat}}, branches) => {
// Generate a regex to parse tags formatted with `tagFormat` // Generate a regex to parse tags formatted with `tagFormat`
// by replacing the `version` variable in the template by `(.+)`. // by replacing the `version` variable in the template by `(.+)`.
// The `tagFormat` is compiled with space as the `version` as it's an invalid tag character, // The `tagFormat` is compiled with space as the `version` as it's an invalid tag character,
@ -30,4 +33,4 @@ module.exports = async ({cwd, env, options: {tagFormat}}, branches) => {
}, },
[] []
); );
}; }

View File

@ -1,14 +1,14 @@
const {isString, isRegExp} = require('lodash'); import {isRegExp, isString} from 'lodash-es';
const AggregateError = require('aggregate-error'); import AggregateError from 'aggregate-error';
const pEachSeries = require('p-each-series'); import pEachSeries from 'p-each-series';
const DEFINITIONS = require('../definitions/branches'); import * as DEFINITIONS from '../definitions/branches.js';
const getError = require('../get-error'); import getError from '../get-error.js';
const {fetch, fetchNotes, verifyBranchName} = require('../git'); import {fetch, fetchNotes, verifyBranchName} from '../git.js';
const expand = require('./expand'); import expand from './expand.js';
const getTags = require('./get-tags'); import getTags from './get-tags.js';
const normalize = require('./normalize'); import * as normalize from './normalize.js';
module.exports = async (repositoryUrl, ciBranch, context) => { export default async (repositoryUrl, ciBranch, context) => {
const {cwd, env} = context; const {cwd, env} = context;
const remoteBranches = await expand( const remoteBranches = await expand(
@ -68,4 +68,4 @@ module.exports = async (repositoryUrl, ciBranch, context) => {
} }
return [...result.maintenance, ...result.release, ...result.prerelease]; return [...result.maintenance, ...result.release, ...result.prerelease];
}; }

View File

@ -1,19 +1,18 @@
const {sortBy, isNil} = require('lodash'); import {isNil, sortBy} from 'lodash-es';
const semverDiff = require('semver-diff'); import semverDiff from 'semver-diff';
const {FIRST_RELEASE, RELEASE_TYPE} = require('../definitions/constants'); import {FIRST_RELEASE, RELEASE_TYPE} from '../definitions/constants.js';
const { import {
tagsToVersions,
isMajorRange,
getUpperBound,
getLowerBound,
highest,
lowest,
getLatestVersion,
getFirstVersion, getFirstVersion,
getRange, getLatestVersion,
} = require('../utils'); getLowerBound, getRange,
getUpperBound,
highest,
isMajorRange,
lowest,
tagsToVersions
} from '../utils.js';
function maintenance({maintenance, release}) { export function maintenance({maintenance, release}) {
return sortBy( return sortBy(
maintenance.map(({name, range, channel, ...rest}) => ({ maintenance.map(({name, range, channel, ...rest}) => ({
...rest, ...rest,
@ -55,7 +54,7 @@ function maintenance({maintenance, release}) {
}); });
} }
function release({release}) { export function release({release}) {
if (release.length === 0) { if (release.length === 0) {
return release; return release;
} }
@ -89,7 +88,7 @@ function release({release}) {
}); });
} }
function prerelease({prerelease}) { export function prerelease({prerelease}) {
return prerelease.map(({name, prerelease, channel, tags, ...rest}) => { return prerelease.map(({name, prerelease, channel, tags, ...rest}) => {
const preid = prerelease === true ? name : prerelease; const preid = prerelease === true ? name : prerelease;
return { return {
@ -102,5 +101,3 @@ function prerelease({prerelease}) {
}; };
}); });
} }
module.exports = {maintenance, release, prerelease};

View File

@ -1,24 +1,22 @@
const {isNil, uniqBy} = require('lodash'); import {isNil, uniqBy} from 'lodash-es';
const semver = require('semver'); import semver from 'semver';
const {isMaintenanceRange} = require('../utils'); import {isMaintenanceRange} from '../utils.js';
const maintenance = { export const maintenance = {
filter: ({name, range}) => (!isNil(range) && range !== false) || isMaintenanceRange(name), filter: ({name, range}) => (!isNil(range) && range !== false) || isMaintenanceRange(name),
branchValidator: ({range}) => (isNil(range) ? true : isMaintenanceRange(range)), branchValidator: ({range}) => (isNil(range) ? true : isMaintenanceRange(range)),
branchesValidator: (branches) => uniqBy(branches, ({range}) => semver.validRange(range)).length === branches.length, branchesValidator: (branches) => uniqBy(branches, ({range}) => semver.validRange(range)).length === branches.length,
}; };
const prerelease = { export const prerelease = {
filter: ({prerelease}) => !isNil(prerelease) && prerelease !== false, filter: ({prerelease}) => !isNil(prerelease) && prerelease !== false,
branchValidator: ({name, prerelease}) => branchValidator: ({name, prerelease}) =>
Boolean(prerelease) && Boolean(semver.valid(`1.0.0-${prerelease === true ? name : prerelease}.1`)), Boolean(prerelease) && Boolean(semver.valid(`1.0.0-${prerelease === true ? name : prerelease}.1`)),
branchesValidator: (branches) => uniqBy(branches, 'prerelease').length === branches.length, branchesValidator: (branches) => uniqBy(branches, 'prerelease').length === branches.length,
}; };
const release = { export const release = {
// eslint-disable-next-line unicorn/no-fn-reference-in-iterator // eslint-disable-next-line unicorn/no-fn-reference-in-iterator
filter: (branch) => !maintenance.filter(branch) && !prerelease.filter(branch), filter: (branch) => !maintenance.filter(branch) && !prerelease.filter(branch),
branchesValidator: (branches) => branches.length <= 3 && branches.length > 0, branchesValidator: (branches) => branches.length <= 3 && branches.length > 0,
}; };
module.exports = {maintenance, prerelease, release};

View File

@ -1,29 +1,17 @@
const RELEASE_TYPE = ['patch', 'minor', 'major']; export const RELEASE_TYPE = ['patch', 'minor', 'major'];
const FIRST_RELEASE = '1.0.0'; export const FIRST_RELEASE = '1.0.0';
const FIRSTPRERELEASE = '1'; export const FIRSTPRERELEASE = '1';
const COMMIT_NAME = 'semantic-release-bot'; export const COMMIT_NAME = 'semantic-release-bot';
const COMMIT_EMAIL = 'semantic-release-bot@martynus.net'; export const COMMIT_EMAIL = 'semantic-release-bot@martynus.net';
const RELEASE_NOTES_SEPARATOR = '\n\n'; export const RELEASE_NOTES_SEPARATOR = '\n\n';
const SECRET_REPLACEMENT = '[secure]'; export const SECRET_REPLACEMENT = '[secure]';
const SECRET_MIN_SIZE = 5; export const SECRET_MIN_SIZE = 5;
const GIT_NOTE_REF = 'semantic-release'; export const GIT_NOTE_REF = 'semantic-release';
module.exports = {
RELEASE_TYPE,
FIRST_RELEASE,
FIRSTPRERELEASE,
COMMIT_NAME,
COMMIT_EMAIL,
RELEASE_NOTES_SEPARATOR,
SECRET_REPLACEMENT,
SECRET_MIN_SIZE,
GIT_NOTE_REF,
};

View File

@ -1,7 +1,10 @@
const {inspect} = require('util'); import {inspect} from 'node:util';
const {toLower, isString, trim} = require('lodash'); import {createRequire} from 'node:module';
import {isString, toLower, trim} from 'lodash-es';
import {RELEASE_TYPE} from './constants.js';
const require = createRequire(import.meta.url);
const pkg = require('../../package.json'); const pkg = require('../../package.json');
const {RELEASE_TYPE} = require('./constants');
const [homepage] = pkg.homepage.split('#'); const [homepage] = pkg.homepage.split('#');
const stringify = (object) => const stringify = (object) =>
@ -10,16 +13,19 @@ const linkify = (file) => `${homepage}/blob/master/${file}`;
const wordsList = (words) => const wordsList = (words) =>
`${words.slice(0, -1).join(', ')}${words.length > 1 ? ` or ${words[words.length - 1]}` : trim(words[0])}`; `${words.slice(0, -1).join(', ')}${words.length > 1 ? ` or ${words[words.length - 1]}` : trim(words[0])}`;
module.exports = { export function ENOGITREPO({cwd}) {
ENOGITREPO: ({cwd}) => ({ return {
message: 'Not running from a git repository.', message: 'Not running from a git repository.',
details: `The \`semantic-release\` command must be executed from a Git repository. details: `The \`semantic-release\` command must be executed from a Git repository.
The current working directory is \`${cwd}\`. The current working directory is \`${cwd}\`.
Please verify your CI configuration to make sure the \`semantic-release\` command is executed from the root of the cloned repository.`, Please verify your CI configuration to make sure the \`semantic-release\` command is executed from the root of the cloned repository.`,
}), };
ENOREPOURL: () => ({ }
export function ENOREPOURL() {
return {
message: 'The `repositoryUrl` option is required.', message: 'The `repositoryUrl` option is required.',
details: `The [repositoryUrl option](${linkify( details: `The [repositoryUrl option](${linkify(
'docs/usage/configuration.md#repositoryurl' 'docs/usage/configuration.md#repositoryurl'
@ -28,8 +34,11 @@ Please verify your CI configuration to make sure the \`semantic-release\` comman
Please make sure to add the \`repositoryUrl\` to the [semantic-release configuration] (${linkify( Please make sure to add the \`repositoryUrl\` to the [semantic-release configuration] (${linkify(
'docs/usage/configuration.md' 'docs/usage/configuration.md'
)}).`, )}).`,
}), };
EGITNOPERMISSION: ({options: {repositoryUrl}, branch: {name}}) => ({ }
export function EGITNOPERMISSION({options: {repositoryUrl}, branch: {name}}) {
return {
message: 'Cannot push to the Git repository.', message: 'Cannot push to the Git repository.',
details: `**semantic-release** cannot push the version tag to the branch \`${name}\` on the remote Git repository with URL \`${repositoryUrl}\`. details: `**semantic-release** cannot push the version tag to the branch \`${name}\` on the remote Git repository with URL \`${repositoryUrl}\`.
@ -37,42 +46,57 @@ This can be caused by:
- a misconfiguration of the [repositoryUrl](${linkify('docs/usage/configuration.md#repositoryurl')}) option - a misconfiguration of the [repositoryUrl](${linkify('docs/usage/configuration.md#repositoryurl')}) option
- the repository being unavailable - the repository being unavailable
- or missing push permission for the user configured via the [Git credentials on your CI environment](${linkify( - or missing push permission for the user configured via the [Git credentials on your CI environment](${linkify(
'docs/usage/ci-configuration.md#authentication' 'docs/usage/ci-configuration.md#authentication'
)})`, )})`,
}), };
EINVALIDTAGFORMAT: ({options: {tagFormat}}) => ({ }
export function EINVALIDTAGFORMAT({options: {tagFormat}}) {
return {
message: 'Invalid `tagFormat` option.', message: 'Invalid `tagFormat` option.',
details: `The [tagFormat](${linkify( details: `The [tagFormat](${linkify(
'docs/usage/configuration.md#tagformat' 'docs/usage/configuration.md#tagformat'
)}) must compile to a [valid Git reference](https://git-scm.com/docs/git-check-ref-format#_description). )}) must compile to a [valid Git reference](https://git-scm.com/docs/git-check-ref-format#_description).
Your configuration for the \`tagFormat\` option is \`${stringify(tagFormat)}\`.`, Your configuration for the \`tagFormat\` option is \`${stringify(tagFormat)}\`.`,
}), };
ETAGNOVERSION: ({options: {tagFormat}}) => ({ }
export function ETAGNOVERSION({options: {tagFormat}}) {
return {
message: 'Invalid `tagFormat` option.', message: 'Invalid `tagFormat` option.',
details: `The [tagFormat](${linkify( details: `The [tagFormat](${linkify(
'docs/usage/configuration.md#tagformat' 'docs/usage/configuration.md#tagformat'
)}) option must contain the variable \`version\` exactly once. )}) option must contain the variable \`version\` exactly once.
Your configuration for the \`tagFormat\` option is \`${stringify(tagFormat)}\`.`, Your configuration for the \`tagFormat\` option is \`${stringify(tagFormat)}\`.`,
}), };
EPLUGINCONF: ({type, required, pluginConf}) => ({ }
export function EPLUGINCONF({type, required, pluginConf}) {
return {
message: `The \`${type}\` plugin configuration is invalid.`, message: `The \`${type}\` plugin configuration is invalid.`,
details: `The [${type} plugin configuration](${linkify(`docs/usage/plugins.md#${toLower(type)}-plugin`)}) ${ details: `The [${type} plugin configuration](${linkify(`docs/usage/plugins.md#${toLower(type)}-plugin`)}) ${
required ? 'is required and ' : '' required ? 'is required and ' : ''
} must be a single or an array of plugins definition. A plugin definition is an npm module name, optionally wrapped in an array with an object. } must be a single or an array of plugins definition. A plugin definition is an npm module name, optionally wrapped in an array with an object.
Your configuration for the \`${type}\` plugin is \`${stringify(pluginConf)}\`.`, Your configuration for the \`${type}\` plugin is \`${stringify(pluginConf)}\`.`,
}), };
EPLUGINSCONF: ({plugin}) => ({ }
export function EPLUGINSCONF({plugin}) {
return {
message: 'The `plugins` configuration is invalid.', message: 'The `plugins` configuration is invalid.',
details: `The [plugins](${linkify( details: `The [plugins](${linkify(
'docs/usage/configuration.md#plugins' 'docs/usage/configuration.md#plugins'
)}) option must be an array of plugin definitions. A plugin definition is an npm module name, optionally wrapped in an array with an object. )}) option must be an array of plugin definitions. A plugin definition is an npm module name, optionally wrapped in an array with an object.
The invalid configuration is \`${stringify(plugin)}\`.`, The invalid configuration is \`${stringify(plugin)}\`.`,
}), };
EPLUGIN: ({pluginName, type}) => ({ }
export function EPLUGIN({pluginName, type}) {
return {
message: `A plugin configured in the step ${type} is not a valid semantic-release plugin.`, message: `A plugin configured in the step ${type} is not a valid semantic-release plugin.`,
details: `A valid \`${type}\` **semantic-release** plugin must be a function or an object with a function in the property \`${type}\`. details: `A valid \`${type}\` **semantic-release** plugin must be a function or an object with a function in the property \`${type}\`.
@ -81,8 +105,11 @@ The plugin \`${pluginName}\` doesn't have the property \`${type}\` and cannot be
Please refer to the \`${pluginName}\` and [semantic-release plugins configuration](${linkify( Please refer to the \`${pluginName}\` and [semantic-release plugins configuration](${linkify(
'docs/usage/plugins.md' 'docs/usage/plugins.md'
)}) documentation for more details.`, )}) documentation for more details.`,
}), };
EANALYZECOMMITSOUTPUT: ({result, pluginName}) => ({ }
export function EANALYZECOMMITSOUTPUT({result, pluginName}) {
return {
message: 'The `analyzeCommits` plugin returned an invalid value. It must return a valid semver release type.', message: 'The `analyzeCommits` plugin returned an invalid value. It must return a valid semver release type.',
details: `The \`analyzeCommits\` plugin must return a valid [semver](https://semver.org) release type. The valid values are: ${RELEASE_TYPE.map( details: `The \`analyzeCommits\` plugin must return a valid [semver](https://semver.org) release type. The valid values are: ${RELEASE_TYPE.map(
(type) => `\`${type}\`` (type) => `\`${type}\``
@ -97,8 +124,11 @@ We recommend to report the issue to the \`${pluginName}\` authors, providing the
- A link to the **semantic-release** plugin developer guide: [${linkify('docs/developer-guide/plugin.md')}](${linkify( - A link to the **semantic-release** plugin developer guide: [${linkify('docs/developer-guide/plugin.md')}](${linkify(
'docs/developer-guide/plugin.md' 'docs/developer-guide/plugin.md'
)})`, )})`,
}), };
EGENERATENOTESOUTPUT: ({result, pluginName}) => ({ }
export function EGENERATENOTESOUTPUT({result, pluginName}) {
return {
message: 'The `generateNotes` plugin returned an invalid value. It must return a `String`.', message: 'The `generateNotes` plugin returned an invalid value. It must return a `String`.',
details: `The \`generateNotes\` plugin must return a \`String\`. details: `The \`generateNotes\` plugin must return a \`String\`.
@ -111,8 +141,11 @@ We recommend to report the issue to the \`${pluginName}\` authors, providing the
- A link to the **semantic-release** plugin developer guide: [${linkify('docs/developer-guide/plugin.md')}](${linkify( - A link to the **semantic-release** plugin developer guide: [${linkify('docs/developer-guide/plugin.md')}](${linkify(
'docs/developer-guide/plugin.md' 'docs/developer-guide/plugin.md'
)})`, )})`,
}), };
EPUBLISHOUTPUT: ({result, pluginName}) => ({ }
export function EPUBLISHOUTPUT({result, pluginName}) {
return {
message: 'A `publish` plugin returned an invalid value. It must return an `Object`.', message: 'A `publish` plugin returned an invalid value. It must return an `Object`.',
details: `The \`publish\` plugins must return an \`Object\`. details: `The \`publish\` plugins must return an \`Object\`.
@ -125,8 +158,11 @@ We recommend to report the issue to the \`${pluginName}\` authors, providing the
- A link to the **semantic-release** plugin developer guide: [${linkify('docs/developer-guide/plugin.md')}](${linkify( - A link to the **semantic-release** plugin developer guide: [${linkify('docs/developer-guide/plugin.md')}](${linkify(
'docs/developer-guide/plugin.md' 'docs/developer-guide/plugin.md'
)})`, )})`,
}), };
EADDCHANNELOUTPUT: ({result, pluginName}) => ({ }
export function EADDCHANNELOUTPUT({result, pluginName}) {
return {
message: 'A `addChannel` plugin returned an invalid value. It must return an `Object`.', message: 'A `addChannel` plugin returned an invalid value. It must return an `Object`.',
details: `The \`addChannel\` plugins must return an \`Object\`. details: `The \`addChannel\` plugins must return an \`Object\`.
@ -139,48 +175,66 @@ We recommend to report the issue to the \`${pluginName}\` authors, providing the
- A link to the **semantic-release** plugin developer guide: [${linkify('docs/developer-guide/plugin.md')}](${linkify( - A link to the **semantic-release** plugin developer guide: [${linkify('docs/developer-guide/plugin.md')}](${linkify(
'docs/developer-guide/plugin.md' 'docs/developer-guide/plugin.md'
)})`, )})`,
}), };
EINVALIDBRANCH: ({branch}) => ({ }
export function EINVALIDBRANCH({branch}) {
return {
message: 'A branch is invalid in the `branches` configuration.', message: 'A branch is invalid in the `branches` configuration.',
details: `Each branch in the [branches configuration](${linkify( details: `Each branch in the [branches configuration](${linkify(
'docs/usage/configuration.md#branches' 'docs/usage/configuration.md#branches'
)}) must be either a string, a regexp or an object with a \`name\` property. )}) must be either a string, a regexp or an object with a \`name\` property.
Your configuration for the problematic branch is \`${stringify(branch)}\`.`, Your configuration for the problematic branch is \`${stringify(branch)}\`.`,
}), };
EINVALIDBRANCHNAME: ({branch}) => ({ }
export function EINVALIDBRANCHNAME({branch}) {
return {
message: 'A branch name is invalid in the `branches` configuration.', message: 'A branch name is invalid in the `branches` configuration.',
details: `Each branch in the [branches configuration](${linkify( details: `Each branch in the [branches configuration](${linkify(
'docs/usage/configuration.md#branches' 'docs/usage/configuration.md#branches'
)}) must be a [valid Git reference](https://git-scm.com/docs/git-check-ref-format#_description). )}) must be a [valid Git reference](https://git-scm.com/docs/git-check-ref-format#_description).
Your configuration for the problematic branch is \`${stringify(branch)}\`.`, Your configuration for the problematic branch is \`${stringify(branch)}\`.`,
}), };
EDUPLICATEBRANCHES: ({duplicates}) => ({ }
export function EDUPLICATEBRANCHES({duplicates}) {
return {
message: 'The `branches` configuration has duplicate branches.', message: 'The `branches` configuration has duplicate branches.',
details: `Each branch in the [branches configuration](${linkify( details: `Each branch in the [branches configuration](${linkify(
'docs/usage/configuration.md#branches' 'docs/usage/configuration.md#branches'
)}) must have a unique name. )}) must have a unique name.
Your configuration contains duplicates for the following branch names: \`${stringify(duplicates)}\`.`, Your configuration contains duplicates for the following branch names: \`${stringify(duplicates)}\`.`,
}), };
EMAINTENANCEBRANCH: ({branch}) => ({ }
export function EMAINTENANCEBRANCH({branch}) {
return {
message: 'A maintenance branch is invalid in the `branches` configuration.', message: 'A maintenance branch is invalid in the `branches` configuration.',
details: `Each maintenance branch in the [branches configuration](${linkify( details: `Each maintenance branch in the [branches configuration](${linkify(
'docs/usage/configuration.md#branches' 'docs/usage/configuration.md#branches'
)}) must have a \`range\` property formatted like \`N.x\`, \`N.x.x\` or \`N.N.x\` (\`N\` is a number). )}) must have a \`range\` property formatted like \`N.x\`, \`N.x.x\` or \`N.N.x\` (\`N\` is a number).
Your configuration for the problematic branch is \`${stringify(branch)}\`.`, Your configuration for the problematic branch is \`${stringify(branch)}\`.`,
}), };
EMAINTENANCEBRANCHES: ({branches}) => ({ }
export function EMAINTENANCEBRANCHES({branches}) {
return {
message: 'The maintenance branches are invalid in the `branches` configuration.', message: 'The maintenance branches are invalid in the `branches` configuration.',
details: `Each maintenance branch in the [branches configuration](${linkify( details: `Each maintenance branch in the [branches configuration](${linkify(
'docs/usage/configuration.md#branches' 'docs/usage/configuration.md#branches'
)}) must have a unique \`range\` property. )}) must have a unique \`range\` property.
Your configuration for the problematic branches is \`${stringify(branches)}\`.`, Your configuration for the problematic branches is \`${stringify(branches)}\`.`,
}), };
ERELEASEBRANCHES: ({branches}) => ({ }
export function ERELEASEBRANCHES({branches}) {
return {
message: 'The release branches are invalid in the `branches` configuration.', message: 'The release branches are invalid in the `branches` configuration.',
details: `A minimum of 1 and a maximum of 3 release branches are required in the [branches configuration](${linkify( details: `A minimum of 1 and a maximum of 3 release branches are required in the [branches configuration](${linkify(
'docs/usage/configuration.md#branches' 'docs/usage/configuration.md#branches'
@ -189,24 +243,33 @@ Your configuration for the problematic branches is \`${stringify(branches)}\`.`,
This may occur if your repository does not have a release branch, such as \`master\`. This may occur if your repository does not have a release branch, such as \`master\`.
Your configuration for the problematic branches is \`${stringify(branches)}\`.`, Your configuration for the problematic branches is \`${stringify(branches)}\`.`,
}), };
EPRERELEASEBRANCH: ({branch}) => ({ }
export function EPRERELEASEBRANCH({branch}) {
return {
message: 'A pre-release branch configuration is invalid in the `branches` configuration.', message: 'A pre-release branch configuration is invalid in the `branches` configuration.',
details: `Each pre-release branch in the [branches configuration](${linkify( details: `Each pre-release branch in the [branches configuration](${linkify(
'docs/usage/configuration.md#branches' 'docs/usage/configuration.md#branches'
)}) must have a \`prerelease\` property valid per the [Semantic Versioning Specification](https://semver.org/#spec-item-9). If the \`prerelease\` property is set to \`true\`, then the \`name\` property is used instead. )}) must have a \`prerelease\` property valid per the [Semantic Versioning Specification](https://semver.org/#spec-item-9). If the \`prerelease\` property is set to \`true\`, then the \`name\` property is used instead.
Your configuration for the problematic branch is \`${stringify(branch)}\`.`, Your configuration for the problematic branch is \`${stringify(branch)}\`.`,
}), };
EPRERELEASEBRANCHES: ({branches}) => ({ }
export function EPRERELEASEBRANCHES({branches}) {
return {
message: 'The pre-release branches are invalid in the `branches` configuration.', message: 'The pre-release branches are invalid in the `branches` configuration.',
details: `Each pre-release branch in the [branches configuration](${linkify( details: `Each pre-release branch in the [branches configuration](${linkify(
'docs/usage/configuration.md#branches' 'docs/usage/configuration.md#branches'
)}) must have a unique \`prerelease\` property. If the \`prerelease\` property is set to \`true\`, then the \`name\` property is used instead. )}) must have a unique \`prerelease\` property. If the \`prerelease\` property is set to \`true\`, then the \`name\` property is used instead.
Your configuration for the problematic branches is \`${stringify(branches)}\`.`, Your configuration for the problematic branches is \`${stringify(branches)}\`.`,
}), };
EINVALIDNEXTVERSION: ({nextRelease: {version}, branch: {name, range}, commits, validBranches}) => ({ }
export function EINVALIDNEXTVERSION({nextRelease: {version}, branch: {name, range}, commits, validBranches}) {
return {
message: `The release \`${version}\` on branch \`${name}\` cannot be published as it is out of range.`, message: `The release \`${version}\` on branch \`${name}\` cannot be published as it is out of range.`,
details: `Based on the releases published on other branches, only versions within the range \`${range}\` can be published from branch \`${name}\`. details: `Based on the releases published on other branches, only versions within the range \`${range}\` can be published from branch \`${name}\`.
@ -214,19 +277,22 @@ The following commit${commits.length > 1 ? 's are' : ' is'} responsible for the
${commits.map(({commit: {short}, subject}) => `- ${subject} (${short})`).join('\n')} ${commits.map(({commit: {short}, subject}) => `- ${subject} (${short})`).join('\n')}
${ ${
commits.length > 1 ? 'Those commits' : 'This commit' commits.length > 1 ? 'Those commits' : 'This commit'
} should be moved to a valid branch with [git merge](https://git-scm.com/docs/git-merge) or [git cherry-pick](https://git-scm.com/docs/git-cherry-pick) and removed from branch \`${name}\` with [git revert](https://git-scm.com/docs/git-revert) or [git reset](https://git-scm.com/docs/git-reset). } should be moved to a valid branch with [git merge](https://git-scm.com/docs/git-merge) or [git cherry-pick](https://git-scm.com/docs/git-cherry-pick) and removed from branch \`${name}\` with [git revert](https://git-scm.com/docs/git-revert) or [git reset](https://git-scm.com/docs/git-reset).
A valid branch could be ${wordsList(validBranches.map(({name}) => `\`${name}\``))}. A valid branch could be ${wordsList(validBranches.map(({name}) => `\`${name}\``))}.
See the [workflow configuration documentation](${linkify('docs/usage/workflow-configuration.md')}) for more details.`, See the [workflow configuration documentation](${linkify('docs/usage/workflow-configuration.md')}) for more details.`,
}), };
EINVALIDMAINTENANCEMERGE: ({nextRelease: {channel, gitTag, version}, branch: {mergeRange, name}}) => ({ }
export function EINVALIDMAINTENANCEMERGE({nextRelease: {channel, gitTag, version}, branch: {mergeRange, name}}) {
return {
message: `The release \`${version}\` on branch \`${name}\` cannot be published as it is out of range.`, message: `The release \`${version}\` on branch \`${name}\` cannot be published as it is out of range.`,
details: `Only releases within the range \`${mergeRange}\` can be merged into the maintenance branch \`${name}\` and published to the \`${channel}\` distribution channel. details: `Only releases within the range \`${mergeRange}\` can be merged into the maintenance branch \`${name}\` and published to the \`${channel}\` distribution channel.
The branch \`${name}\` head should be [reset](https://git-scm.com/docs/git-reset) to a previous commit so the commit with tag \`${gitTag}\` is removed from the branch history. The branch \`${name}\` head should be [reset](https://git-scm.com/docs/git-reset) to a previous commit so the commit with tag \`${gitTag}\` is removed from the branch history.
See the [workflow configuration documentation](${linkify('docs/usage/workflow-configuration.md')}) for more details.`, See the [workflow configuration documentation](${linkify('docs/usage/workflow-configuration.md')}) for more details.`,
}), };
}; }

View File

@ -1,12 +1,12 @@
/* eslint require-atomic-updates: off */ /* eslint require-atomic-updates: off */
const {isString, isPlainObject} = require('lodash'); import {isPlainObject, isString} from 'lodash-es';
const {getGitHead} = require('../git'); import {getGitHead} from '../git.js';
const hideSensitive = require('../hide-sensitive'); import hideSensitive from '../hide-sensitive.js';
const {hideSensitiveValues} = require('../utils'); import {hideSensitiveValues} from '../utils.js';
const {RELEASE_TYPE, RELEASE_NOTES_SEPARATOR} = require('./constants'); import {RELEASE_NOTES_SEPARATOR, RELEASE_TYPE} from './constants.js';
module.exports = { export default {
verifyConditions: { verifyConditions: {
required: false, required: false,
dryRun: true, dryRun: true,

View File

@ -1,5 +1,7 @@
const debug = require('debug')('semantic-release:get-commits'); import debugCommits from 'debug';
const {getCommits} = require('./git'); import {getCommits} from './git.js';
const debug = debugCommits('semantic-release:get-commits');
/** /**
* Retrieve the list of commits on the current branch since the commit sha associated with the last release, or all the commits of the current branch if there is no last released version. * Retrieve the list of commits on the current branch since the commit sha associated with the last release, or all the commits of the current branch if there is no last released version.
@ -8,7 +10,7 @@ const {getCommits} = require('./git');
* *
* @return {Promise<Array<Object>>} The list of commits on the branch `branch` since the last release. * @return {Promise<Array<Object>>} The list of commits on the branch `branch` since the last release.
*/ */
module.exports = async ({cwd, env, lastRelease: {gitHead: from}, nextRelease: {gitHead: to = 'HEAD'} = {}, logger}) => { export default async ({cwd, env, lastRelease: {gitHead: from}, nextRelease: {gitHead: to = 'HEAD'} = {}, logger}) => {
if (from) { if (from) {
debug('Use from: %s', from); debug('Use from: %s', from);
} else { } else {
@ -20,4 +22,4 @@ module.exports = async ({cwd, env, lastRelease: {gitHead: from}, nextRelease: {g
logger.log(`Found ${commits.length} commits since last release`); logger.log(`Found ${commits.length} commits since last release`);
debug('Parsed commits: %o', commits); debug('Parsed commits: %o', commits);
return commits; return commits;
}; }

View File

@ -1,16 +1,24 @@
const {castArray, pickBy, isNil, isString, isPlainObject} = require('lodash'); import {dirname, resolve} from 'node:path';
const readPkgUp = require('read-pkg-up'); import {fileURLToPath} from 'node:url';
const {cosmiconfig} = require('cosmiconfig'); import {createRequire} from 'node:module';
const resolveFrom = require('resolve-from');
const debug = require('debug')('semantic-release:config'); import {castArray, isNil, isPlainObject, isString, pickBy} from 'lodash-es';
const {repoUrl} = require('./git'); import {readPackageUp} from 'read-pkg-up';
const PLUGINS_DEFINITIONS = require('./definitions/plugins'); import {cosmiconfig} from 'cosmiconfig';
const plugins = require('./plugins'); import resolveFrom from 'resolve-from';
const {validatePlugin, parseConfig} = require('./plugins/utils'); import debugConfig from 'debug';
import {repoUrl} from './git.js';
import PLUGINS_DEFINITIONS from './definitions/plugins.js';
import plugins from './plugins/index.js';
import {parseConfig, validatePlugin} from './plugins/utils.js';
const debug = debugConfig('semantic-release:config');
const __dirname = dirname(fileURLToPath(import.meta.url));
const require = createRequire(import.meta.url);
const CONFIG_NAME = 'release'; const CONFIG_NAME = 'release';
module.exports = async (context, cliOptions) => { export default async (context, cliOptions) => {
const {cwd, env} = context; const {cwd, env} = context;
const {config, filepath} = (await cosmiconfig(CONFIG_NAME).search(cwd)) || {}; const {config, filepath} = (await cosmiconfig(CONFIG_NAME).search(cwd)) || {};
@ -25,11 +33,12 @@ module.exports = async (context, cliOptions) => {
if (extendPaths) { if (extendPaths) {
// If `extends` is defined, load and merge each shareable config with `options` // If `extends` is defined, load and merge each shareable config with `options`
options = { options = {
...castArray(extendPaths).reduce((result, extendPath) => { ...await (castArray(extendPaths).reduce(async(eventualResult, extendPath) => {
const result = await eventualResult;
const extendsOptions = require(resolveFrom.silent(__dirname, extendPath) || resolveFrom(cwd, extendPath)); const extendsOptions = require(resolveFrom.silent(__dirname, extendPath) || resolveFrom(cwd, extendPath));
// For each plugin defined in a shareable config, save in `pluginsPath` the extendable config path, // For each plugin defined in a shareable config, save in `pluginsPath` the extendable config path,
// so those plugin will be loaded relatively to the config file // so those plugin will be loaded relative to the config file
Object.entries(extendsOptions) Object.entries(extendsOptions)
.filter(([, value]) => Boolean(value)) .filter(([, value]) => Boolean(value))
.reduce((pluginsPath, [option, value]) => { .reduce((pluginsPath, [option, value]) => {
@ -47,7 +56,7 @@ module.exports = async (context, cliOptions) => {
}, pluginsPath); }, pluginsPath);
return {...result, ...extendsOptions}; return {...result, ...extendsOptions};
}, {}), }, {})),
...options, ...options,
}; };
} }
@ -70,7 +79,7 @@ module.exports = async (context, cliOptions) => {
'@semantic-release/npm', '@semantic-release/npm',
'@semantic-release/github', '@semantic-release/github',
], ],
// Remove `null` and `undefined` options so they can be replaced with default ones // Remove `null` and `undefined` options, so they can be replaced with default ones
...pickBy(options, (option) => !isNil(option)), ...pickBy(options, (option) => !isNil(option)),
...(options.branches ? {branches: castArray(options.branches)} : {}), ...(options.branches ? {branches: castArray(options.branches)} : {}),
}; };
@ -82,9 +91,9 @@ module.exports = async (context, cliOptions) => {
debug('options values: %O', options); debug('options values: %O', options);
return {options, plugins: await plugins({...context, options}, pluginsPath)}; return {options, plugins: await plugins({...context, options}, pluginsPath)};
}; }
async function pkgRepoUrl(options) { async function pkgRepoUrl(options) {
const {packageJson} = (await readPkgUp(options)) || {}; const {packageJson} = (await readPackageUp(options)) || {};
return packageJson && (isPlainObject(packageJson.repository) ? packageJson.repository.url : packageJson.repository); return packageJson && (isPlainObject(packageJson.repository) ? packageJson.repository.url : packageJson.repository);
} }

View File

@ -1,7 +1,7 @@
const SemanticReleaseError = require('@semantic-release/error'); import SemanticReleaseError from '@semantic-release/error';
const ERROR_DEFINITIONS = require('./definitions/errors'); import * as ERROR_DEFINITIONS from './definitions/errors.js';
module.exports = (code, ctx = {}) => { export default (code, ctx = {}) => {
const {message, details} = ERROR_DEFINITIONS[code](ctx); const {message, details} = ERROR_DEFINITIONS[code](ctx);
return new SemanticReleaseError(message, code, details); return new SemanticReleaseError(message, code, details);
}; }

View File

@ -1,8 +1,10 @@
const {parse, format} = require('url'); // eslint-disable-line node/no-deprecated-api import {format, parse} from 'node:url';
const {isNil} = require('lodash'); import {isNil} from 'lodash-es';
const hostedGitInfo = require('hosted-git-info'); import hostedGitInfo from 'hosted-git-info';
const {verifyAuth} = require('./git'); import debugAuthUrl from 'debug';
const debug = require('debug')('semantic-release:get-git-auth-url'); import {verifyAuth} from './git.js';
const debug = debugAuthUrl('semantic-release:get-git-auth-url');
/** /**
* Machinery to format a repository URL with the given credentials * Machinery to format a repository URL with the given credentials
@ -57,7 +59,7 @@ async function ensureValidAuthUrl({cwd, env, branch}, authUrl) {
* *
* @return {String} The formatted Git repository URL. * @return {String} The formatted Git repository URL.
*/ */
module.exports = async (context) => { export default async (context) => {
const {cwd, env, branch} = context; const {cwd, env, branch} = context;
const GIT_TOKENS = { const GIT_TOKENS = {
GIT_CREDENTIALS: undefined, GIT_CREDENTIALS: undefined,
@ -119,4 +121,4 @@ module.exports = async (context) => {
} }
return repositoryUrl; return repositoryUrl;
}; }

View File

@ -1,6 +1,6 @@
const {isUndefined} = require('lodash'); import {isUndefined} from 'lodash-es';
const semver = require('semver'); import semver from 'semver';
const {makeTag, isSameChannel} = require('./utils'); import {isSameChannel, makeTag} from './utils.js';
/** /**
* Last release. * Last release.
@ -18,7 +18,7 @@ const {makeTag, isSameChannel} = require('./utils');
* *
* - Filter out the branch tags that are not valid semantic version * - Filter out the branch tags that are not valid semantic version
* - Sort the versions * - Sort the versions
* - Retrive the highest version * - Retrieve the highest version
* *
* @param {Object} context semantic-release context. * @param {Object} context semantic-release context.
* @param {Object} params Function parameters. * @param {Object} params Function parameters.
@ -26,7 +26,7 @@ const {makeTag, isSameChannel} = require('./utils');
* *
* @return {LastRelease} The last tagged release or empty object if none is found. * @return {LastRelease} The last tagged release or empty object if none is found.
*/ */
module.exports = ({branch, options: {tagFormat}}, {before} = {}) => { export default ({branch, options: {tagFormat}}, {before} = {}) => {
const [{version, gitTag, channels} = {}] = branch.tags const [{version, gitTag, channels} = {}] = branch.tags
.filter( .filter(
(tag) => (tag) =>
@ -41,4 +41,4 @@ module.exports = ({branch, options: {tagFormat}}, {before} = {}) => {
} }
return {}; return {};
}; }

View File

@ -1,7 +1,9 @@
const {Signale} = require('signale'); import signale from 'signale';
const figures = require('figures'); import figures from 'figures';
module.exports = ({stdout, stderr}) => const {Signale} = signale;
export default ({stdout, stderr}) =>
new Signale({ new Signale({
config: {displayTimestamp: true, underlineMessage: false, displayLabel: false}, config: {displayTimestamp: true, underlineMessage: false, displayLabel: false},
disabled: false, disabled: false,
@ -13,4 +15,4 @@ module.exports = ({stdout, stderr}) =>
log: {badge: figures.info, color: 'magenta', label: '', stream: [stdout]}, log: {badge: figures.info, color: 'magenta', label: '', stream: [stdout]},
success: {badge: figures.tick, color: 'green', label: '', stream: [stdout]}, success: {badge: figures.tick, color: 'green', label: '', stream: [stdout]},
}, },
}); })

View File

@ -1,8 +1,8 @@
const semver = require('semver'); import semver from 'semver';
const {FIRST_RELEASE, FIRSTPRERELEASE} = require('./definitions/constants'); import {FIRST_RELEASE, FIRSTPRERELEASE} from './definitions/constants.js';
const {isSameChannel, getLatestVersion, tagsToVersions, highest} = require('./utils'); import {getLatestVersion, highest, isSameChannel, tagsToVersions} from './utils.js';
module.exports = ({branch, nextRelease: {type, channel}, lastRelease, logger}) => { export default ({branch, nextRelease: {type, channel}, lastRelease, logger}) => {
let version; let version;
if (lastRelease.version) { if (lastRelease.version) {
const {major, minor, patch} = semver.parse(lastRelease.version); const {major, minor, patch} = semver.parse(lastRelease.version);
@ -32,4 +32,4 @@ module.exports = ({branch, nextRelease: {type, channel}, lastRelease, logger}) =
} }
return version; return version;
}; }

View File

@ -1,8 +1,8 @@
const {uniqBy, intersection} = require('lodash'); import {intersection, uniqBy} from 'lodash-es';
const semver = require('semver'); import semver from 'semver';
const semverDiff = require('semver-diff'); import semverDiff from 'semver-diff';
const getLastRelease = require('./get-last-release'); import getLastRelease from './get-last-release.js';
const {makeTag, getLowerBound} = require('./utils'); import {getLowerBound, makeTag} from './utils.js';
/** /**
* Find releases that have been merged from from a higher branch but not added on the channel of the current branch. * Find releases that have been merged from from a higher branch but not added on the channel of the current branch.
@ -11,7 +11,7 @@ const {makeTag, getLowerBound} = require('./utils');
* *
* @return {Array<Object>} Last release and next release to be added on the channel of the current branch. * @return {Array<Object>} Last release and next release to be added on the channel of the current branch.
*/ */
module.exports = (context) => { export default (context) => {
const { const {
branch, branch,
branches, branches,
@ -57,4 +57,4 @@ module.exports = (context) => {
}, },
}; };
} }
}; }

View File

@ -1,8 +1,10 @@
const gitLogParser = require('git-log-parser'); import gitLogParser from 'git-log-parser';
const getStream = require('get-stream'); import getStream from 'get-stream';
const execa = require('execa'); import {execa} from 'execa';
const debug = require('debug')('semantic-release:git'); import debugGit from 'debug';
const {GIT_NOTE_REF} = require('./definitions/constants'); import {GIT_NOTE_REF} from './definitions/constants.js';
const debug = debugGit('semantic-release:git');
Object.assign(gitLogParser.fields, {hash: 'H', message: 'B', gitTags: 'd', committerDate: {key: 'ci', type: Date}}); Object.assign(gitLogParser.fields, {hash: 'H', message: 'B', gitTags: 'd', committerDate: {key: 'ci', type: Date}});
@ -14,7 +16,7 @@ Object.assign(gitLogParser.fields, {hash: 'H', message: 'B', gitTags: 'd', commi
* *
* @return {String} The commit sha of the tag in parameter or `null`. * @return {String} The commit sha of the tag in parameter or `null`.
*/ */
async function getTagHead(tagName, execaOptions) { export async function getTagHead(tagName, execaOptions) {
return (await execa('git', ['rev-list', '-1', tagName], execaOptions)).stdout; return (await execa('git', ['rev-list', '-1', tagName], execaOptions)).stdout;
} }
@ -27,7 +29,7 @@ async function getTagHead(tagName, execaOptions) {
* @return {Array<String>} List of git tags. * @return {Array<String>} List of git tags.
* @throws {Error} If the `git` command fails. * @throws {Error} If the `git` command fails.
*/ */
async function getTags(branch, execaOptions) { export async function getTags(branch, execaOptions) {
return (await execa('git', ['tag', '--merged', branch], execaOptions)).stdout return (await execa('git', ['tag', '--merged', branch], execaOptions)).stdout
.split('\n') .split('\n')
.map((tag) => tag.trim()) .map((tag) => tag.trim())
@ -42,7 +44,7 @@ async function getTags(branch, execaOptions) {
* @param {Object} [execaOpts] Options to pass to `execa`. * @param {Object} [execaOpts] Options to pass to `execa`.
* @return {Promise<Array<Object>>} The list of commits between `from` and `to`. * @return {Promise<Array<Object>>} The list of commits between `from` and `to`.
*/ */
async function getCommits(from, to, execaOptions) { export async function getCommits(from, to, execaOptions) {
return ( return (
await getStream.array( await getStream.array(
gitLogParser.parse( gitLogParser.parse(
@ -62,7 +64,7 @@ async function getCommits(from, to, execaOptions) {
* @return {Array<String>} List of git branches. * @return {Array<String>} List of git branches.
* @throws {Error} If the `git` command fails. * @throws {Error} If the `git` command fails.
*/ */
async function getBranches(repositoryUrl, execaOptions) { export async function getBranches(repositoryUrl, execaOptions) {
return (await execa('git', ['ls-remote', '--heads', repositoryUrl], execaOptions)).stdout return (await execa('git', ['ls-remote', '--heads', repositoryUrl], execaOptions)).stdout
.split('\n') .split('\n')
.filter(Boolean) .filter(Boolean)
@ -77,7 +79,7 @@ async function getBranches(repositoryUrl, execaOptions) {
* *
* @return {Boolean} `true` if the reference exists, falsy otherwise. * @return {Boolean} `true` if the reference exists, falsy otherwise.
*/ */
async function isRefExists(ref, execaOptions) { export async function isRefExists(ref, execaOptions) {
try { try {
return (await execa('git', ['rev-parse', '--verify', ref], execaOptions)).exitCode === 0; return (await execa('git', ['rev-parse', '--verify', ref], execaOptions)).exitCode === 0;
} catch (error) { } catch (error) {
@ -99,7 +101,7 @@ async function isRefExists(ref, execaOptions) {
* @param {String} branch The repository branch to fetch. * @param {String} branch The repository branch to fetch.
* @param {Object} [execaOpts] Options to pass to `execa`. * @param {Object} [execaOpts] Options to pass to `execa`.
*/ */
async function fetch(repositoryUrl, branch, ciBranch, execaOptions) { export async function fetch(repositoryUrl, branch, ciBranch, execaOptions) {
const isDetachedHead = const isDetachedHead =
(await execa('git', ['rev-parse', '--abbrev-ref', 'HEAD'], {...execaOptions, reject: false})).stdout === 'HEAD'; (await execa('git', ['rev-parse', '--abbrev-ref', 'HEAD'], {...execaOptions, reject: false})).stdout === 'HEAD';
@ -137,7 +139,7 @@ async function fetch(repositoryUrl, branch, ciBranch, execaOptions) {
* @param {String} repositoryUrl The remote repository URL. * @param {String} repositoryUrl The remote repository URL.
* @param {Object} [execaOpts] Options to pass to `execa`. * @param {Object} [execaOpts] Options to pass to `execa`.
*/ */
async function fetchNotes(repositoryUrl, execaOptions) { export async function fetchNotes(repositoryUrl, execaOptions) {
try { try {
await execa( await execa(
'git', 'git',
@ -159,7 +161,7 @@ async function fetchNotes(repositoryUrl, execaOptions) {
* *
* @return {String} the sha of the HEAD commit. * @return {String} the sha of the HEAD commit.
*/ */
async function getGitHead(execaOptions) { export async function getGitHead(execaOptions) {
return (await execa('git', ['rev-parse', 'HEAD'], execaOptions)).stdout; return (await execa('git', ['rev-parse', 'HEAD'], execaOptions)).stdout;
} }
@ -170,7 +172,7 @@ async function getGitHead(execaOptions) {
* *
* @return {string} The value of the remote git URL. * @return {string} The value of the remote git URL.
*/ */
async function repoUrl(execaOptions) { export async function repoUrl(execaOptions) {
try { try {
return (await execa('git', ['config', '--get', 'remote.origin.url'], execaOptions)).stdout; return (await execa('git', ['config', '--get', 'remote.origin.url'], execaOptions)).stdout;
} catch (error) { } catch (error) {
@ -185,7 +187,7 @@ async function repoUrl(execaOptions) {
* *
* @return {Boolean} `true` if the current working directory is in a git repository, falsy otherwise. * @return {Boolean} `true` if the current working directory is in a git repository, falsy otherwise.
*/ */
async function isGitRepo(execaOptions) { export async function isGitRepo(execaOptions) {
try { try {
return (await execa('git', ['rev-parse', '--git-dir'], execaOptions)).exitCode === 0; return (await execa('git', ['rev-parse', '--git-dir'], execaOptions)).exitCode === 0;
} catch (error) { } catch (error) {
@ -202,7 +204,7 @@ async function isGitRepo(execaOptions) {
* *
* @throws {Error} if not authorized to push. * @throws {Error} if not authorized to push.
*/ */
async function verifyAuth(repositoryUrl, branch, execaOptions) { export async function verifyAuth(repositoryUrl, branch, execaOptions) {
try { try {
await execa('git', ['push', '--dry-run', '--no-verify', repositoryUrl, `HEAD:${branch}`], execaOptions); await execa('git', ['push', '--dry-run', '--no-verify', repositoryUrl, `HEAD:${branch}`], execaOptions);
} catch (error) { } catch (error) {
@ -220,7 +222,7 @@ async function verifyAuth(repositoryUrl, branch, execaOptions) {
* *
* @throws {Error} if the tag creation failed. * @throws {Error} if the tag creation failed.
*/ */
async function tag(tagName, ref, execaOptions) { export async function tag(tagName, ref, execaOptions) {
await execa('git', ['tag', tagName, ref], execaOptions); await execa('git', ['tag', tagName, ref], execaOptions);
} }
@ -232,7 +234,7 @@ async function tag(tagName, ref, execaOptions) {
* *
* @throws {Error} if the push failed. * @throws {Error} if the push failed.
*/ */
async function push(repositoryUrl, execaOptions) { export async function push(repositoryUrl, execaOptions) {
await execa('git', ['push', '--tags', repositoryUrl], execaOptions); await execa('git', ['push', '--tags', repositoryUrl], execaOptions);
} }
@ -244,7 +246,7 @@ async function push(repositoryUrl, execaOptions) {
* *
* @throws {Error} if the push failed. * @throws {Error} if the push failed.
*/ */
async function pushNotes(repositoryUrl, execaOptions) { export async function pushNotes(repositoryUrl, execaOptions) {
await execa('git', ['push', repositoryUrl, `refs/notes/${GIT_NOTE_REF}`], execaOptions); await execa('git', ['push', repositoryUrl, `refs/notes/${GIT_NOTE_REF}`], execaOptions);
} }
@ -256,7 +258,7 @@ async function pushNotes(repositoryUrl, execaOptions) {
* *
* @return {Boolean} `true` if valid, falsy otherwise. * @return {Boolean} `true` if valid, falsy otherwise.
*/ */
async function verifyTagName(tagName, execaOptions) { export async function verifyTagName(tagName, execaOptions) {
try { try {
return (await execa('git', ['check-ref-format', `refs/tags/${tagName}`], execaOptions)).exitCode === 0; return (await execa('git', ['check-ref-format', `refs/tags/${tagName}`], execaOptions)).exitCode === 0;
} catch (error) { } catch (error) {
@ -272,7 +274,7 @@ async function verifyTagName(tagName, execaOptions) {
* *
* @return {Boolean} `true` if valid, falsy otherwise. * @return {Boolean} `true` if valid, falsy otherwise.
*/ */
async function verifyBranchName(branch, execaOptions) { export async function verifyBranchName(branch, execaOptions) {
try { try {
return (await execa('git', ['check-ref-format', `refs/heads/${branch}`], execaOptions)).exitCode === 0; return (await execa('git', ['check-ref-format', `refs/heads/${branch}`], execaOptions)).exitCode === 0;
} catch (error) { } catch (error) {
@ -289,7 +291,7 @@ async function verifyBranchName(branch, execaOptions) {
* *
* @return {Boolean} `true` is the HEAD of the current local branch is the same as the HEAD of the remote branch, falsy otherwise. * @return {Boolean} `true` is the HEAD of the current local branch is the same as the HEAD of the remote branch, falsy otherwise.
*/ */
async function isBranchUpToDate(repositoryUrl, branch, execaOptions) { export async function isBranchUpToDate(repositoryUrl, branch, execaOptions) {
return ( return (
(await getGitHead(execaOptions)) === (await getGitHead(execaOptions)) ===
(await execa('git', ['ls-remote', '--heads', repositoryUrl, branch], execaOptions)).stdout.match(/^(?<ref>\w+)?/)[1] (await execa('git', ['ls-remote', '--heads', repositoryUrl, branch], execaOptions)).stdout.match(/^(?<ref>\w+)?/)[1]
@ -304,7 +306,7 @@ async function isBranchUpToDate(repositoryUrl, branch, execaOptions) {
* *
* @return {Object} the parsed JSON note if there is one, an empty object otherwise. * @return {Object} the parsed JSON note if there is one, an empty object otherwise.
*/ */
async function getNote(ref, execaOptions) { export async function getNote(ref, execaOptions) {
try { try {
return JSON.parse((await execa('git', ['notes', '--ref', GIT_NOTE_REF, 'show', ref], execaOptions)).stdout); return JSON.parse((await execa('git', ['notes', '--ref', GIT_NOTE_REF, 'show', ref], execaOptions)).stdout);
} catch (error) { } catch (error) {
@ -324,28 +326,6 @@ async function getNote(ref, execaOptions) {
* @param {String} ref The Git reference to add the note to. * @param {String} ref The Git reference to add the note to.
* @param {Object} [execaOpts] Options to pass to `execa`. * @param {Object} [execaOpts] Options to pass to `execa`.
*/ */
async function addNote(note, ref, execaOptions) { export async function addNote(note, ref, execaOptions) {
await execa('git', ['notes', '--ref', GIT_NOTE_REF, 'add', '-f', '-m', JSON.stringify(note), ref], execaOptions); await execa('git', ['notes', '--ref', GIT_NOTE_REF, 'add', '-f', '-m', JSON.stringify(note), ref], execaOptions);
} }
module.exports = {
getTagHead,
getTags,
getCommits,
getBranches,
isRefExists,
fetch,
fetchNotes,
getGitHead,
repoUrl,
isGitRepo,
verifyAuth,
tag,
push,
pushNotes,
verifyTagName,
isBranchUpToDate,
verifyBranchName,
getNote,
addNote,
};

View File

@ -1,7 +1,7 @@
const {escapeRegExp, size, isString} = require('lodash'); import {escapeRegExp, isString, size} from 'lodash-es';
const {SECRET_REPLACEMENT, SECRET_MIN_SIZE} = require('./definitions/constants'); import {SECRET_MIN_SIZE, SECRET_REPLACEMENT} from './definitions/constants.js';
module.exports = (env) => { export default (env) => {
const toReplace = Object.keys(env).filter((envVar) => { const toReplace = Object.keys(env).filter((envVar) => {
// https://github.com/semantic-release/semantic-release/issues/1558 // https://github.com/semantic-release/semantic-release/issues/1558
if (envVar === 'GOPRIVATE') { if (envVar === 'GOPRIVATE') {
@ -17,4 +17,4 @@ module.exports = (env) => {
); );
return (output) => return (output) =>
output && isString(output) && toReplace.length > 0 ? output.toString().replace(regexp, SECRET_REPLACEMENT) : output; output && isString(output) && toReplace.length > 0 ? output.toString().replace(regexp, SECRET_REPLACEMENT) : output;
}; }

View File

@ -1,12 +1,12 @@
const {identity, isPlainObject, omit, castArray, isNil, isString} = require('lodash'); import {castArray, identity, isNil, isPlainObject, isString, omit} from 'lodash-es';
const AggregateError = require('aggregate-error'); import AggregateError from 'aggregate-error';
const getError = require('../get-error'); import getError from '../get-error.js';
const PLUGINS_DEFINITIONS = require('../definitions/plugins'); import PLUGINS_DEFINITIONS from '../definitions/plugins.js';
const {validatePlugin, validateStep, loadPlugin, parseConfig} = require('./utils'); import {loadPlugin, parseConfig, validatePlugin, validateStep} from './utils.js';
const pipeline = require('./pipeline'); import pipeline from './pipeline.js';
const normalize = require('./normalize'); import normalize from './normalize.js';
module.exports = async (context, pluginsPath) => { export default async (context, pluginsPath) => {
let {options, logger} = context; let {options, logger} = context;
const errors = []; const errors = [];
@ -100,4 +100,4 @@ module.exports = async (context, pluginsPath) => {
} }
return pluginsConfig; return pluginsConfig;
}; }

View File

@ -1,11 +1,13 @@
const {isPlainObject, isFunction, noop, cloneDeep, omit} = require('lodash'); import {cloneDeep, isFunction, isPlainObject, noop, omit} from 'lodash-es';
const debug = require('debug')('semantic-release:plugins'); import debugPlugins from 'debug';
const getError = require('../get-error'); import getError from '../get-error.js';
const {extractErrors} = require('../utils'); import {extractErrors} from '../utils.js';
const PLUGINS_DEFINITIONS = require('../definitions/plugins'); import PLUGINS_DEFINITIONS from '../definitions/plugins.js';
const {loadPlugin, parseConfig} = require('./utils'); import {loadPlugin, parseConfig} from './utils.js';
module.exports = async (context, type, pluginOpt, pluginsPath) => { const debug = debugPlugins('semantic-release:plugins');
export default async (context, type, pluginOpt, pluginsPath) => {
const {stdout, stderr, options, logger} = context; const {stdout, stderr, options, logger} = context;
if (!pluginOpt) { if (!pluginOpt) {
return noop; return noop;
@ -64,4 +66,4 @@ module.exports = async (context, type, pluginOpt, pluginsPath) => {
} }
return validator; return validator;
}; }

View File

@ -1,7 +1,7 @@
const {identity} = require('lodash'); import {identity} from 'lodash-es';
const pReduce = require('p-reduce'); import pReduce from 'p-reduce';
const AggregateError = require('aggregate-error'); import AggregateError from 'aggregate-error';
const {extractErrors} = require('../utils'); import {extractErrors} from '../utils.js';
/** /**
* A Function that execute a list of function sequencially. If at least one Function ins the pipeline throws an Error or rejects, the pipeline function rejects as well. * A Function that execute a list of function sequencially. If at least one Function ins the pipeline throws an Error or rejects, the pipeline function rejects as well.
@ -25,7 +25,7 @@ const {extractErrors} = require('../utils');
* *
* @return {Pipeline} A Function that execute the `steps` sequencially * @return {Pipeline} A Function that execute the `steps` sequencially
*/ */
module.exports = (steps, {settleAll = false, getNextInput = identity, transform = identity} = {}) => async (input) => { export default (steps, {settleAll = false, getNextInput = identity, transform = identity} = {}) => async (input) => {
const results = []; const results = [];
const errors = []; const errors = [];
await pReduce( await pReduce(
@ -55,4 +55,4 @@ module.exports = (steps, {settleAll = false, getNextInput = identity, transform
} }
return results; return results;
}; }

View File

@ -1,6 +1,9 @@
const {dirname} = require('path'); import {dirname} from 'node:path';
const {isString, isFunction, castArray, isArray, isPlainObject, isNil} = require('lodash'); import {fileURLToPath} from 'node:url';
const resolveFrom = require('resolve-from'); import {castArray, isArray, isFunction, isNil, isPlainObject, isString} from 'lodash-es';
import resolveFrom from 'resolve-from';
const __dirname = dirname(fileURLToPath(import.meta.url));
const validateSteps = (conf) => { const validateSteps = (conf) => {
return conf.every((conf) => { return conf.every((conf) => {
@ -24,7 +27,7 @@ const validateSteps = (conf) => {
}); });
}; };
function validatePlugin(conf) { export function validatePlugin(conf) {
return ( return (
isString(conf) || isString(conf) ||
(isArray(conf) && (isArray(conf) &&
@ -35,7 +38,7 @@ function validatePlugin(conf) {
); );
} }
function validateStep({required}, conf) { export function validateStep({required}, conf) {
conf = castArray(conf).filter(Boolean); conf = castArray(conf).filter(Boolean);
if (required) { if (required) {
return conf.length >= 1 && validateSteps(conf); return conf.length >= 1 && validateSteps(conf);
@ -44,7 +47,7 @@ function validateStep({required}, conf) {
return conf.length === 0 || validateSteps(conf); return conf.length === 0 || validateSteps(conf);
} }
async function loadPlugin({cwd}, name, pluginsPath) { export async function loadPlugin({cwd}, name, pluginsPath) {
const basePath = pluginsPath[name] const basePath = pluginsPath[name]
? dirname(resolveFrom.silent(__dirname, pluginsPath[name]) || resolveFrom(cwd, pluginsPath[name])) ? dirname(resolveFrom.silent(__dirname, pluginsPath[name]) || resolveFrom(cwd, pluginsPath[name]))
: __dirname; : __dirname;
@ -54,7 +57,7 @@ async function loadPlugin({cwd}, name, pluginsPath) {
return isFunction(name) ? name : (await import(resolveFrom.silent(basePath, name) || resolveFrom(cwd, name))).default; return isFunction(name) ? name : (await import(resolveFrom.silent(basePath, name) || resolveFrom(cwd, name))).default;
} }
function parseConfig(plugin) { export function parseConfig(plugin) {
let path; let path;
let config; let config;
if (isArray(plugin)) { if (isArray(plugin)) {
@ -67,5 +70,3 @@ function parseConfig(plugin) {
return [path, config || {}]; return [path, config || {}];
} }
module.exports = {validatePlugin, validateStep, loadPlugin, parseConfig};

View File

@ -1,12 +1,12 @@
const {isFunction, union, template} = require('lodash'); import {isFunction, template, union} from 'lodash-es';
const semver = require('semver'); import semver from 'semver';
const hideSensitive = require('./hide-sensitive'); import hideSensitive from './hide-sensitive.js';
function extractErrors(err) { export function extractErrors(err) {
return err && isFunction(err[Symbol.iterator]) ? [...err] : [err]; return err && err.errors ? [...err.errors] : [err];
} }
function hideSensitiveValues(env, objs) { export function hideSensitiveValues(env, objs) {
const hideFunction = hideSensitive(env); const hideFunction = hideSensitive(env);
return objs.map((object) => { return objs.map((object) => {
Object.getOwnPropertyNames(object).forEach((prop) => { Object.getOwnPropertyNames(object).forEach((prop) => {
@ -18,19 +18,19 @@ function hideSensitiveValues(env, objs) {
}); });
} }
function tagsToVersions(tags) { export function tagsToVersions(tags) {
return tags.map(({version}) => version); return tags.map(({version}) => version);
} }
function isMajorRange(range) { export function isMajorRange(range) {
return /^\d+\.x(?:\.x)?$/i.test(range); return /^\d+\.x(?:\.x)?$/i.test(range);
} }
function isMaintenanceRange(range) { export function isMaintenanceRange(range) {
return /^\d+\.(?:\d+|x)(?:\.x)?$/i.test(range); return /^\d+\.(?:\d+|x)(?:\.x)?$/i.test(range);
} }
function getUpperBound(range) { export function getUpperBound(range) {
const result = semver.valid(range) const result = semver.valid(range)
? range ? range
: ((semver.validRange(range) || '').match(/<(?<upperBound>\d+\.\d+\.\d+(-\d+)?)$/) || [])[1]; : ((semver.validRange(range) || '').match(/<(?<upperBound>\d+\.\d+\.\d+(-\d+)?)$/) || [])[1];
@ -41,27 +41,27 @@ function getUpperBound(range) {
: result; : result;
} }
function getLowerBound(range) { export function getLowerBound(range) {
return ((semver.validRange(range) || '').match(/(?<lowerBound>\d+\.\d+\.\d+)/) || [])[1]; return ((semver.validRange(range) || '').match(/(?<lowerBound>\d+\.\d+\.\d+)/) || [])[1];
} }
function highest(version1, version2) { export function highest(version1, version2) {
return version1 && version2 ? (semver.gt(version1, version2) ? version1 : version2) : version1 || version2; return version1 && version2 ? (semver.gt(version1, version2) ? version1 : version2) : version1 || version2;
} }
function lowest(version1, version2) { export function lowest(version1, version2) {
return version1 && version2 ? (semver.lt(version1, version2) ? version1 : version2) : version1 || version2; return version1 && version2 ? (semver.lt(version1, version2) ? version1 : version2) : version1 || version2;
} }
function getLatestVersion(versions, {withPrerelease} = {}) { export function getLatestVersion(versions, {withPrerelease} = {}) {
return versions.filter((version) => withPrerelease || !semver.prerelease(version)).sort(semver.rcompare)[0]; return versions.filter((version) => withPrerelease || !semver.prerelease(version)).sort(semver.rcompare)[0];
} }
function getEarliestVersion(versions, {withPrerelease} = {}) { export function getEarliestVersion(versions, {withPrerelease} = {}) {
return versions.filter((version) => withPrerelease || !semver.prerelease(version)).sort(semver.compare)[0]; return versions.filter((version) => withPrerelease || !semver.prerelease(version)).sort(semver.compare)[0];
} }
function getFirstVersion(versions, lowerBranches) { export function getFirstVersion(versions, lowerBranches) {
const lowerVersion = union(...lowerBranches.map(({tags}) => tagsToVersions(tags))).sort(semver.rcompare); const lowerVersion = union(...lowerBranches.map(({tags}) => tagsToVersions(tags))).sort(semver.rcompare);
if (lowerVersion[0]) { if (lowerVersion[0]) {
return versions.sort(semver.compare).find((version) => semver.gt(version, lowerVersion[0])); return versions.sort(semver.compare).find((version) => semver.gt(version, lowerVersion[0]));
@ -70,32 +70,14 @@ function getFirstVersion(versions, lowerBranches) {
return getEarliestVersion(versions); return getEarliestVersion(versions);
} }
function getRange(min, max) { export function getRange(min, max) {
return `>=${min}${max ? ` <${max}` : ''}`; return `>=${min}${max ? ` <${max}` : ''}`;
} }
function makeTag(tagFormat, version) { export function makeTag(tagFormat, version) {
return template(tagFormat)({version}); return template(tagFormat)({version});
} }
function isSameChannel(channel, otherChannel) { export function isSameChannel(channel, otherChannel) {
return channel === otherChannel || (!channel && !otherChannel); return channel === otherChannel || (!channel && !otherChannel);
} }
module.exports = {
extractErrors,
hideSensitiveValues,
tagsToVersions,
isMajorRange,
isMaintenanceRange,
getUpperBound,
getLowerBound,
highest,
lowest,
getLatestVersion,
getEarliestVersion,
getFirstVersion,
getRange,
makeTag,
isSameChannel,
};

View File

@ -1,9 +1,9 @@
const {template, isString, isPlainObject} = require('lodash'); import {isPlainObject, isString, template} from 'lodash-es';
const AggregateError = require('aggregate-error'); import AggregateError from 'aggregate-error';
const {isGitRepo, verifyTagName} = require('./git'); import {isGitRepo, verifyTagName} from './git.js';
const getError = require('./get-error'); import getError from './get-error.js';
module.exports = async (context) => { export default async (context) => {
const { const {
cwd, cwd,
env, env,
@ -40,4 +40,4 @@ module.exports = async (context) => {
if (errors.length > 0) { if (errors.length > 0) {
throw new AggregateError(errors); throw new AggregateError(errors);
} }
}; }

2620
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@ -2,11 +2,16 @@
"name": "semantic-release", "name": "semantic-release",
"description": "Automated semver compliant package publishing", "description": "Automated semver compliant package publishing",
"version": "0.0.0-development", "version": "0.0.0-development",
"type": "module",
"author": "Stephan Bönnemann <stephan@boennemann.me> (http://boennemann.me)", "author": "Stephan Bönnemann <stephan@boennemann.me> (http://boennemann.me)",
"ava": { "ava": {
"files": [ "files": [
"test/**/*.test.js" "test/**/*.test.js"
], ],
"nodeArguments": [
"--loader=testdouble",
"--no-warnings"
],
"timeout": "2m" "timeout": "2m"
}, },
"bin": { "bin": {
@ -17,7 +22,8 @@
}, },
"contributors": [ "contributors": [
"Gregor Martynus (https://twitter.com/gr2m)", "Gregor Martynus (https://twitter.com/gr2m)",
"Pierre Vanduynslager (https://twitter.com/@pvdlg_)" "Pierre Vanduynslager (https://twitter.com/@pvdlg_)",
"Matt Travi <npm@travi.org> (https://matt.travi.org/)"
], ],
"dependencies": { "dependencies": {
"@semantic-release/commit-analyzer": "^9.0.2", "@semantic-release/commit-analyzer": "^9.0.2",
@ -25,29 +31,29 @@
"@semantic-release/github": "^8.0.0", "@semantic-release/github": "^8.0.0",
"@semantic-release/npm": "^9.0.0", "@semantic-release/npm": "^9.0.0",
"@semantic-release/release-notes-generator": "^10.0.0", "@semantic-release/release-notes-generator": "^10.0.0",
"aggregate-error": "^3.0.0", "aggregate-error": "^4.0.1",
"cosmiconfig": "^7.0.0", "cosmiconfig": "^7.0.0",
"debug": "^4.0.0", "debug": "^4.0.0",
"env-ci": "^5.0.0", "env-ci": "8.0.0-beta.1",
"execa": "^5.0.0", "execa": "^6.1.0",
"figures": "^3.0.0", "figures": "^5.0.0",
"find-versions": "^4.0.0", "find-versions": "^5.1.0",
"get-stream": "^6.0.0", "get-stream": "^6.0.0",
"git-log-parser": "^1.2.0", "git-log-parser": "^1.2.0",
"hook-std": "^2.0.0", "hook-std": "^3.0.0",
"hosted-git-info": "^4.0.0", "hosted-git-info": "^5.1.0",
"lodash": "^4.17.21", "lodash-es": "^4.17.21",
"marked": "^4.0.10", "marked": "^4.1.0",
"marked-terminal": "^5.0.0", "marked-terminal": "^5.1.1",
"micromatch": "^4.0.2", "micromatch": "^4.0.2",
"p-each-series": "^2.1.0", "p-each-series": "^3.0.0",
"p-reduce": "^2.0.0", "p-reduce": "^3.0.0",
"read-pkg-up": "^7.0.0", "read-pkg-up": "^9.1.0",
"resolve-from": "^5.0.0", "resolve-from": "^5.0.0",
"semver": "^7.3.2", "semver": "^7.3.2",
"semver-diff": "^3.1.1", "semver-diff": "^3.1.1",
"signale": "^1.2.1", "signale": "^1.2.1",
"yargs": "^16.2.0" "yargs": "^17.5.1"
}, },
"devDependencies": { "devDependencies": {
"ava": "4.3.3", "ava": "4.3.3",
@ -56,16 +62,16 @@
"codecov": "3.8.3", "codecov": "3.8.3",
"delay": "5.0.0", "delay": "5.0.0",
"dockerode": "3.3.4", "dockerode": "3.3.4",
"file-url": "3.0.0", "file-url": "^4.0.0",
"fs-extra": "9.1.0", "fs-extra": "^10.1.0",
"got": "11.8.5", "got": "^12.5.0",
"js-yaml": "4.1.0", "js-yaml": "4.1.0",
"mockserver-client": "5.14.0", "mockserver-client": "5.14.0",
"nock": "13.2.9", "nock": "13.2.9",
"p-retry": "4.6.2", "p-retry": "^5.1.1",
"sinon": "14.0.0", "sinon": "14.0.0",
"stream-buffers": "3.0.2", "stream-buffers": "3.0.2",
"tempy": "1.0.1", "tempy": "^3.0.0",
"testdouble": "3.16.6", "testdouble": "3.16.6",
"xo": "0.32.1" "xo": "0.32.1"
}, },
@ -122,8 +128,8 @@
"lint": "xo", "lint": "xo",
"pretest": "npm run lint", "pretest": "npm run lint",
"semantic-release": "./bin/semantic-release.js", "semantic-release": "./bin/semantic-release.js",
"test": "c8 ava -v", "test": "c8 ava --verbose",
"test:ci": "c8 ava -v" "test:ci": "c8 ava --verbose"
}, },
"xo": { "xo": {
"prettier": true, "prettier": true,

View File

@ -1,7 +1,7 @@
const test = require('ava'); import test from 'ava';
const {union} = require('lodash'); import {union} from 'lodash-es';
const semver = require('semver'); import semver from 'semver';
const td = require('testdouble'); import * as td from 'testdouble';
const getBranch = (branches, branch) => branches.find(({name}) => name === branch); const getBranch = (branches, branch) => branches.find(({name}) => name === branch);
const release = (branches, name, version) => getBranch(branches, name).tags.push({version}); const release = (branches, name, version) => getBranch(branches, name).tags.push({version});
@ -11,8 +11,21 @@ const merge = (branches, source, target, tag) => {
getBranch(branches, target).tags getBranch(branches, target).tags
); );
}; };
const remoteBranches = [];
const repositoryUrl = 'repositoryUrl';
let expand, getTags, getBranches;
test('Enforce ranges with branching release workflow', async (t) => { test.beforeEach(async (t) => {
getTags = (await td.replaceEsm('../../lib/branches/get-tags.js')).default;
expand = (await td.replaceEsm('../../lib/branches/expand.js')).default;
getBranches = (await import('../../lib/branches/index.js')).default;
})
test.afterEach.always((t) => {
td.reset();
});
test.serial('Enforce ranges with branching release workflow', async (t) => {
const branches = [ const branches = [
{name: '1.x', tags: []}, {name: '1.x', tags: []},
{name: '1.0.x', tags: []}, {name: '1.0.x', tags: []},
@ -22,14 +35,11 @@ test('Enforce ranges with branching release workflow', async (t) => {
{name: 'beta', prerelease: true, tags: []}, {name: 'beta', prerelease: true, tags: []},
{name: 'alpha', prerelease: true, tags: []}, {name: 'alpha', prerelease: true, tags: []},
]; ];
td.replace('../../lib/branches/get-tags', () => branches); const context = {options: {branches}};
td.replace('../../lib/branches/expand', () => []); td.when(expand(repositoryUrl, context, branches)).thenResolve(remoteBranches);
const getBranches = require('../../lib/branches'); td.when(getTags(context, remoteBranches)).thenResolve(branches);
let result = (await getBranches('repositoryUrl', 'master', {options: {branches}})).map(({name, range}) => ({ let result = (await getBranches(repositoryUrl, 'master', context)).map(({name, range}) => ({name, range,}));
name,
range,
}));
t.is(getBranch(result, '1.0.x').range, '>=1.0.0 <1.0.0', 'Cannot release on 1.0.x before a releasing on master'); t.is(getBranch(result, '1.0.x').range, '>=1.0.0 <1.0.0', 'Cannot release on 1.0.x before a releasing on master');
t.is(getBranch(result, '1.x').range, '>=1.1.0 <1.0.0', 'Cannot release on 1.x before a releasing on master'); t.is(getBranch(result, '1.x').range, '>=1.1.0 <1.0.0', 'Cannot release on 1.x before a releasing on master');
t.is(getBranch(result, 'master').range, '>=1.0.0'); t.is(getBranch(result, 'master').range, '>=1.0.0');
@ -37,10 +47,7 @@ test('Enforce ranges with branching release workflow', async (t) => {
t.is(getBranch(result, 'next-major').range, '>=1.0.0'); t.is(getBranch(result, 'next-major').range, '>=1.0.0');
release(branches, 'master', '1.0.0'); release(branches, 'master', '1.0.0');
result = (await getBranches('repositoryUrl', 'master', {options: {branches}})).map(({name, range}) => ({ result = (await getBranches('repositoryUrl', 'master', context)).map(({name, range}) => ({name, range}));
name,
range,
}));
t.is(getBranch(result, '1.0.x').range, '>=1.0.0 <1.0.0', 'Cannot release on 1.0.x before a releasing on master'); t.is(getBranch(result, '1.0.x').range, '>=1.0.0 <1.0.0', 'Cannot release on 1.0.x before a releasing on master');
t.is(getBranch(result, '1.x').range, '>=1.1.0 <1.0.0', 'Cannot release on 1.x before a releasing on master'); t.is(getBranch(result, '1.x').range, '>=1.1.0 <1.0.0', 'Cannot release on 1.x before a releasing on master');
t.is(getBranch(result, 'master').range, '>=1.0.0'); t.is(getBranch(result, 'master').range, '>=1.0.0');
@ -191,7 +198,7 @@ test('Enforce ranges with branching release workflow', async (t) => {
t.is(getBranch(result, '1.x').range, '>=1.2.0 <2.0.0', 'Can release on 1.x only within range'); t.is(getBranch(result, '1.x').range, '>=1.2.0 <2.0.0', 'Can release on 1.x only within range');
}); });
test('Throw SemanticReleaseError for invalid configurations', async (t) => { test.serial('Throw SemanticReleaseError for invalid configurations', async (t) => {
const branches = [ const branches = [
{name: '123', range: '123', tags: []}, {name: '123', range: '123', tags: []},
{name: '1.x', tags: []}, {name: '1.x', tags: []},
@ -201,10 +208,12 @@ test('Throw SemanticReleaseError for invalid configurations', async (t) => {
{name: 'alpha', prerelease: 'alpha', tags: []}, {name: 'alpha', prerelease: 'alpha', tags: []},
{name: 'preview', prerelease: 'alpha', tags: []}, {name: 'preview', prerelease: 'alpha', tags: []},
]; ];
td.replace('../../lib/branches/get-tags', () => branches); const context = {options: {branches}};
td.replace('../../lib/branches/expand', () => []); td.when(expand(repositoryUrl, context, branches)).thenResolve(remoteBranches);
const getBranches = require('../../lib/branches'); td.when(getTags(context, remoteBranches)).thenResolve(branches);
const errors = [...(await t.throwsAsync(getBranches('repositoryUrl', 'master', {options: {branches}})))];
const error = await t.throwsAsync(getBranches(repositoryUrl, 'master', context));
const errors = [...error.errors];
t.is(errors[0].name, 'SemanticReleaseError'); t.is(errors[0].name, 'SemanticReleaseError');
t.is(errors[0].code, 'EMAINTENANCEBRANCH'); t.is(errors[0].code, 'EMAINTENANCEBRANCH');
@ -228,16 +237,16 @@ test('Throw SemanticReleaseError for invalid configurations', async (t) => {
t.truthy(errors[4].details); t.truthy(errors[4].details);
}); });
test('Throw a SemanticReleaseError if there is duplicate branches', async (t) => { test.serial('Throw a SemanticReleaseError if there is duplicate branches', async (t) => {
const branches = [ const branches = [
{name: 'master', tags: []}, {name: 'master', tags: []},
{name: 'master', tags: []}, {name: 'master', tags: []},
]; ];
td.replace('../../lib/branches/get-tags', () => branches); const context = {options: {branches}};
td.replace('../../lib/branches/expand', () => []); td.when(expand(repositoryUrl, context, branches)).thenResolve(remoteBranches);
const getBranches = require('../../lib/branches'); td.when(getTags(context, remoteBranches)).thenResolve(branches);
const errors = [...(await t.throwsAsync(getBranches('repositoryUrl', 'master', {options: {branches}})))]; const errors = [...(await t.throwsAsync(getBranches(repositoryUrl, 'master', context))).errors];
t.is(errors[0].name, 'SemanticReleaseError'); t.is(errors[0].name, 'SemanticReleaseError');
t.is(errors[0].code, 'EDUPLICATEBRANCHES'); t.is(errors[0].code, 'EDUPLICATEBRANCHES');
@ -245,16 +254,17 @@ test('Throw a SemanticReleaseError if there is duplicate branches', async (t) =>
t.truthy(errors[0].details); t.truthy(errors[0].details);
}); });
test('Throw a SemanticReleaseError for each invalid branch name', async (t) => { test.serial('Throw a SemanticReleaseError for each invalid branch name', async (t) => {
const branches = [ const branches = [
{name: '~master', tags: []}, {name: '~master', tags: []},
{name: '^master', tags: []}, {name: '^master', tags: []},
]; ];
td.replace('../../lib/branches/get-tags', () => branches); const context = {options: {branches}};
td.replace('../../lib/branches/expand', () => []); const remoteBranches = [];
const getBranches = require('../../lib/branches'); td.when(expand(repositoryUrl, context, branches)).thenResolve(remoteBranches);
td.when(getTags(context, remoteBranches)).thenResolve(branches);
const errors = [...(await t.throwsAsync(getBranches('repositoryUrl', 'master', {options: {branches}})))]; const errors = [...(await t.throwsAsync(getBranches(repositoryUrl, 'master', context))).errors];
t.is(errors[0].name, 'SemanticReleaseError'); t.is(errors[0].name, 'SemanticReleaseError');
t.is(errors[0].code, 'EINVALIDBRANCHNAME'); t.is(errors[0].code, 'EINVALIDBRANCHNAME');

View File

@ -1,6 +1,6 @@
const test = require('ava'); import test from 'ava';
const expand = require('../../lib/branches/expand'); import expand from '../../lib/branches/expand.js';
const {gitRepo, gitCommits, gitCheckout, gitPush} = require('../helpers/git-utils'); import {gitCheckout, gitCommits, gitPush, gitRepo} from '../helpers/git-utils.js';
test('Expand branches defined with globs', async (t) => { test('Expand branches defined with globs', async (t) => {
const {cwd, repositoryUrl} = await gitRepo(true); const {cwd, repositoryUrl} = await gitRepo(true);

View File

@ -1,6 +1,6 @@
const test = require('ava'); import test from 'ava';
const getTags = require('../../lib/branches/get-tags'); import getTags from '../../lib/branches/get-tags.js';
const {gitRepo, gitCommits, gitTagVersion, gitCheckout, gitAddNote} = require('../helpers/git-utils'); import {gitAddNote, gitCheckout, gitCommits, gitRepo, gitTagVersion} from '../helpers/git-utils.js';
test('Get the valid tags', async (t) => { test('Get the valid tags', async (t) => {
const {cwd} = await gitRepo(); const {cwd} = await gitRepo();

View File

@ -1,5 +1,5 @@
const test = require('ava'); import test from 'ava';
const normalize = require('../../lib/branches/normalize'); import * as normalize from '../../lib/branches/normalize.js';
const toTags = (versions) => versions.map((version) => ({version})); const toTags = (versions) => versions.map((version) => ({version}));

View File

@ -1,8 +1,8 @@
const test = require('ava'); import test from 'ava';
const {escapeRegExp} = require('lodash'); import {escapeRegExp} from 'lodash-es';
const td = require('testdouble'); import * as td from 'testdouble';
const {stub} = require('sinon'); import {stub} from 'sinon';
const {SECRET_REPLACEMENT} = require('../lib/definitions/constants'); import {SECRET_REPLACEMENT} from '../lib/definitions/constants.js';
let previousArgv; let previousArgv;
let previousEnv; let previousEnv;
@ -27,10 +27,11 @@ test.afterEach.always((t) => {
process.argv = previousArgv; process.argv = previousArgv;
process.env = previousEnv; process.env = previousEnv;
td.reset();
}); });
test.serial('Pass options to semantic-release API', async (t) => { test.serial('Pass options to semantic-release API', async (t) => {
const run = stub().resolves(true);
const argv = [ const argv = [
'', '',
'', '',
@ -72,33 +73,49 @@ test.serial('Pass options to semantic-release API', async (t) => {
'--debug', '--debug',
'-d', '-d',
]; ];
td.replace('..', run); const index = await td.replaceEsm('../index.js');
process.argv = argv; process.argv = argv;
const cli = require('../cli'); const cli = (await import('../cli.js')).default;
const exitCode = await cli(); const exitCode = await cli();
t.deepEqual(run.args[0][0].branches, ['master', 'next']); td.verify(index.default({
t.is(run.args[0][0].repositoryUrl, 'https://github/com/owner/repo.git'); branches: ['master', 'next'],
t.is(run.args[0][0].tagFormat, `v\${version}`); b: ['master', 'next'],
t.deepEqual(run.args[0][0].plugins, ['plugin1', 'plugin2']); 'repository-url': 'https://github/com/owner/repo.git',
t.deepEqual(run.args[0][0].extends, ['config1', 'config2']); repositoryUrl: 'https://github/com/owner/repo.git',
t.deepEqual(run.args[0][0].verifyConditions, ['condition1', 'condition2']); r: 'https://github/com/owner/repo.git',
t.is(run.args[0][0].analyzeCommits, 'analyze'); 'tag-format': `v\${version}`,
t.deepEqual(run.args[0][0].verifyRelease, ['verify1', 'verify2']); tagFormat: `v\${version}`,
t.deepEqual(run.args[0][0].generateNotes, ['notes']); t: `v\${version}`,
t.deepEqual(run.args[0][0].prepare, ['prepare1', 'prepare2']); plugins: ['plugin1', 'plugin2'],
t.deepEqual(run.args[0][0].publish, ['publish1', 'publish2']); p: ['plugin1', 'plugin2'],
t.deepEqual(run.args[0][0].success, ['success1', 'success2']); extends: ['config1', 'config2'],
t.deepEqual(run.args[0][0].fail, ['fail1', 'fail2']); e: ['config1', 'config2'],
t.is(run.args[0][0].debug, true); 'dry-run': true,
t.is(run.args[0][0].dryRun, true); dryRun: true,
d: true,
verifyConditions: ['condition1', 'condition2'],
'verify-conditions': ['condition1', 'condition2'],
analyzeCommits: 'analyze',
'analyze-commits': 'analyze',
verifyRelease: ['verify1', 'verify2'],
'verify-release': ['verify1', 'verify2'],
generateNotes: ['notes'],
'generate-notes': ['notes'],
prepare: ['prepare1', 'prepare2'],
publish: ['publish1', 'publish2'],
success: ['success1', 'success2'],
fail: ['fail1', 'fail2'],
debug: true,
_: [],
'$0': ''
}));
t.is(exitCode, 0); t.is(exitCode, 0);
}); });
test.serial('Pass options to semantic-release API with alias arguments', async (t) => { test.serial('Pass options to semantic-release API with alias arguments', async (t) => {
const run = stub().resolves(true);
const argv = [ const argv = [
'', '',
'', '',
@ -116,48 +133,65 @@ test.serial('Pass options to semantic-release API with alias arguments', async (
'config2', 'config2',
'--dry-run', '--dry-run',
]; ];
td.replace('..', run); const index = await td.replaceEsm('../index.js');
process.argv = argv; process.argv = argv;
const cli = require('../cli'); const cli = (await import('../cli.js')).default;
const exitCode = await cli(); const exitCode = await cli();
t.deepEqual(run.args[0][0].branches, ['master']); td.verify(index.default({
t.is(run.args[0][0].repositoryUrl, 'https://github/com/owner/repo.git'); branches: ['master'],
t.is(run.args[0][0].tagFormat, `v\${version}`); b: ['master'],
t.deepEqual(run.args[0][0].plugins, ['plugin1', 'plugin2']); 'repository-url': 'https://github/com/owner/repo.git',
t.deepEqual(run.args[0][0].extends, ['config1', 'config2']); repositoryUrl: 'https://github/com/owner/repo.git',
t.is(run.args[0][0].dryRun, true); r: 'https://github/com/owner/repo.git',
'tag-format': `v\${version}`,
tagFormat: `v\${version}`,
t: `v\${version}`,
plugins: ['plugin1', 'plugin2'],
p: ['plugin1', 'plugin2'],
extends: ['config1', 'config2'],
e: ['config1', 'config2'],
'dry-run': true,
dryRun: true,
d: true,
_: [],
'$0': ''
}));
t.is(exitCode, 0); t.is(exitCode, 0);
}); });
test.serial('Pass unknown options to semantic-release API', async (t) => { test.serial('Pass unknown options to semantic-release API', async (t) => {
const run = stub().resolves(true);
const argv = ['', '', '--bool', '--first-option', 'value1', '--second-option', 'value2', '--second-option', 'value3']; const argv = ['', '', '--bool', '--first-option', 'value1', '--second-option', 'value2', '--second-option', 'value3'];
td.replace('..', run); const index = await td.replaceEsm('../index.js');
process.argv = argv; process.argv = argv;
const cli = require('../cli'); const cli = (await import('../cli.js')).default;
const exitCode = await cli(); const exitCode = await cli();
t.is(run.args[0][0].bool, true); td.verify(index.default({
t.is(run.args[0][0].firstOption, 'value1'); bool: true,
t.deepEqual(run.args[0][0].secondOption, ['value2', 'value3']); firstOption: 'value1',
'first-option': 'value1',
secondOption: ['value2', 'value3'],
'second-option': ['value2', 'value3'],
_: [],
'$0': ''
}));
t.is(exitCode, 0); t.is(exitCode, 0);
}); });
test.serial('Pass empty Array to semantic-release API for list option set to "false"', async (t) => { test.serial('Pass empty Array to semantic-release API for list option set to "false"', async (t) => {
const run = stub().resolves(true);
const argv = ['', '', '--publish', 'false']; const argv = ['', '', '--publish', 'false'];
td.replace('..', run); const index = await td.replaceEsm('../index.js');
process.argv = argv; process.argv = argv;
const cli = require('../cli'); const cli = (await import('../cli.js')).default;
const exitCode = await cli(); const exitCode = await cli();
t.deepEqual(run.args[0][0].publish, []); td.verify(index.default({publish: [], _: [], '$0': ''}));
t.is(exitCode, 0); t.is(exitCode, 0);
}); });
@ -165,9 +199,9 @@ test.serial('Pass empty Array to semantic-release API for list option set to "fa
test.serial('Do not set properties in option for which arg is not in command line', async (t) => { test.serial('Do not set properties in option for which arg is not in command line', async (t) => {
const run = stub().resolves(true); const run = stub().resolves(true);
const argv = ['', '', '-b', 'master']; const argv = ['', '', '-b', 'master'];
td.replace('..', run); await td.replaceEsm('../index.js', null, run);
process.argv = argv; process.argv = argv;
const cli = require('../cli'); const cli = (await import('../cli.js')).default;
await cli(); await cli();
@ -184,9 +218,9 @@ test.serial('Do not set properties in option for which arg is not in command lin
test.serial('Display help', async (t) => { test.serial('Display help', async (t) => {
const run = stub().resolves(true); const run = stub().resolves(true);
const argv = ['', '', '--help']; const argv = ['', '', '--help'];
td.replace('..', run); await td.replaceEsm('../index.js', null, run);
process.argv = argv; process.argv = argv;
const cli = require('../cli'); const cli = (await import('../cli.js')).default;
const exitCode = await cli(); const exitCode = await cli();
@ -197,9 +231,9 @@ test.serial('Display help', async (t) => {
test.serial('Return error exitCode and prints help if called with a command', async (t) => { test.serial('Return error exitCode and prints help if called with a command', async (t) => {
const run = stub().resolves(true); const run = stub().resolves(true);
const argv = ['', '', 'pre']; const argv = ['', '', 'pre'];
td.replace('..', run); await td.replaceEsm('../index.js', null, run);
process.argv = argv; process.argv = argv;
const cli = require('../cli'); const cli = (await import('../cli.js')).default;
const exitCode = await cli(); const exitCode = await cli();
@ -211,9 +245,9 @@ test.serial('Return error exitCode and prints help if called with a command', as
test.serial('Return error exitCode if multiple plugin are set for single plugin', async (t) => { test.serial('Return error exitCode if multiple plugin are set for single plugin', async (t) => {
const run = stub().resolves(true); const run = stub().resolves(true);
const argv = ['', '', '--analyze-commits', 'analyze1', 'analyze2']; const argv = ['', '', '--analyze-commits', 'analyze1', 'analyze2'];
td.replace('..', run); await td.replaceEsm('../index.js', null, run);
process.argv = argv; process.argv = argv;
const cli = require('../cli'); const cli = (await import('../cli.js')).default;
const exitCode = await cli(); const exitCode = await cli();
@ -223,11 +257,11 @@ test.serial('Return error exitCode if multiple plugin are set for single plugin'
}); });
test.serial('Return error exitCode if semantic-release throw error', async (t) => { test.serial('Return error exitCode if semantic-release throw error', async (t) => {
const run = stub().rejects(new Error('semantic-release error'));
const argv = ['', '']; const argv = ['', ''];
td.replace('..', run); const index = await td.replaceEsm('../index.js');
td.when(index.default({_: [], '$0': ''})).thenReject(new Error('semantic-release error'));
process.argv = argv; process.argv = argv;
const cli = require('../cli'); const cli = (await import('../cli.js')).default;
const exitCode = await cli(); const exitCode = await cli();
@ -237,12 +271,12 @@ test.serial('Return error exitCode if semantic-release throw error', async (t) =
test.serial('Hide sensitive environment variable values from the logs', async (t) => { test.serial('Hide sensitive environment variable values from the logs', async (t) => {
const env = {MY_TOKEN: 'secret token'}; const env = {MY_TOKEN: 'secret token'};
const run = stub().rejects(new Error(`Throw error: Exposing token ${env.MY_TOKEN}`));
const argv = ['', '']; const argv = ['', ''];
td.replace('..', run); const index = await td.replaceEsm('../index.js');
td.when(index.default({_: [], '$0': ''})).thenReject(new Error(`Throw error: Exposing token ${env.MY_TOKEN}`));
process.argv = argv; process.argv = argv;
process.env = {...process.env, ...env}; process.env = {...process.env, ...env};
const cli = require('../cli'); const cli = (await import('../cli.js')).default;
const exitCode = await cli(); const exitCode = await cli();

View File

@ -1,5 +1,5 @@
const test = require('ava'); import test from 'ava';
const {maintenance, prerelease, release} = require('../../lib/definitions/branches'); import {maintenance, prerelease, release} from '../../lib/definitions/branches.js';
test('A "maintenance" branch is identified by having a "range" property or a "name" formatted like "N.x", "N.x.x" or "N.N.x"', (t) => { test('A "maintenance" branch is identified by having a "range" property or a "name" formatted like "N.x", "N.x.x" or "N.N.x"', (t) => {
/* eslint-disable unicorn/no-fn-reference-in-iterator */ /* eslint-disable unicorn/no-fn-reference-in-iterator */

View File

@ -1,6 +1,6 @@
const test = require('ava'); import test from 'ava';
const plugins = require('../../lib/definitions/plugins'); import plugins from '../../lib/definitions/plugins.js';
const {RELEASE_NOTES_SEPARATOR, SECRET_REPLACEMENT} = require('../../lib/definitions/constants'); import {RELEASE_NOTES_SEPARATOR, SECRET_REPLACEMENT} from '../../lib/definitions/constants.js';
test('The "analyzeCommits" plugin output must be either undefined or a valid semver release type', (t) => { test('The "analyzeCommits" plugin output must be either undefined or a valid semver release type', (t) => {
t.false(plugins.analyzeCommits.outputValidator('invalid')); t.false(plugins.analyzeCommits.outputValidator('invalid'));

View File

@ -1 +1 @@
module.exports = () => {}; export default () => {}

View File

@ -1,4 +1,4 @@
const SemanticReleaseError = require('@semantic-release/error'); import SemanticReleaseError from '@semantic-release/error';
class InheritedError extends SemanticReleaseError { class InheritedError extends SemanticReleaseError {
constructor(message, code) { constructor(message, code) {
@ -9,6 +9,6 @@ class InheritedError extends SemanticReleaseError {
} }
} }
module.exports = () => { export default () => {
throw new InheritedError('Inherited error', 'EINHERITED'); throw new InheritedError('Inherited error', 'EINHERITED');
}; }

View File

@ -1,5 +1,5 @@
module.exports = () => { export default () => {
const error = new Error('a'); const error = new Error('a');
error.errorProperty = 'errorProperty'; error.errorProperty = 'errorProperty';
throw error; throw error;
}; }

View File

@ -1,5 +1,5 @@
const AggregateError = require('aggregate-error'); import AggregateError from 'aggregate-error';
module.exports = () => { export default () => {
throw new AggregateError([new Error('a'), new Error('b')]); throw new AggregateError([new Error('a'), new Error('b')]);
}; }

View File

@ -1 +1 @@
module.exports = (pluginConfig, context) => context; export default (pluginConfig, context) => context

View File

@ -1,6 +1,6 @@
module.exports = (pluginConfig, {env, logger}) => { export default (pluginConfig, {env, logger}) => {
console.log(`Console: Exposing token ${env.MY_TOKEN}`); console.log(`Console: Exposing token ${env.MY_TOKEN}`);
logger.log(`Log: Exposing token ${env.MY_TOKEN}`); logger.log(`Log: Exposing token ${env.MY_TOKEN}`);
logger.error(`Error: Console token ${env.MY_TOKEN}`); logger.error(`Error: Console token ${env.MY_TOKEN}`);
throw new Error(`Throw error: Exposing ${env.MY_TOKEN}`); throw new Error(`Throw error: Exposing ${env.MY_TOKEN}`);
}; }

View File

@ -1 +1 @@
module.exports = (pluginConfig, context) => ({pluginConfig, context}); export default (pluginConfig, context) => ({pluginConfig, context})

View File

@ -1,7 +1,7 @@
const test = require('ava'); import test from 'ava';
const {stub} = require('sinon'); import {stub} from 'sinon';
const getCommits = require('../lib/get-commits'); import getCommits from '../lib/get-commits.js';
const {gitRepo, gitCommits, gitDetachedHead} = require('./helpers/git-utils'); import {gitCommits, gitDetachedHead, gitRepo} from './helpers/git-utils.js';
test.beforeEach((t) => { test.beforeEach((t) => {
// Stub the logger functions // Stub the logger functions

View File

@ -1,12 +1,15 @@
const path = require('path'); import path from 'node:path';
const {format} = require('util'); import {format} from 'node:util';
const test = require('ava'); import test from 'ava';
const {writeFile, outputJson} = require('fs-extra'); import fsExtra from 'fs-extra';
const {omit} = require('lodash'); import {omit} from 'lodash-es';
const td = require('testdouble'); import * as td from 'testdouble';
const {stub} = require('sinon'); import yaml from 'js-yaml';
const yaml = require('js-yaml'); import {gitAddConfig, gitCommits, gitRepo, gitShallowClone, gitTagVersion} from './helpers/git-utils.js';
const {gitRepo, gitTagVersion, gitCommits, gitShallowClone, gitAddConfig} = require('./helpers/git-utils');
const {outputJson, writeFile} = fsExtra;
const pluginsConfig = {foo: 'bar', baz: 'qux'};
let plugins;
const DEFAULT_PLUGINS = [ const DEFAULT_PLUGINS = [
'@semantic-release/commit-analyzer', '@semantic-release/commit-analyzer',
@ -15,10 +18,13 @@ const DEFAULT_PLUGINS = [
'@semantic-release/github', '@semantic-release/github',
]; ];
test.beforeEach((t) => { test.beforeEach(async (t) => {
t.context.plugins = stub().returns({}); plugins = (await td.replaceEsm('../lib/plugins/index.js')).default;
td.replace('../lib/plugins', t.context.plugins); t.context.getConfig = (await import('../lib/get-config.js')).default;
t.context.getConfig = require('../lib/get-config'); });
test.afterEach.always((t) => {
td.reset();
}); });
test('Default values, reading repositoryUrl from package.json', async (t) => { test('Default values, reading repositoryUrl from package.json', async (t) => {
@ -103,7 +109,7 @@ test('Convert "ci" option to "noCi"', async (t) => {
t.is(result.noCi, true); t.is(result.noCi, true);
}); });
test('Read options from package.json', async (t) => { test.serial('Read options from package.json', async (t) => {
// Create a git repository, set the current working directory at the root of the repo // Create a git repository, set the current working directory at the root of the repo
const {cwd} = await gitRepo(); const {cwd} = await gitRepo();
const options = { const options = {
@ -114,19 +120,18 @@ test('Read options from package.json', async (t) => {
tagFormat: `v\${version}`, tagFormat: `v\${version}`,
plugins: false, plugins: false,
}; };
// Verify the plugins module is called with the plugin options from package.json
td.when(plugins({cwd, options}, {})).thenResolve(pluginsConfig);
// Create package.json in repository root // Create package.json in repository root
await outputJson(path.resolve(cwd, 'package.json'), {release: options}); await outputJson(path.resolve(cwd, 'package.json'), {release: options});
const {options: result} = await t.context.getConfig({cwd}); const result = await t.context.getConfig({cwd});
const expected = {...options, branches: ['test_branch']};
// Verify the options contains the plugin config from package.json // Verify the options contains the plugin config from package.json
t.deepEqual(result, expected); t.deepEqual(result, {options, plugins: pluginsConfig});
// Verify the plugins module is called with the plugin options from package.json
t.deepEqual(t.context.plugins.args[0][0], {options: expected, cwd});
}); });
test('Read options from .releaserc.yml', async (t) => { test.serial('Read options from .releaserc.yml', async (t) => {
// Create a git repository, set the current working directory at the root of the repo // Create a git repository, set the current working directory at the root of the repo
const {cwd} = await gitRepo(); const {cwd} = await gitRepo();
const options = { const options = {
@ -138,17 +143,16 @@ test('Read options from .releaserc.yml', async (t) => {
}; };
// Create package.json in repository root // Create package.json in repository root
await writeFile(path.resolve(cwd, '.releaserc.yml'), yaml.dump(options)); await writeFile(path.resolve(cwd, '.releaserc.yml'), yaml.dump(options));
const {options: result} = await t.context.getConfig({cwd});
const expected = {...options, branches: ['test_branch']};
// Verify the options contains the plugin config from package.json
t.deepEqual(result, expected);
// Verify the plugins module is called with the plugin options from package.json // Verify the plugins module is called with the plugin options from package.json
t.deepEqual(t.context.plugins.args[0][0], {options: expected, cwd}); td.when(plugins({cwd, options}, {})).thenResolve(pluginsConfig);
const result = await t.context.getConfig({cwd});
// Verify the options contains the plugin config from package.json
t.deepEqual(result, {options, plugins: pluginsConfig});
}); });
test('Read options from .releaserc.json', async (t) => { test.serial('Read options from .releaserc.json', async (t) => {
// Create a git repository, set the current working directory at the root of the repo // Create a git repository, set the current working directory at the root of the repo
const {cwd} = await gitRepo(); const {cwd} = await gitRepo();
const options = { const options = {
@ -160,17 +164,16 @@ test('Read options from .releaserc.json', async (t) => {
}; };
// Create package.json in repository root // Create package.json in repository root
await outputJson(path.resolve(cwd, '.releaserc.json'), options); await outputJson(path.resolve(cwd, '.releaserc.json'), options);
const {options: result} = await t.context.getConfig({cwd});
const expected = {...options, branches: ['test_branch']};
// Verify the options contains the plugin config from package.json
t.deepEqual(result, expected);
// Verify the plugins module is called with the plugin options from package.json // Verify the plugins module is called with the plugin options from package.json
t.deepEqual(t.context.plugins.args[0][0], {options: expected, cwd}); td.when(plugins({cwd, options}, {})).thenResolve(pluginsConfig);
const result = await t.context.getConfig({cwd});
// Verify the options contains the plugin config from package.json
t.deepEqual(result, {options, plugins: pluginsConfig});
}); });
test('Read options from .releaserc.js', async (t) => { test.serial('Read options from .releaserc.js', async (t) => {
// Create a git repository, set the current working directory at the root of the repo // Create a git repository, set the current working directory at the root of the repo
const {cwd} = await gitRepo(); const {cwd} = await gitRepo();
const options = { const options = {
@ -182,17 +185,16 @@ test('Read options from .releaserc.js', async (t) => {
}; };
// Create package.json in repository root // Create package.json in repository root
await writeFile(path.resolve(cwd, '.releaserc.js'), `module.exports = ${JSON.stringify(options)}`); await writeFile(path.resolve(cwd, '.releaserc.js'), `module.exports = ${JSON.stringify(options)}`);
const {options: result} = await t.context.getConfig({cwd});
const expected = {...options, branches: ['test_branch']};
// Verify the options contains the plugin config from package.json
t.deepEqual(result, expected);
// Verify the plugins module is called with the plugin options from package.json // Verify the plugins module is called with the plugin options from package.json
t.deepEqual(t.context.plugins.args[0][0], {options: expected, cwd}); td.when(plugins({cwd, options}, {})).thenResolve(pluginsConfig);
const result = await t.context.getConfig({cwd});
// Verify the options contains the plugin config from package.json
t.deepEqual(result, {options, plugins: pluginsConfig});
}); });
test('Read options from .releaserc.cjs', async (t) => { test.serial('Read options from .releaserc.cjs', async (t) => {
// Create a git repository, set the current working directory at the root of the repo // Create a git repository, set the current working directory at the root of the repo
const {cwd} = await gitRepo(); const {cwd} = await gitRepo();
const options = { const options = {
@ -204,17 +206,16 @@ test('Read options from .releaserc.cjs', async (t) => {
}; };
// Create .releaserc.cjs in repository root // Create .releaserc.cjs in repository root
await writeFile(path.resolve(cwd, '.releaserc.cjs'), `module.exports = ${JSON.stringify(options)}`); await writeFile(path.resolve(cwd, '.releaserc.cjs'), `module.exports = ${JSON.stringify(options)}`);
const {options: result} = await t.context.getConfig({cwd});
const expected = {...options, branches: ['test_branch']};
// Verify the options contains the plugin config from .releaserc.cjs
t.deepEqual(result, expected);
// Verify the plugins module is called with the plugin options from .releaserc.cjs // Verify the plugins module is called with the plugin options from .releaserc.cjs
t.deepEqual(t.context.plugins.args[0][0], {options: expected, cwd}); td.when(plugins({cwd, options}, {})).thenResolve(pluginsConfig);
const result = await t.context.getConfig({cwd});
// Verify the options contains the plugin config from .releaserc.cjs
t.deepEqual(result, {options, plugins: pluginsConfig});
}); });
test('Read options from release.config.js', async (t) => { test.serial('Read options from release.config.js', async (t) => {
// Create a git repository, set the current working directory at the root of the repo // Create a git repository, set the current working directory at the root of the repo
const {cwd} = await gitRepo(); const {cwd} = await gitRepo();
const options = { const options = {
@ -226,17 +227,16 @@ test('Read options from release.config.js', async (t) => {
}; };
// Create package.json in repository root // Create package.json in repository root
await writeFile(path.resolve(cwd, 'release.config.js'), `module.exports = ${JSON.stringify(options)}`); await writeFile(path.resolve(cwd, 'release.config.js'), `module.exports = ${JSON.stringify(options)}`);
const {options: result} = await t.context.getConfig({cwd});
const expected = {...options, branches: ['test_branch']};
// Verify the options contains the plugin config from package.json
t.deepEqual(result, expected);
// Verify the plugins module is called with the plugin options from package.json // Verify the plugins module is called with the plugin options from package.json
t.deepEqual(t.context.plugins.args[0][0], {options: expected, cwd}); td.when(plugins({cwd, options}, {})).thenResolve(pluginsConfig);
const result = await t.context.getConfig({cwd});
// Verify the options contains the plugin config from package.json
t.deepEqual(result, {options, plugins: pluginsConfig});
}); });
test('Read options from release.config.cjs', async (t) => { test.serial('Read options from release.config.cjs', async (t) => {
// Create a git repository, set the current working directory at the root of the repo // Create a git repository, set the current working directory at the root of the repo
const {cwd} = await gitRepo(); const {cwd} = await gitRepo();
const options = { const options = {
@ -246,19 +246,18 @@ test('Read options from release.config.cjs', async (t) => {
tagFormat: `v\${version}`, tagFormat: `v\${version}`,
plugins: false, plugins: false,
}; };
// Verify the plugins module is called with the plugin options from release.config.cjs
td.when(plugins({cwd, options}, {})).thenResolve(pluginsConfig);
// Create release.config.cjs in repository root // Create release.config.cjs in repository root
await writeFile(path.resolve(cwd, 'release.config.cjs'), `module.exports = ${JSON.stringify(options)}`); await writeFile(path.resolve(cwd, 'release.config.cjs'), `module.exports = ${JSON.stringify(options)}`);
const {options: result} = await t.context.getConfig({cwd}); const result = await t.context.getConfig({cwd});
const expected = {...options, branches: ['test_branch']};
// Verify the options contains the plugin config from release.config.cjs // Verify the options contains the plugin config from release.config.cjs
t.deepEqual(result, expected); t.deepEqual(result, {options, plugins: pluginsConfig});
// Verify the plugins module is called with the plugin options from release.config.cjs
t.deepEqual(t.context.plugins.args[0][0], {options: expected, cwd});
}); });
test('Prioritise CLI/API parameters over file configuration and git repo', async (t) => { test.serial('Prioritise CLI/API parameters over file configuration and git repo', async (t) => {
// Create a git repository, set the current working directory at the root of the repo // Create a git repository, set the current working directory at the root of the repo
let {cwd, repositoryUrl} = await gitRepo(); let {cwd, repositoryUrl} = await gitRepo();
await gitCommits(['First'], {cwd}); await gitCommits(['First'], {cwd});
@ -275,20 +274,19 @@ test('Prioritise CLI/API parameters over file configuration and git repo', async
tagFormat: `cli\${version}`, tagFormat: `cli\${version}`,
plugins: false, plugins: false,
}; };
// Verify the plugins module is called with the plugin options from CLI/API
td.when(plugins({cwd, options}, {})).thenResolve(pluginsConfig);
const pkg = {release: pkgOptions, repository: 'git@host.null:owner/module.git'}; const pkg = {release: pkgOptions, repository: 'git@host.null:owner/module.git'};
// Create package.json in repository root // Create package.json in repository root
await outputJson(path.resolve(cwd, 'package.json'), pkg); await outputJson(path.resolve(cwd, 'package.json'), pkg);
const result = await t.context.getConfig({cwd}, options); const result = await t.context.getConfig({cwd}, options);
const expected = {...options, branches: ['branch_cli']};
// Verify the options contains the plugin config from CLI/API // Verify the options contains the plugin config from CLI/API
t.deepEqual(result.options, expected); t.deepEqual(result, {options, plugins: pluginsConfig});
// Verify the plugins module is called with the plugin options from CLI/API
t.deepEqual(t.context.plugins.args[0][0], {options: expected, cwd});
}); });
test('Read configuration from file path in "extends"', async (t) => { test.serial('Read configuration from file path in "extends"', async (t) => {
// Create a git repository, set the current working directory at the root of the repo // Create a git repository, set the current working directory at the root of the repo
const {cwd} = await gitRepo(); const {cwd} = await gitRepo();
const pkgOptions = {extends: './shareable.json'}; const pkgOptions = {extends: './shareable.json'};
@ -303,23 +301,24 @@ test('Read configuration from file path in "extends"', async (t) => {
// Create package.json and shareable.json in repository root // Create package.json and shareable.json in repository root
await outputJson(path.resolve(cwd, 'package.json'), {release: pkgOptions}); await outputJson(path.resolve(cwd, 'package.json'), {release: pkgOptions});
await outputJson(path.resolve(cwd, 'shareable.json'), options); await outputJson(path.resolve(cwd, 'shareable.json'), options);
const {options: result} = await t.context.getConfig({cwd});
const expected = {...options, branches: ['test_branch']};
// Verify the options contains the plugin config from shareable.json
t.deepEqual(result, expected);
// Verify the plugins module is called with the plugin options from shareable.json // Verify the plugins module is called with the plugin options from shareable.json
t.deepEqual(t.context.plugins.args[0][0], {options: expected, cwd}); td.when(plugins(
t.deepEqual(t.context.plugins.args[0][1], { {cwd, options},
analyzeCommits: './shareable.json', {
generateNotes: './shareable.json', analyzeCommits: './shareable.json',
'plugin-1': './shareable.json', generateNotes: './shareable.json',
'plugin-2': './shareable.json', 'plugin-1': './shareable.json',
}); 'plugin-2': './shareable.json',
}
)).thenResolve(pluginsConfig);
const result = await t.context.getConfig({cwd});
// Verify the options contains the plugin config from shareable.json
t.deepEqual(result, {options, plugins: pluginsConfig});
}); });
test('Read configuration from module path in "extends"', async (t) => { test.serial('Read configuration from module path in "extends"', async (t) => {
// Create a git repository, set the current working directory at the root of the repo // Create a git repository, set the current working directory at the root of the repo
const {cwd} = await gitRepo(); const {cwd} = await gitRepo();
const pkgOptions = {extends: 'shareable'}; const pkgOptions = {extends: 'shareable'};
@ -334,21 +333,19 @@ test('Read configuration from module path in "extends"', async (t) => {
// Create package.json and shareable.json in repository root // Create package.json and shareable.json in repository root
await outputJson(path.resolve(cwd, 'package.json'), {release: pkgOptions}); await outputJson(path.resolve(cwd, 'package.json'), {release: pkgOptions});
await outputJson(path.resolve(cwd, 'node_modules/shareable/index.json'), options); await outputJson(path.resolve(cwd, 'node_modules/shareable/index.json'), options);
const {options: result} = await t.context.getConfig({cwd});
const expected = {...options, branches: ['test_branch']};
// Verify the options contains the plugin config from shareable.json
t.deepEqual(result, expected);
// Verify the plugins module is called with the plugin options from shareable.json // Verify the plugins module is called with the plugin options from shareable.json
t.deepEqual(t.context.plugins.args[0][0], {options: expected, cwd}); td.when(plugins(
t.deepEqual(t.context.plugins.args[0][1], { {cwd, options},
analyzeCommits: 'shareable', {analyzeCommits: 'shareable', generateNotes: 'shareable'}
generateNotes: 'shareable', )).thenResolve(pluginsConfig);
});
const result = await t.context.getConfig({cwd});
// Verify the options contains the plugin config from shareable.json
t.deepEqual(result, {options, plugins: pluginsConfig});
}); });
test('Read configuration from an array of paths in "extends"', async (t) => { test.serial('Read configuration from an array of paths in "extends"', async (t) => {
// Create a git repository, set the current working directory at the root of the repo // Create a git repository, set the current working directory at the root of the repo
const {cwd} = await gitRepo(); const {cwd} = await gitRepo();
const pkgOptions = {extends: ['./shareable1.json', './shareable2.json']}; const pkgOptions = {extends: ['./shareable1.json', './shareable2.json']};
@ -370,24 +367,26 @@ test('Read configuration from an array of paths in "extends"', async (t) => {
await outputJson(path.resolve(cwd, 'package.json'), {release: pkgOptions}); await outputJson(path.resolve(cwd, 'package.json'), {release: pkgOptions});
await outputJson(path.resolve(cwd, 'shareable1.json'), options1); await outputJson(path.resolve(cwd, 'shareable1.json'), options1);
await outputJson(path.resolve(cwd, 'shareable2.json'), options2); await outputJson(path.resolve(cwd, 'shareable2.json'), options2);
const expectedOptions = {...options1, ...options2, branches: ['test_branch']};
const {options: result} = await t.context.getConfig({cwd});
const expected = {...options1, ...options2, branches: ['test_branch']};
// Verify the options contains the plugin config from shareable1.json and shareable2.json
t.deepEqual(result, expected);
// Verify the plugins module is called with the plugin options from shareable1.json and shareable2.json // Verify the plugins module is called with the plugin options from shareable1.json and shareable2.json
t.deepEqual(t.context.plugins.args[0][0], {options: expected, cwd}); td.when(plugins(
t.deepEqual(t.context.plugins.args[0][1], { {options: expectedOptions, cwd},
verifyRelease1: './shareable1.json', {
verifyRelease2: './shareable2.json', verifyRelease1: './shareable1.json',
generateNotes2: './shareable2.json', verifyRelease2: './shareable2.json',
analyzeCommits1: './shareable1.json', generateNotes2: './shareable2.json',
analyzeCommits2: './shareable2.json', analyzeCommits1: './shareable1.json',
}); analyzeCommits2: './shareable2.json',
}
)).thenResolve(pluginsConfig);
const result = await t.context.getConfig({cwd});
// Verify the options contains the plugin config from shareable1.json and shareable2.json
t.deepEqual(result, {options: expectedOptions, plugins: pluginsConfig});
}); });
test('Prioritize configuration from config file over "extends"', async (t) => { test.serial('Prioritize configuration from config file over "extends"', async (t) => {
// Create a git repository, set the current working directory at the root of the repo // Create a git repository, set the current working directory at the root of the repo
const {cwd} = await gitRepo(); const {cwd} = await gitRepo();
const pkgOptions = { const pkgOptions = {
@ -408,22 +407,24 @@ test('Prioritize configuration from config file over "extends"', async (t) => {
// Create package.json and shareable.json in repository root // Create package.json and shareable.json in repository root
await outputJson(path.resolve(cwd, 'package.json'), {release: pkgOptions}); await outputJson(path.resolve(cwd, 'package.json'), {release: pkgOptions});
await outputJson(path.resolve(cwd, 'shareable.json'), options1); await outputJson(path.resolve(cwd, 'shareable.json'), options1);
const expectedOptions = omit({...options1, ...pkgOptions, branches: ['test_pkg']}, 'extends');
const {options: result} = await t.context.getConfig({cwd});
const expected = omit({...options1, ...pkgOptions, branches: ['test_pkg']}, 'extends');
// Verify the options contains the plugin config from package.json and shareable.json
t.deepEqual(result, expected);
// Verify the plugins module is called with the plugin options from package.json and shareable.json // Verify the plugins module is called with the plugin options from package.json and shareable.json
t.deepEqual(t.context.plugins.args[0][0], {options: expected, cwd}); td.when(plugins(
t.deepEqual(t.context.plugins.args[0][1], { {cwd, options: expectedOptions},
analyzeCommits: './shareable.json', {
generateNotesShareable: './shareable.json', analyzeCommits: './shareable.json',
publishShareable: './shareable.json', generateNotesShareable: './shareable.json',
}); publishShareable: './shareable.json',
}
)).thenResolve(pluginsConfig);
const result = await t.context.getConfig({cwd});
// Verify the options contains the plugin config from package.json and shareable.json
t.deepEqual(result, {options: expectedOptions, plugins: pluginsConfig});
}); });
test('Prioritize configuration from cli/API options over "extends"', async (t) => { test.serial('Prioritize configuration from cli/API options over "extends"', async (t) => {
// Create a git repository, set the current working directory at the root of the repo // Create a git repository, set the current working directory at the root of the repo
const {cwd} = await gitRepo(); const {cwd} = await gitRepo();
const cliOptions = { const cliOptions = {
@ -456,17 +457,20 @@ test('Prioritize configuration from cli/API options over "extends"', async (t) =
await outputJson(path.resolve(cwd, 'package.json'), {release: pkgOptions}); await outputJson(path.resolve(cwd, 'package.json'), {release: pkgOptions});
await outputJson(path.resolve(cwd, 'shareable1.json'), options1); await outputJson(path.resolve(cwd, 'shareable1.json'), options1);
await outputJson(path.resolve(cwd, 'shareable2.json'), options2); await outputJson(path.resolve(cwd, 'shareable2.json'), options2);
const expectedOptions = omit({...options2, ...pkgOptions, ...cliOptions, branches: ['branch_opts']}, 'extends');
const {options: result} = await t.context.getConfig({cwd}, cliOptions);
const expected = omit({...options2, ...pkgOptions, ...cliOptions, branches: ['branch_opts']}, 'extends');
// Verify the options contains the plugin config from package.json and shareable2.json
t.deepEqual(result, expected);
// Verify the plugins module is called with the plugin options from package.json and shareable2.json // Verify the plugins module is called with the plugin options from package.json and shareable2.json
t.deepEqual(t.context.plugins.args[0][0], {options: expected, cwd}); td.when(plugins(
{cwd, options: expectedOptions},
{analyzeCommits2: './shareable2.json', publishShareable: './shareable2.json'}
)).thenResolve(pluginsConfig);
const result = await t.context.getConfig({cwd}, cliOptions);
// Verify the options contains the plugin config from package.json and shareable2.json
t.deepEqual(result, {options: expectedOptions, plugins: pluginsConfig});
}); });
test('Allow to unset properties defined in shareable config with "null"', async (t) => { test.serial('Allow to unset properties defined in shareable config with "null"', async (t) => {
// Create a git repository, set the current working directory at the root of the repo // Create a git repository, set the current working directory at the root of the repo
const {cwd} = await gitRepo(); const {cwd} = await gitRepo();
const pkgOptions = { const pkgOptions = {
@ -485,33 +489,40 @@ test('Allow to unset properties defined in shareable config with "null"', async
// Create package.json and shareable.json in repository root // Create package.json and shareable.json in repository root
await outputJson(path.resolve(cwd, 'package.json'), {release: pkgOptions}); await outputJson(path.resolve(cwd, 'package.json'), {release: pkgOptions});
await outputJson(path.resolve(cwd, 'shareable.json'), options1); await outputJson(path.resolve(cwd, 'shareable.json'), options1);
// Verify the plugins module is called with the plugin options from shareable.json and the default `plugins`
td.when(plugins(
{
options: {
...omit(options1, 'analyzeCommits'),
...omit(pkgOptions, ['extends', 'analyzeCommits']),
plugins: DEFAULT_PLUGINS,
},
cwd,
},
{
generateNotes: './shareable.json',
analyzeCommits: './shareable.json',
'test-plugin': './shareable.json',
}
)).thenResolve(pluginsConfig);
const {options} = await t.context.getConfig({cwd}); const result = await t.context.getConfig({cwd});
// Verify the options contains the plugin config from shareable.json and the default `plugins` // Verify the options contains the plugin config from shareable.json and the default `plugins`
t.deepEqual(options, { t.deepEqual(
...omit(options1, ['analyzeCommits']), result,
...omit(pkgOptions, ['extends', 'analyzeCommits']), {
plugins: DEFAULT_PLUGINS, options: {
}); ...omit(options1, ['analyzeCommits']),
// Verify the plugins module is called with the plugin options from shareable.json and the default `plugins` ...omit(pkgOptions, ['extends', 'analyzeCommits']),
t.deepEqual(t.context.plugins.args[0][0], { plugins: DEFAULT_PLUGINS,
options: { },
...omit(options1, 'analyzeCommits'), plugins: pluginsConfig
...omit(pkgOptions, ['extends', 'analyzeCommits']), }
plugins: DEFAULT_PLUGINS, );
},
cwd,
});
t.deepEqual(t.context.plugins.args[0][1], {
generateNotes: './shareable.json',
analyzeCommits: './shareable.json',
'test-plugin': './shareable.json',
});
}); });
test('Allow to unset properties defined in shareable config with "undefined"', async (t) => { test.serial('Allow to unset properties defined in shareable config with "undefined"', async (t) => {
// Create a git repository, set the current working directory at the root of the repo // Create a git repository, set the current working directory at the root of the repo
const {cwd} = await gitRepo(); const {cwd} = await gitRepo();
const pkgOptions = { const pkgOptions = {
@ -526,25 +537,24 @@ test('Allow to unset properties defined in shareable config with "undefined"', a
tagFormat: `v\${version}`, tagFormat: `v\${version}`,
plugins: false, plugins: false,
}; };
// Create package.json and release.config.js in repository root // Create release.config.js and shareable.json in repository root
await writeFile(path.resolve(cwd, 'release.config.js'), `module.exports = ${format(pkgOptions)}`); await writeFile(path.resolve(cwd, 'release.config.js'), `module.exports = ${format(pkgOptions)}`);
await outputJson(path.resolve(cwd, 'shareable.json'), options1); await outputJson(path.resolve(cwd, 'shareable.json'), options1);
const expectedOptions = {
const {options: result} = await t.context.getConfig({cwd});
const expected = {
...omit(options1, 'analyzeCommits'), ...omit(options1, 'analyzeCommits'),
...omit(pkgOptions, ['extends', 'analyzeCommits']), ...omit(pkgOptions, ['extends', 'analyzeCommits']),
branches: ['test_branch'], branches: ['test_branch'],
}; };
// Verify the options contains the plugin config from shareable.json
t.deepEqual(result, expected);
// Verify the plugins module is called with the plugin options from shareable.json // Verify the plugins module is called with the plugin options from shareable.json
t.deepEqual(t.context.plugins.args[0][0], {options: expected, cwd}); td.when(plugins(
t.deepEqual(t.context.plugins.args[0][1], { {options: expectedOptions, cwd},
generateNotes: './shareable.json', {generateNotes: './shareable.json', analyzeCommits: './shareable.json'}
analyzeCommits: './shareable.json', )).thenResolve(pluginsConfig);
});
const result = await t.context.getConfig({cwd});
// Verify the options contains the plugin config from shareable.json
t.deepEqual(result, {options: expectedOptions, plugins: pluginsConfig});
}); });
test('Throw an Error if one of the shareable config cannot be found', async (t) => { test('Throw an Error if one of the shareable config cannot be found', async (t) => {

View File

@ -1,6 +1,6 @@
const test = require('ava'); import test from 'ava';
const getAuthUrl = require('../lib/get-git-auth-url'); import getAuthUrl from '../lib/get-git-auth-url.js';
const {gitRepo} = require('./helpers/git-utils'); import {gitRepo} from './helpers/git-utils.js';
const env = {GIT_ASKPASS: 'echo', GIT_TERMINAL_PROMPT: 0}; const env = {GIT_ASKPASS: 'echo', GIT_TERMINAL_PROMPT: 0};

View File

@ -1,5 +1,5 @@
const test = require('ava'); import test from 'ava';
const getLastRelease = require('../lib/get-last-release'); import getLastRelease from '../lib/get-last-release.js';
test('Get the highest non-prerelease valid tag', (t) => { test('Get the highest non-prerelease valid tag', (t) => {
const result = getLastRelease({ const result = getLastRelease({

View File

@ -1,6 +1,6 @@
const test = require('ava'); import test from 'ava';
const {spy} = require('sinon'); import {spy} from 'sinon';
const getLogger = require('../lib/get-logger'); import getLogger from '../lib/get-logger.js';
test('Expose "error", "success" and "log" functions', (t) => { test('Expose "error", "success" and "log" functions', (t) => {
const stdout = spy(); const stdout = spy();

View File

@ -1,6 +1,6 @@
const test = require('ava'); import test from 'ava';
const {stub} = require('sinon'); import {stub} from 'sinon';
const getNextVersion = require('../lib/get-next-version'); import getNextVersion from '../lib/get-next-version.js';
test.beforeEach((t) => { test.beforeEach((t) => {
// Stub the logger functions // Stub the logger functions

View File

@ -1,5 +1,5 @@
const test = require('ava'); import test from 'ava';
const getReleaseToAdd = require('../lib/get-release-to-add'); import getReleaseToAdd from '../lib/get-release-to-add.js';
test('Return versions merged from release to maintenance branch, excluding lower than branch start range', (t) => { test('Return versions merged from release to maintenance branch, excluding lower than branch start range', (t) => {
const result = getReleaseToAdd({ const result = getReleaseToAdd({

View File

@ -1,40 +1,40 @@
const test = require('ava'); import test from 'ava';
const tempy = require('tempy'); import {temporaryDirectory} from 'tempy';
const { import {
getTagHead, addNote,
isRefExists,
fetch, fetch,
fetchNotes,
getBranches,
getGitHead, getGitHead,
getNote,
getTagHead,
getTags,
isBranchUpToDate,
isGitRepo,
isRefExists,
push,
repoUrl, repoUrl,
tag, tag,
push, verifyTagName
getTags, } from '../lib/git.js';
getBranches, import {
isGitRepo,
verifyTagName,
isBranchUpToDate,
getNote,
addNote,
fetchNotes,
} = require('../lib/git');
const {
gitRepo,
gitCommits,
gitCheckout,
gitTagVersion,
gitShallowClone,
gitGetCommits,
gitAddConfig, gitAddConfig,
gitAddNote,
gitCheckout,
gitCommits,
gitCommitTag, gitCommitTag,
gitRemoteTagHead,
gitPush,
gitDetachedHead, gitDetachedHead,
gitDetachedHeadFromBranch, gitDetachedHeadFromBranch,
gitAddNote,
gitGetNote,
gitFetch, gitFetch,
initGit, gitGetCommits,
} = require('./helpers/git-utils'); gitGetNote,
gitPush,
gitRemoteTagHead,
gitRepo,
gitShallowClone,
gitTagVersion,
initGit
} from './helpers/git-utils.js';
test('Get the last commit sha', async (t) => { test('Get the last commit sha', async (t) => {
// Create a git repository, set the current working directory at the root of the repo // Create a git repository, set the current working directory at the root of the repo
@ -268,7 +268,7 @@ test('Return "true" if in a Git repository', async (t) => {
}); });
test('Return falsy if not in a Git repository', async (t) => { test('Return falsy if not in a Git repository', async (t) => {
const cwd = tempy.directory(); const cwd = temporaryDirectory();
t.falsy(await isGitRepo({cwd})); t.falsy(await isGitRepo({cwd}));
}); });
@ -288,7 +288,7 @@ test('Return falsy for invalid tag names', async (t) => {
}); });
test('Throws error if obtaining the tags fails', async (t) => { test('Throws error if obtaining the tags fails', async (t) => {
const cwd = tempy.directory(); const cwd = temporaryDirectory();
await t.throwsAsync(getTags('master', {cwd})); await t.throwsAsync(getTags('master', {cwd}));
}); });

View File

@ -1,10 +1,10 @@
const tempy = require('tempy'); import {temporaryDirectory} from 'tempy';
const execa = require('execa'); import {execa} from 'execa';
const fileUrl = require('file-url'); import fileUrl from 'file-url';
const pEachSeries = require('p-each-series'); import pEachSeries from 'p-each-series';
const gitLogParser = require('git-log-parser'); import gitLogParser from 'git-log-parser';
const getStream = require('get-stream'); import getStream from 'get-stream';
const {GIT_NOTE_REF} = require('../../lib/definitions/constants'); import {GIT_NOTE_REF} from '../../lib/definitions/constants.js';
/** /**
* Commit message information. * Commit message information.
@ -23,8 +23,8 @@ const {GIT_NOTE_REF} = require('../../lib/definitions/constants');
* @param {Boolean} withRemote `true` to create a shallow clone of a bare repository. * @param {Boolean} withRemote `true` to create a shallow clone of a bare repository.
* @return {String} The path of the repository * @return {String} The path of the repository
*/ */
async function initGit(withRemote) { export async function initGit(withRemote) {
const cwd = tempy.directory(); const cwd = temporaryDirectory();
const args = withRemote ? ['--bare', '--initial-branch=master'] : ['--initial-branch=master']; const args = withRemote ? ['--bare', '--initial-branch=master'] : ['--initial-branch=master'];
await execa('git', ['init', ...args], {cwd}).catch(() => { await execa('git', ['init', ...args], {cwd}).catch(() => {
@ -45,7 +45,7 @@ async function initGit(withRemote) {
* @param {String} [branch='master'] The branch to initialize. * @param {String} [branch='master'] The branch to initialize.
* @return {String} The path of the clone if `withRemote` is `true`, the path of the repository otherwise. * @return {String} The path of the clone if `withRemote` is `true`, the path of the repository otherwise.
*/ */
async function gitRepo(withRemote, branch = 'master') { export async function gitRepo(withRemote, branch = 'master') {
let {cwd, repositoryUrl} = await initGit(withRemote); let {cwd, repositoryUrl} = await initGit(withRemote);
if (withRemote) { if (withRemote) {
await initBareRepo(repositoryUrl, branch); await initBareRepo(repositoryUrl, branch);
@ -70,8 +70,8 @@ async function gitRepo(withRemote, branch = 'master') {
* @param {String} repositoryUrl The URL of the bare repository. * @param {String} repositoryUrl The URL of the bare repository.
* @param {String} [branch='master'] the branch to initialize. * @param {String} [branch='master'] the branch to initialize.
*/ */
async function initBareRepo(repositoryUrl, branch = 'master') { export async function initBareRepo(repositoryUrl, branch = 'master') {
const cwd = tempy.directory(); const cwd = temporaryDirectory();
await execa('git', ['clone', '--no-hardlinks', repositoryUrl, cwd], {cwd}); await execa('git', ['clone', '--no-hardlinks', repositoryUrl, cwd], {cwd});
await gitCheckout(branch, true, {cwd}); await gitCheckout(branch, true, {cwd});
await gitCommits(['Initial commit'], {cwd}); await gitCommits(['Initial commit'], {cwd});
@ -86,7 +86,7 @@ async function initBareRepo(repositoryUrl, branch = 'master') {
* *
* @returns {Array<Commit>} The created commits, in reverse order (to match `git log` order). * @returns {Array<Commit>} The created commits, in reverse order (to match `git log` order).
*/ */
async function gitCommits(messages, execaOptions) { export async function gitCommits(messages, execaOptions) {
await pEachSeries( await pEachSeries(
messages, messages,
async (message) => async (message) =>
@ -103,7 +103,7 @@ async function gitCommits(messages, execaOptions) {
* *
* @return {Array<Object>} The list of parsed commits. * @return {Array<Object>} The list of parsed commits.
*/ */
async function gitGetCommits(from, execaOptions) { export async function gitGetCommits(from, execaOptions) {
Object.assign(gitLogParser.fields, {hash: 'H', message: 'B', gitTags: 'd', committerDate: {key: 'ci', type: Date}}); Object.assign(gitLogParser.fields, {hash: 'H', message: 'B', gitTags: 'd', committerDate: {key: 'ci', type: Date}});
return ( return (
await getStream.array( await getStream.array(
@ -126,7 +126,7 @@ async function gitGetCommits(from, execaOptions) {
* @param {Boolean} create to create the branch, `false` to checkout an existing branch. * @param {Boolean} create to create the branch, `false` to checkout an existing branch.
* @param {Object} [execaOpts] Options to pass to `execa`. * @param {Object} [execaOpts] Options to pass to `execa`.
*/ */
async function gitCheckout(branch, create, execaOptions) { export async function gitCheckout(branch, create, execaOptions) {
await execa('git', create ? ['checkout', '-b', branch] : ['checkout', branch], execaOptions); await execa('git', create ? ['checkout', '-b', branch] : ['checkout', branch], execaOptions);
} }
@ -136,7 +136,7 @@ async function gitCheckout(branch, create, execaOptions) {
* @param {String} repositoryUrl The repository remote URL. * @param {String} repositoryUrl The repository remote URL.
* @param {Object} [execaOpts] Options to pass to `execa`. * @param {Object} [execaOpts] Options to pass to `execa`.
*/ */
async function gitFetch(repositoryUrl, execaOptions) { export async function gitFetch(repositoryUrl, execaOptions) {
await execa('git', ['fetch', repositoryUrl], execaOptions); await execa('git', ['fetch', repositoryUrl], execaOptions);
} }
@ -147,7 +147,7 @@ async function gitFetch(repositoryUrl, execaOptions) {
* *
* @return {String} The sha of the head commit in the current git repository. * @return {String} The sha of the head commit in the current git repository.
*/ */
async function gitHead(execaOptions) { export async function gitHead(execaOptions) {
return (await execa('git', ['rev-parse', 'HEAD'], execaOptions)).stdout; return (await execa('git', ['rev-parse', 'HEAD'], execaOptions)).stdout;
} }
@ -158,7 +158,7 @@ async function gitHead(execaOptions) {
* @param {String} [sha] The commit on which to create the tag. If undefined the tag is created on the last commit. * @param {String} [sha] The commit on which to create the tag. If undefined the tag is created on the last commit.
* @param {Object} [execaOpts] Options to pass to `execa`. * @param {Object} [execaOpts] Options to pass to `execa`.
*/ */
async function gitTagVersion(tagName, sha, execaOptions) { export async function gitTagVersion(tagName, sha, execaOptions) {
await execa('git', sha ? ['tag', '-f', tagName, sha] : ['tag', tagName], execaOptions); await execa('git', sha ? ['tag', '-f', tagName, sha] : ['tag', tagName], execaOptions);
} }
@ -171,8 +171,8 @@ async function gitTagVersion(tagName, sha, execaOptions) {
* @param {Number} [depth=1] The number of commit to clone. * @param {Number} [depth=1] The number of commit to clone.
* @return {String} The path of the cloned repository. * @return {String} The path of the cloned repository.
*/ */
async function gitShallowClone(repositoryUrl, branch = 'master', depth = 1) { export async function gitShallowClone(repositoryUrl, branch = 'master', depth = 1) {
const cwd = tempy.directory(); const cwd = temporaryDirectory();
await execa('git', ['clone', '--no-hardlinks', '--no-tags', '-b', branch, '--depth', depth, repositoryUrl, cwd], { await execa('git', ['clone', '--no-hardlinks', '--no-tags', '-b', branch, '--depth', depth, repositoryUrl, cwd], {
cwd, cwd,
@ -187,8 +187,8 @@ async function gitShallowClone(repositoryUrl, branch = 'master', depth = 1) {
* @param {Number} head A commit sha of the remote repo that will become the detached head of the new one. * @param {Number} head A commit sha of the remote repo that will become the detached head of the new one.
* @return {String} The path of the new repository. * @return {String} The path of the new repository.
*/ */
async function gitDetachedHead(repositoryUrl, head) { export async function gitDetachedHead(repositoryUrl, head) {
const cwd = tempy.directory(); const cwd = temporaryDirectory();
await execa('git', ['init'], {cwd}); await execa('git', ['init'], {cwd});
await execa('git', ['remote', 'add', 'origin', repositoryUrl], {cwd}); await execa('git', ['remote', 'add', 'origin', repositoryUrl], {cwd});
@ -197,8 +197,8 @@ async function gitDetachedHead(repositoryUrl, head) {
return cwd; return cwd;
} }
async function gitDetachedHeadFromBranch(repositoryUrl, branch, head) { export async function gitDetachedHeadFromBranch(repositoryUrl, branch, head) {
const cwd = tempy.directory(); const cwd = temporaryDirectory();
await execa('git', ['init'], {cwd}); await execa('git', ['init'], {cwd});
await execa('git', ['remote', 'add', 'origin', repositoryUrl], {cwd}); await execa('git', ['remote', 'add', 'origin', repositoryUrl], {cwd});
@ -215,7 +215,7 @@ async function gitDetachedHeadFromBranch(repositoryUrl, branch, head) {
* @param {String} value Config value. * @param {String} value Config value.
* @param {Object} [execaOpts] Options to pass to `execa`. * @param {Object} [execaOpts] Options to pass to `execa`.
*/ */
async function gitAddConfig(name, value, execaOptions) { export async function gitAddConfig(name, value, execaOptions) {
await execa('git', ['config', '--add', name, value], execaOptions); await execa('git', ['config', '--add', name, value], execaOptions);
} }
@ -227,7 +227,7 @@ async function gitAddConfig(name, value, execaOptions) {
* *
* @return {String} The sha of the commit associated with `tagName` on the local repository. * @return {String} The sha of the commit associated with `tagName` on the local repository.
*/ */
async function gitTagHead(tagName, execaOptions) { export async function gitTagHead(tagName, execaOptions) {
return (await execa('git', ['rev-list', '-1', tagName], execaOptions)).stdout; return (await execa('git', ['rev-list', '-1', tagName], execaOptions)).stdout;
} }
@ -240,7 +240,7 @@ async function gitTagHead(tagName, execaOptions) {
* *
* @return {String} The sha of the commit associated with `tagName` on the remote repository. * @return {String} The sha of the commit associated with `tagName` on the remote repository.
*/ */
async function gitRemoteTagHead(repositoryUrl, tagName, execaOptions) { export async function gitRemoteTagHead(repositoryUrl, tagName, execaOptions) {
return (await execa('git', ['ls-remote', '--tags', repositoryUrl, tagName], execaOptions)).stdout return (await execa('git', ['ls-remote', '--tags', repositoryUrl, tagName], execaOptions)).stdout
.split('\n') .split('\n')
.filter((tag) => Boolean(tag)) .filter((tag) => Boolean(tag))
@ -255,7 +255,7 @@ async function gitRemoteTagHead(repositoryUrl, tagName, execaOptions) {
* *
* @return {String} The tag associatedwith the sha in parameter or `null`. * @return {String} The tag associatedwith the sha in parameter or `null`.
*/ */
async function gitCommitTag(gitHead, execaOptions) { export async function gitCommitTag(gitHead, execaOptions) {
return (await execa('git', ['describe', '--tags', '--exact-match', gitHead], execaOptions)).stdout; return (await execa('git', ['describe', '--tags', '--exact-match', gitHead], execaOptions)).stdout;
} }
@ -268,7 +268,7 @@ async function gitCommitTag(gitHead, execaOptions) {
* *
* @throws {Error} if the push failed. * @throws {Error} if the push failed.
*/ */
async function gitPush(repositoryUrl, branch, execaOptions) { export async function gitPush(repositoryUrl, branch, execaOptions) {
await execa('git', ['push', '--tags', repositoryUrl, `HEAD:${branch}`], execaOptions); await execa('git', ['push', '--tags', repositoryUrl, `HEAD:${branch}`], execaOptions);
} }
@ -278,7 +278,7 @@ async function gitPush(repositoryUrl, branch, execaOptions) {
* @param {String} ref The ref to merge. * @param {String} ref The ref to merge.
* @param {Object} [execaOpts] Options to pass to `execa`. * @param {Object} [execaOpts] Options to pass to `execa`.
*/ */
async function merge(ref, execaOptions) { export async function merge(ref, execaOptions) {
await execa('git', ['merge', '--no-ff', ref], execaOptions); await execa('git', ['merge', '--no-ff', ref], execaOptions);
} }
@ -288,7 +288,7 @@ async function merge(ref, execaOptions) {
* @param {String} ref The ref to merge. * @param {String} ref The ref to merge.
* @param {Object} [execaOpts] Options to pass to `execa`. * @param {Object} [execaOpts] Options to pass to `execa`.
*/ */
async function mergeFf(ref, execaOptions) { export async function mergeFf(ref, execaOptions) {
await execa('git', ['merge', '--ff', ref], execaOptions); await execa('git', ['merge', '--ff', ref], execaOptions);
} }
@ -298,7 +298,7 @@ async function mergeFf(ref, execaOptions) {
* @param {String} ref The ref to merge. * @param {String} ref The ref to merge.
* @param {Object} [execaOpts] Options to pass to `execa`. * @param {Object} [execaOpts] Options to pass to `execa`.
*/ */
async function rebase(ref, execaOptions) { export async function rebase(ref, execaOptions) {
await execa('git', ['rebase', ref], execaOptions); await execa('git', ['rebase', ref], execaOptions);
} }
@ -309,7 +309,7 @@ async function rebase(ref, execaOptions) {
* @param {String} ref The ref to add the note to. * @param {String} ref The ref to add the note to.
* @param {Object} [execaOpts] Options to pass to `execa`. * @param {Object} [execaOpts] Options to pass to `execa`.
*/ */
async function gitAddNote(note, ref, execaOptions) { export async function gitAddNote(note, ref, execaOptions) {
await execa('git', ['notes', '--ref', GIT_NOTE_REF, 'add', '-m', note, ref], execaOptions); await execa('git', ['notes', '--ref', GIT_NOTE_REF, 'add', '-m', note, ref], execaOptions);
} }
@ -319,31 +319,6 @@ async function gitAddNote(note, ref, execaOptions) {
* @param {String} ref The ref to get the note from. * @param {String} ref The ref to get the note from.
* @param {Object} [execaOpts] Options to pass to `execa`. * @param {Object} [execaOpts] Options to pass to `execa`.
*/ */
async function gitGetNote(ref, execaOptions) { export async function gitGetNote(ref, execaOptions) {
return (await execa('git', ['notes', '--ref', GIT_NOTE_REF, 'show', ref], execaOptions)).stdout; return (await execa('git', ['notes', '--ref', GIT_NOTE_REF, 'show', ref], execaOptions)).stdout;
} }
module.exports = {
initGit,
gitRepo,
initBareRepo,
gitCommits,
gitGetCommits,
gitCheckout,
gitFetch,
gitHead,
gitTagVersion,
gitShallowClone,
gitDetachedHead,
gitDetachedHeadFromBranch,
gitAddConfig,
gitTagHead,
gitRemoteTagHead,
gitCommitTag,
gitPush,
merge,
mergeFf,
rebase,
gitAddNote,
gitGetNote,
};

View File

@ -1,7 +1,7 @@
const Docker = require('dockerode'); import Docker from 'dockerode';
const getStream = require('get-stream'); import getStream from 'get-stream';
const pRetry = require('p-retry'); import pRetry from 'p-retry';
const {initBareRepo, gitShallowClone} = require('./git-utils'); import {gitShallowClone, initBareRepo} from './git-utils.js';
const IMAGE = 'semanticrelease/docker-gitbox:latest'; const IMAGE = 'semanticrelease/docker-gitbox:latest';
const SERVER_PORT = 80; const SERVER_PORT = 80;
@ -12,12 +12,12 @@ const GIT_PASSWORD = 'suchsecure';
const docker = new Docker(); const docker = new Docker();
let container; let container;
const gitCredential = `${GIT_USERNAME}:${GIT_PASSWORD}`; export const gitCredential = `${GIT_USERNAME}:${GIT_PASSWORD}`;
/** /**
* Download the `gitbox` Docker image, create a new container and start it. * Download the `gitbox` Docker image, create a new container and start it.
*/ */
async function start() { export async function start() {
await getStream(await docker.pull(IMAGE)); await getStream(await docker.pull(IMAGE));
container = await docker.createContainer({ container = await docker.createContainer({
@ -38,7 +38,7 @@ async function start() {
/** /**
* Stop and remote the `mockserver` Docker container. * Stop and remote the `mockserver` Docker container.
*/ */
async function stop() { export async function stop() {
await container.stop(); await container.stop();
await container.remove(); await container.remove();
} }
@ -51,7 +51,7 @@ async function stop() {
* @param {String} [description=`Repository ${name}`] The repository description. * @param {String} [description=`Repository ${name}`] The repository description.
* @return {Object} The `repositoryUrl` (URL without auth) and `authUrl` (URL with auth). * @return {Object} The `repositoryUrl` (URL without auth) and `authUrl` (URL with auth).
*/ */
async function createRepo(name, branch = 'master', description = `Repository ${name}`) { export async function createRepo(name, branch = 'master', description = `Repository ${name}`) {
const exec = await container.exec({ const exec = await container.exec({
Cmd: ['repo-admin', '-n', name, '-d', description], Cmd: ['repo-admin', '-n', name, '-d', description],
AttachStdout: true, AttachStdout: true,
@ -68,5 +68,3 @@ async function createRepo(name, branch = 'master', description = `Repository ${n
return {cwd, repositoryUrl, authUrl}; return {cwd, repositoryUrl, authUrl};
} }
module.exports = {start, stop, gitCredential, createRepo};

View File

@ -1,8 +1,8 @@
const Docker = require('dockerode'); import Docker from 'dockerode';
const getStream = require('get-stream'); import getStream from 'get-stream';
const got = require('got'); import got from 'got';
const pRetry = require('p-retry'); import pRetry from 'p-retry';
const {mockServerClient} = require('mockserver-client'); import {mockServerClient} from 'mockserver-client';
const IMAGE = 'mockserver/mockserver:latest'; const IMAGE = 'mockserver/mockserver:latest';
const MOCK_SERVER_PORT = 1080; const MOCK_SERVER_PORT = 1080;
@ -13,7 +13,7 @@ let container;
/** /**
* Download the `mockserver` Docker image, create a new container and start it. * Download the `mockserver` Docker image, create a new container and start it.
*/ */
async function start() { export async function start() {
await getStream(await docker.pull(IMAGE)); await getStream(await docker.pull(IMAGE));
container = await docker.createContainer({ container = await docker.createContainer({
@ -38,7 +38,7 @@ async function start() {
/** /**
* Stop and remove the `mockserver` Docker container. * Stop and remove the `mockserver` Docker container.
*/ */
async function stop() { export async function stop() {
await container.stop(); await container.stop();
await container.remove(); await container.remove();
} }
@ -50,7 +50,7 @@ const client = mockServerClient(MOCK_SERVER_HOST, MOCK_SERVER_PORT);
/** /**
* @type {string} the url of the `mockserver` instance * @type {string} the url of the `mockserver` instance
*/ */
const url = `http://${MOCK_SERVER_HOST}:${MOCK_SERVER_PORT}`; export const url = `http://${MOCK_SERVER_HOST}:${MOCK_SERVER_PORT}`;
/** /**
* Set up the `mockserver` instance response for a specific request. * Set up the `mockserver` instance response for a specific request.
@ -65,7 +65,7 @@ const url = `http://${MOCK_SERVER_HOST}:${MOCK_SERVER_PORT}`;
* @param {Object} response.body The JSON object to respond in the response body. * @param {Object} response.body The JSON object to respond in the response body.
* @return {Object} An object representation the expectation. Pass to the `verify` function to validate the `mockserver` has been called with a `request` matching the expectations. * @return {Object} An object representation the expectation. Pass to the `verify` function to validate the `mockserver` has been called with a `request` matching the expectations.
*/ */
async function mock( export async function mock(
path, path,
{body: requestBody, headers: requestHeaders}, {body: requestBody, headers: requestHeaders},
{method = 'POST', statusCode = 200, body: responseBody} {method = 'POST', statusCode = 200, body: responseBody}
@ -96,8 +96,6 @@ async function mock(
* @param {Object} expectation The expectation created with `mock` function. * @param {Object} expectation The expectation created with `mock` function.
* @return {Promise} A Promise that resolves if the expectation is met or reject otherwise. * @return {Promise} A Promise that resolves if the expectation is met or reject otherwise.
*/ */
function verify(expectation) { export function verify(expectation) {
return client.verify(expectation); return client.verify(expectation);
} }
module.exports = {start, stop, mock, verify, url};

View File

@ -1,9 +1,10 @@
const Docker = require('dockerode'); import path, {dirname} from 'node:path';
const getStream = require('get-stream'); import {fileURLToPath} from 'node:url';
const got = require('got'); import Docker from 'dockerode';
const path = require('path'); import getStream from 'get-stream';
const delay = require('delay'); import got from 'got';
const pRetry = require('p-retry'); import delay from 'delay';
import pRetry from 'p-retry';
const IMAGE = 'verdaccio/verdaccio:4'; const IMAGE = 'verdaccio/verdaccio:4';
const REGISTRY_PORT = 4873; const REGISTRY_PORT = 4873;
@ -12,12 +13,13 @@ const NPM_USERNAME = 'integration';
const NPM_PASSWORD = 'suchsecure'; const NPM_PASSWORD = 'suchsecure';
const NPM_EMAIL = 'integration@test.com'; const NPM_EMAIL = 'integration@test.com';
const docker = new Docker(); const docker = new Docker();
const __dirname = dirname(fileURLToPath(import.meta.url));
let container; let container;
/** /**
* Download the `npm-registry-docker` Docker image, create a new container and start it. * Download the `npm-registry-docker` Docker image, create a new container and start it.
*/ */
async function start() { export async function start() {
await getStream(await docker.pull(IMAGE)); await getStream(await docker.pull(IMAGE));
container = await docker.createContainer({ container = await docker.createContainer({
@ -55,9 +57,9 @@ async function start() {
}); });
} }
const url = `http://${REGISTRY_HOST}:${REGISTRY_PORT}/`; export const url = `http://${REGISTRY_HOST}:${REGISTRY_PORT}/`;
const authEnv = { export const authEnv = {
npm_config_registry: url, // eslint-disable-line camelcase npm_config_registry: url, // eslint-disable-line camelcase
NPM_USERNAME, NPM_USERNAME,
NPM_PASSWORD, NPM_PASSWORD,
@ -67,9 +69,7 @@ const authEnv = {
/** /**
* Stop and remote the `npm-registry-docker` Docker container. * Stop and remote the `npm-registry-docker` Docker container.
*/ */
async function stop() { export async function stop() {
await container.stop(); await container.stop();
await container.remove(); await container.remove();
} }
module.exports = {start, stop, authEnv, url};

View File

@ -1,7 +1,5 @@
const execa = require('execa'); import {execa} from 'execa';
async function npmView(packageName, env) { export async function npmView(packageName, env) {
return JSON.parse((await execa('npm', ['view', packageName, '--json'], {env})).stdout); return JSON.parse((await execa('npm', ['view', packageName, '--json'], {env})).stdout);
} }
module.exports = {npmView};

View File

@ -1,7 +1,7 @@
const test = require('ava'); import test from 'ava';
const {repeat} = require('lodash'); import {repeat} from 'lodash-es';
const hideSensitive = require('../lib/hide-sensitive'); import hideSensitive from '../lib/hide-sensitive.js';
const {SECRET_REPLACEMENT, SECRET_MIN_SIZE} = require('../lib/definitions/constants'); import {SECRET_MIN_SIZE, SECRET_REPLACEMENT} from '../lib/definitions/constants.js';
test('Replace multiple sensitive environment variable values', (t) => { test('Replace multiple sensitive environment variable values', (t) => {
const env = {SOME_PASSWORD: 'password', SOME_TOKEN: 'secret'}; const env = {SOME_PASSWORD: 'password', SOME_TOKEN: 'secret'};

View File

@ -1,29 +1,28 @@
const test = require('ava'); import test from 'ava';
const {escapeRegExp, isString, sortBy, omit} = require('lodash'); import {escapeRegExp, isString, omit, sortBy} from 'lodash-es';
const td = require('testdouble'); import * as td from 'testdouble';
const {spy, stub} = require('sinon'); import {spy, stub} from 'sinon';
const {WritableStreamBuffer} = require('stream-buffers'); import {WritableStreamBuffer} from 'stream-buffers';
const AggregateError = require('aggregate-error'); import AggregateError from 'aggregate-error';
const SemanticReleaseError = require('@semantic-release/error'); import SemanticReleaseError from '@semantic-release/error';
const {COMMIT_NAME, COMMIT_EMAIL, SECRET_REPLACEMENT} = require('../lib/definitions/constants'); import {COMMIT_EMAIL, COMMIT_NAME, SECRET_REPLACEMENT} from '../lib/definitions/constants.js';
const { import {
gitHead: getGitHead, gitAddNote,
gitCheckout, gitCheckout,
gitTagHead,
gitRepo,
gitCommits, gitCommits,
gitTagVersion, gitGetNote,
gitRemoteTagHead, gitHead as getGitHead,
gitPush, gitPush,
gitRemoteTagHead,
gitRepo,
gitShallowClone, gitShallowClone,
gitTagHead,
gitTagVersion,
merge, merge,
mergeFf, mergeFf,
rebase, rebase
gitAddNote, } from './helpers/git-utils.js';
gitGetNote, import pluginNoop from './fixtures/plugin-noop.cjs';
} = require('./helpers/git-utils');
const pluginNoop = require.resolve('./fixtures/plugin-noop');
test.beforeEach((t) => { test.beforeEach((t) => {
// Stub the logger functions // Stub the logger functions
@ -143,9 +142,9 @@ test('Plugins are called with expected values', async (t) => {
{...nextRelease, notes: `${notes1}\n\n${notes2}\n\n${notes3}`, pluginName: pluginNoop}, {...nextRelease, notes: `${notes1}\n\n${notes2}\n\n${notes3}`, pluginName: pluginNoop},
]; ];
td.replace('../lib/get-logger', () => t.context.logger); await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
td.replace('env-ci', () => envCi); await td.replaceEsm('env-ci', null, () => envCi);
const semanticRelease = require('..'); const semanticRelease = (await import('../index.js')).default;
const result = await semanticRelease(options, { const result = await semanticRelease(options, {
cwd, cwd,
env, env,
@ -418,9 +417,9 @@ test('Use custom tag format', async (t) => {
fail: stub().resolves(), fail: stub().resolves(),
}; };
td.replace('../lib/get-logger', () => t.context.logger); await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false})); await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..'); const semanticRelease = (await import('../index.js')).default;
t.truthy( t.truthy(
await semanticRelease(options, { await semanticRelease(options, {
cwd, cwd,
@ -476,9 +475,9 @@ test('Use new gitHead, and recreate release notes if a prepare plugin create a c
fail: stub().resolves(), fail: stub().resolves(),
}; };
td.replace('../lib/get-logger', () => t.context.logger); await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false})); await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..'); const semanticRelease = (await import('../index.js')).default;
t.truthy( t.truthy(
await semanticRelease(options, { await semanticRelease(options, {
@ -542,9 +541,9 @@ test('Make a new release when a commit is forward-ported to an upper branch', as
success, success,
}; };
td.replace('../lib/get-logger', () => t.context.logger); await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false})); await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..'); const semanticRelease = (await import('../index.js')).default;
t.truthy(await semanticRelease(options, {cwd, env: {}, stdout: {write: () => {}}, stderr: {write: () => {}}})); t.truthy(await semanticRelease(options, {cwd, env: {}, stdout: {write: () => {}}, stderr: {write: () => {}}}));
t.is(addChannel.callCount, 0); t.is(addChannel.callCount, 0);
@ -576,9 +575,9 @@ test('Publish a pre-release version', async (t) => {
fail: stub().resolves(), fail: stub().resolves(),
}; };
td.replace('../lib/get-logger', () => t.context.logger); await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'beta', isPr: false})); await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'beta', isPr: false}));
const semanticRelease = require('..'); const semanticRelease = (await import('../index.js')).default;
let {releases} = await semanticRelease(options, {cwd, env: {}, stdout: {write: () => {}}, stderr: {write: () => {}}}); let {releases} = await semanticRelease(options, {cwd, env: {}, stdout: {write: () => {}}, stderr: {write: () => {}}});
t.is(releases.length, 1); t.is(releases.length, 1);
@ -628,9 +627,9 @@ test('Publish releases from different branch on the same channel', async (t) =>
fail: stub().resolves(), fail: stub().resolves(),
}; };
td.replace('../lib/get-logger', () => t.context.logger); await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'next', isPr: false})); await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'next', isPr: false}));
let semanticRelease = require('..'); let semanticRelease = (await import('../index.js')).default;
let {releases} = await semanticRelease(options, {cwd, env: {}, stdout: {write: () => {}}, stderr: {write: () => {}}}); let {releases} = await semanticRelease(options, {cwd, env: {}, stdout: {write: () => {}}, stderr: {write: () => {}}});
t.is(releases.length, 1); t.is(releases.length, 1);
@ -653,9 +652,9 @@ test('Publish releases from different branch on the same channel', async (t) =>
await merge('next', {cwd}); await merge('next', {cwd});
await gitPush('origin', 'master', {cwd}); await gitPush('origin', 'master', {cwd});
td.replace('../lib/get-logger', () => t.context.logger); await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false})); await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
semanticRelease = require('..'); semanticRelease = (await import('../index.js')).default;
t.falsy(await semanticRelease(options, {cwd, env: {}, stdout: {write: () => {}}, stderr: {write: () => {}}})); t.falsy(await semanticRelease(options, {cwd, env: {}, stdout: {write: () => {}}, stderr: {write: () => {}}}));
t.is(addChannel.callCount, 0); t.is(addChannel.callCount, 0);
@ -686,9 +685,9 @@ test('Publish pre-releases the same channel as regular releases', async (t) => {
fail: stub().resolves(), fail: stub().resolves(),
}; };
td.replace('../lib/get-logger', () => t.context.logger); await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'beta', isPr: false})); await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'beta', isPr: false}));
const semanticRelease = require('..'); const semanticRelease = (await import('../index.js')).default;
let {releases} = await semanticRelease(options, {cwd, env: {}, stdout: {write: () => {}}, stderr: {write: () => {}}}); let {releases} = await semanticRelease(options, {cwd, env: {}, stdout: {write: () => {}}, stderr: {write: () => {}}});
t.is(releases.length, 1); t.is(releases.length, 1);
@ -751,9 +750,9 @@ test('Do not add pre-releases to a different channel', async (t) => {
success, success,
}; };
td.replace('../lib/get-logger', () => t.context.logger); await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false})); await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..'); const semanticRelease = (await import('../index.js')).default;
t.truthy(await semanticRelease(options, {cwd, env: {}, stdout: {write: () => {}}, stderr: {write: () => {}}})); t.truthy(await semanticRelease(options, {cwd, env: {}, stdout: {write: () => {}}, stderr: {write: () => {}}}));
t.is(addChannel.callCount, 0); t.is(addChannel.callCount, 0);
@ -819,9 +818,9 @@ async function addChannelMacro(t, mergeFunction) {
gitHead: commits[2].hash, gitHead: commits[2].hash,
}; };
td.replace('../lib/get-logger', () => t.context.logger); await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false})); await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..'); const semanticRelease = (await import('../index.js')).default;
const result = await semanticRelease(options, {cwd, env: {}, stdout: {write: () => {}}, stderr: {write: () => {}}}); const result = await semanticRelease(options, {cwd, env: {}, stdout: {write: () => {}}, stderr: {write: () => {}}});
t.deepEqual(result.releases, [ t.deepEqual(result.releases, [
@ -885,9 +884,9 @@ test('Call all "success" plugins even if one errors out', async (t) => {
success: [success1, success2], success: [success1, success2],
}; };
td.replace('../lib/get-logger', () => t.context.logger); await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false})); await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..'); const semanticRelease = (await import('../index.js')).default;
await t.throwsAsync( await t.throwsAsync(
semanticRelease(options, {cwd, env: {}, stdout: new WritableStreamBuffer(), stderr: new WritableStreamBuffer()}) semanticRelease(options, {cwd, env: {}, stdout: new WritableStreamBuffer(), stderr: new WritableStreamBuffer()})
@ -929,9 +928,9 @@ test('Log all "verifyConditions" errors', async (t) => {
fail, fail,
}; };
td.replace('../lib/get-logger', () => t.context.logger); await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false})); await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..'); const semanticRelease = (await import('../index.js')).default;
const errors = [ const errors = [
...(await t.throwsAsync( ...(await t.throwsAsync(
semanticRelease(options, {cwd, env: {}, stdout: new WritableStreamBuffer(), stderr: new WritableStreamBuffer()}) semanticRelease(options, {cwd, env: {}, stdout: new WritableStreamBuffer(), stderr: new WritableStreamBuffer()})
@ -973,9 +972,9 @@ test('Log all "verifyRelease" errors', async (t) => {
fail, fail,
}; };
td.replace('../lib/get-logger', () => t.context.logger); await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false})); await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..'); const semanticRelease = (await import('../index.js')).default;
const errors = [ const errors = [
...(await t.throwsAsync( ...(await t.throwsAsync(
semanticRelease(options, {cwd, env: {}, stdout: new WritableStreamBuffer(), stderr: new WritableStreamBuffer()}) semanticRelease(options, {cwd, env: {}, stdout: new WritableStreamBuffer(), stderr: new WritableStreamBuffer()})
@ -1026,9 +1025,9 @@ test('Dry-run skips addChannel, prepare, publish and success', async (t) => {
success, success,
}; };
td.replace('../lib/get-logger', () => t.context.logger); await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false})); await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..'); const semanticRelease = (await import('../index.js')).default;
t.truthy( t.truthy(
await semanticRelease(options, { await semanticRelease(options, {
cwd, cwd,
@ -1078,9 +1077,9 @@ test('Dry-run skips fail', async (t) => {
fail, fail,
}; };
td.replace('../lib/get-logger', () => t.context.logger); await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false})); await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..'); const semanticRelease = (await import('../index.js')).default;
const errors = [ const errors = [
...(await t.throwsAsync( ...(await t.throwsAsync(
semanticRelease(options, {cwd, env: {}, stdout: new WritableStreamBuffer(), stderr: new WritableStreamBuffer()}) semanticRelease(options, {cwd, env: {}, stdout: new WritableStreamBuffer(), stderr: new WritableStreamBuffer()})
@ -1137,9 +1136,9 @@ test('Force a dry-run if not on a CI and "noCi" is not explicitly set', async (t
fail: stub().resolves(), fail: stub().resolves(),
}; };
td.replace('../lib/get-logger', () => t.context.logger); await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
td.replace('env-ci', () => ({isCi: false, branch: 'master'})); await td.replaceEsm('env-ci', null, () => ({isCi: false, branch: 'master'}));
const semanticRelease = require('..'); const semanticRelease = (await import('../index.js')).default;
t.truthy( t.truthy(
await semanticRelease(options, { await semanticRelease(options, {
cwd, cwd,
@ -1186,9 +1185,9 @@ test('Dry-run does not print changelog if "generateNotes" return "undefined"', a
success: false, success: false,
}; };
td.replace('../lib/get-logger', () => t.context.logger); await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false})); await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..'); const semanticRelease = (await import('../index.js')).default;
t.truthy( t.truthy(
await semanticRelease(options, { await semanticRelease(options, {
cwd, cwd,
@ -1244,9 +1243,9 @@ test('Allow local releases with "noCi" option', async (t) => {
fail: stub().resolves(), fail: stub().resolves(),
}; };
td.replace('../lib/get-logger', () => t.context.logger); await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
td.replace('env-ci', () => ({isCi: false, branch: 'master', isPr: false})); await td.replaceEsm('env-ci', null, () => ({isCi: false, branch: 'master', isPr: false}));
const semanticRelease = require('..'); const semanticRelease = (await import('../index.js')).default;
t.truthy( t.truthy(
await semanticRelease(options, { await semanticRelease(options, {
cwd, cwd,
@ -1313,9 +1312,9 @@ test('Accept "undefined" value returned by "generateNotes" and "false" by "publi
fail: stub().resolves(), fail: stub().resolves(),
}; };
td.replace('../lib/get-logger', () => t.context.logger); await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false})); await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..'); const semanticRelease = (await import('../index.js')).default;
t.truthy( t.truthy(
await semanticRelease(options, { await semanticRelease(options, {
cwd, cwd,
@ -1341,9 +1340,9 @@ test('Returns false if triggered by a PR', async (t) => {
// Create a git repository, set the current working directory at the root of the repo // Create a git repository, set the current working directory at the root of the repo
const {cwd, repositoryUrl} = await gitRepo(true); const {cwd, repositoryUrl} = await gitRepo(true);
td.replace('../lib/get-logger', () => t.context.logger); await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', prBranch: 'patch-1', isPr: true})); await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', prBranch: 'patch-1', isPr: true}));
const semanticRelease = require('..'); const semanticRelease = (await import('../index.js')).default;
t.false( t.false(
await semanticRelease( await semanticRelease(
@ -1393,9 +1392,9 @@ test('Throws "EINVALIDNEXTVERSION" if next release is out of range of the curren
success, success,
}; };
td.replace('../lib/get-logger', () => t.context.logger); await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: '1.x', isPr: false})); await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: '1.x', isPr: false}));
const semanticRelease = require('..'); const semanticRelease = (await import('../index.js')).default;
const error = await t.throwsAsync( const error = await t.throwsAsync(
semanticRelease(options, {cwd, env: {}, stdout: {write: () => {}}, stderr: {write: () => {}}}) semanticRelease(options, {cwd, env: {}, stdout: {write: () => {}}, stderr: {write: () => {}}})
@ -1444,9 +1443,9 @@ test('Throws "EINVALIDNEXTVERSION" if next release is out of range of the curren
success, success,
}; };
td.replace('../lib/get-logger', () => t.context.logger); await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false})); await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..'); const semanticRelease = (await import('../index.js')).default;
const error = await t.throwsAsync( const error = await t.throwsAsync(
semanticRelease(options, {cwd, env: {}, stdout: {write: () => {}}, stderr: {write: () => {}}}) semanticRelease(options, {cwd, env: {}, stdout: {write: () => {}}, stderr: {write: () => {}}})
@ -1503,9 +1502,9 @@ test('Throws "EINVALIDMAINTENANCEMERGE" if merge an out of range release in a ma
fail, fail,
}; };
td.replace('../lib/get-logger', () => t.context.logger); await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: '1.1.x', isPr: false})); await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: '1.1.x', isPr: false}));
const semanticRelease = require('..'); const semanticRelease = await import('../index.js');
const errors = [ const errors = [
...(await t.throwsAsync( ...(await t.throwsAsync(
semanticRelease(options, {cwd, env: {}, stdout: {write: () => {}}, stderr: {write: () => {}}}) semanticRelease(options, {cwd, env: {}, stdout: {write: () => {}}, stderr: {write: () => {}}})
@ -1539,9 +1538,9 @@ test('Returns false value if triggered on an outdated clone', async (t) => {
await gitCommits(['Third'], {cwd}); await gitCommits(['Third'], {cwd});
await gitPush(repositoryUrl, 'master', {cwd}); await gitPush(repositoryUrl, 'master', {cwd});
td.replace('../lib/get-logger', () => t.context.logger); await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false})); await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..'); const semanticRelease = (await import('../index.js')).default;
t.false( t.false(
await semanticRelease( await semanticRelease(
@ -1571,9 +1570,9 @@ test('Returns false if not running from the configured branch', async (t) => {
fail: stub().resolves(), fail: stub().resolves(),
}; };
td.replace('../lib/get-logger', () => t.context.logger); await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'other-branch', isPr: false})); await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'other-branch', isPr: false}));
const semanticRelease = require('..'); const semanticRelease = (await import('../index.js')).default;
t.false( t.false(
await semanticRelease(options, { await semanticRelease(options, {
@ -1615,9 +1614,9 @@ test('Returns false if there is no relevant changes', async (t) => {
fail: stub().resolves(), fail: stub().resolves(),
}; };
td.replace('../lib/get-logger', () => t.context.logger); await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false})); await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..'); const semanticRelease = (await import('../index.js')).default;
t.false( t.false(
await semanticRelease(options, { await semanticRelease(options, {
@ -1670,9 +1669,9 @@ test('Exclude commits with [skip release] or [release skip] from analysis', asyn
fail: stub().resolves(), fail: stub().resolves(),
}; };
td.replace('../lib/get-logger', () => t.context.logger); await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false})); await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..'); const semanticRelease = (await import('../index.js')).default;
await semanticRelease(options, { await semanticRelease(options, {
cwd, cwd,
env: {}, env: {},
@ -1697,9 +1696,9 @@ test('Log both plugins errors and errors thrown by "fail" plugin', async (t) =>
verifyConditions: stub().rejects(pluginError), verifyConditions: stub().rejects(pluginError),
fail: [stub().rejects(failError1), stub().rejects(failError2)], fail: [stub().rejects(failError1), stub().rejects(failError2)],
}; };
td.replace('../lib/get-logger', () => t.context.logger); await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false})); await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..'); const semanticRelease = (await import('../index.js')).default;
await t.throwsAsync( await t.throwsAsync(
semanticRelease(options, {cwd, env: {}, stdout: new WritableStreamBuffer(), stderr: new WritableStreamBuffer()}) semanticRelease(options, {cwd, env: {}, stdout: new WritableStreamBuffer(), stderr: new WritableStreamBuffer()})
@ -1721,9 +1720,9 @@ test('Call "fail" only if a plugin returns a SemanticReleaseError', async (t) =>
verifyConditions: stub().rejects(pluginError), verifyConditions: stub().rejects(pluginError),
fail, fail,
}; };
td.replace('../lib/get-logger', () => t.context.logger); await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false})); await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..'); const semanticRelease = (await import('../index.js')).default;
await t.throwsAsync( await t.throwsAsync(
semanticRelease(options, {cwd, env: {}, stdout: new WritableStreamBuffer(), stderr: new WritableStreamBuffer()}) semanticRelease(options, {cwd, env: {}, stdout: new WritableStreamBuffer(), stderr: new WritableStreamBuffer()})
@ -1737,9 +1736,9 @@ test('Throw SemanticReleaseError if repositoryUrl is not set and cannot be found
// Create a git repository, set the current working directory at the root of the repo // Create a git repository, set the current working directory at the root of the repo
const {cwd} = await gitRepo(); const {cwd} = await gitRepo();
td.replace('../lib/get-logger', () => t.context.logger); await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false})); await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..'); const semanticRelease = (await import('../index.js')).default;
const errors = [ const errors = [
...(await t.throwsAsync( ...(await t.throwsAsync(
semanticRelease({}, {cwd, env: {}, stdout: new WritableStreamBuffer(), stderr: new WritableStreamBuffer()}) semanticRelease({}, {cwd, env: {}, stdout: new WritableStreamBuffer(), stderr: new WritableStreamBuffer()})
@ -1776,9 +1775,9 @@ test('Throw an Error if plugin returns an unexpected value', async (t) => {
fail: stub().resolves(), fail: stub().resolves(),
}; };
td.replace('../lib/get-logger', () => t.context.logger); await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false})); await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..'); const semanticRelease = (await import('../index.js')).default;
const error = await t.throwsAsync( const error = await t.throwsAsync(
semanticRelease(options, {cwd, env: {}, stdout: new WritableStreamBuffer(), stderr: new WritableStreamBuffer()}), semanticRelease(options, {cwd, env: {}, stdout: new WritableStreamBuffer(), stderr: new WritableStreamBuffer()}),
{instanceOf: SemanticReleaseError} {instanceOf: SemanticReleaseError}
@ -1805,9 +1804,9 @@ test('Hide sensitive information passed to "fail" plugin', async (t) => {
fail, fail,
}; };
td.replace('../lib/get-logger', () => t.context.logger); await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false})); await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..'); const semanticRelease = (await import('../index.js')).default;
await t.throwsAsync( await t.throwsAsync(
semanticRelease(options, {cwd, env, stdout: new WritableStreamBuffer(), stderr: new WritableStreamBuffer()}) semanticRelease(options, {cwd, env, stdout: new WritableStreamBuffer(), stderr: new WritableStreamBuffer()})
); );
@ -1849,9 +1848,9 @@ test('Hide sensitive information passed to "success" plugin', async (t) => {
fail: stub().resolves(), fail: stub().resolves(),
}; };
td.replace('../lib/get-logger', () => t.context.logger); await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false})); await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..'); const semanticRelease = (await import('../index.js')).default;
await semanticRelease(options, {cwd, env, stdout: new WritableStreamBuffer(), stderr: new WritableStreamBuffer()}); await semanticRelease(options, {cwd, env, stdout: new WritableStreamBuffer(), stderr: new WritableStreamBuffer()});
const release = success.args[0][1].releases[0]; const release = success.args[0][1].releases[0];
@ -1898,9 +1897,9 @@ test('Get all commits including the ones not in the shallow clone', async (t) =>
fail: stub().resolves(), fail: stub().resolves(),
}; };
td.replace('../lib/get-logger', () => t.context.logger); await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false})); await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..'); const semanticRelease = (await import('../index.js')).default;
t.truthy( t.truthy(
await semanticRelease(options, { await semanticRelease(options, {
cwd, cwd,

View File

@ -1,28 +1,31 @@
const path = require('path'); import path from 'path';
const test = require('ava'); import test from 'ava';
const td = require('testdouble'); import * as td from 'testdouble';
const {escapeRegExp} = require('lodash'); import {escapeRegExp} from 'lodash-es';
const {writeJson, readJson} = require('fs-extra'); import fsExtra from 'fs-extra';
const execa = require('execa'); import {execa} from 'execa';
const {WritableStreamBuffer} = require('stream-buffers'); import {WritableStreamBuffer} from 'stream-buffers';
const delay = require('delay'); import delay from 'delay';
const getAuthUrl = require('../lib/get-git-auth-url');
const {SECRET_REPLACEMENT} = require('../lib/definitions/constants'); import getAuthUrl from '../lib/get-git-auth-url.js';
const { import {SECRET_REPLACEMENT} from '../lib/definitions/constants.js';
gitHead, import {
gitTagHead,
gitRepo,
gitCommits,
gitRemoteTagHead,
gitPush,
gitCheckout, gitCheckout,
merge, gitCommits,
gitGetNote, gitGetNote,
} = require('./helpers/git-utils'); gitHead,
const {npmView} = require('./helpers/npm-utils'); gitPush,
const gitbox = require('./helpers/gitbox'); gitRemoteTagHead,
const mockServer = require('./helpers/mockserver'); gitRepo,
const npmRegistry = require('./helpers/npm-registry'); gitTagHead,
merge
} from './helpers/git-utils.js';
import {npmView} from './helpers/npm-utils.js';
import * as gitbox from './helpers/gitbox.js';
import * as mockServer from './helpers/mockserver.js';
import * as npmRegistry from './helpers/npm-registry.js';
const {readJson, writeJson} = fsExtra;
/* eslint camelcase: ["error", {properties: "never"}] */ /* eslint camelcase: ["error", {properties: "never"}] */
@ -47,10 +50,10 @@ const npmTestEnv = {
LEGACY_TOKEN: Buffer.from(`${env.NPM_USERNAME}:${env.NPM_PASSWORD}`, 'utf8').toString('base64'), LEGACY_TOKEN: Buffer.from(`${env.NPM_USERNAME}:${env.NPM_PASSWORD}`, 'utf8').toString('base64'),
}; };
const cli = require.resolve('../bin/semantic-release'); const cli = path.resolve('./bin/semantic-release.js');
const pluginError = require.resolve('./fixtures/plugin-error'); const pluginError = path.resolve('./test/fixtures/plugin-error');
const pluginInheritedError = require.resolve('./fixtures/plugin-error-inherited'); const pluginInheritedError = path.resolve('./test/fixtures/plugin-error-inherited');
const pluginLogEnv = require.resolve('./fixtures/plugin-log-env'); const pluginLogEnv = path.resolve('./test/fixtures/plugin-log-env');
test.before(async () => { test.before(async () => {
await Promise.all([gitbox.start(), npmRegistry.start(), mockServer.start()]); await Promise.all([gitbox.start(), npmRegistry.start(), mockServer.start()]);
@ -509,7 +512,7 @@ test('Pass options via CLI arguments', async (t) => {
test('Run via JS API', async (t) => { test('Run via JS API', async (t) => {
td.replace('../lib/logger', {log: () => {}, error: () => {}, stdout: () => {}}); td.replace('../lib/logger', {log: () => {}, error: () => {}, stdout: () => {}});
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false})); td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..'); const semanticRelease = (await import('../index.js')).default;
const packageName = 'test-js-api'; const packageName = 'test-js-api';
const owner = 'git'; const owner = 'git';
// Create a git repository, set the current working directory at the root of the repo // Create a git repository, set the current working directory at the root of the repo
@ -656,6 +659,8 @@ test('Hide sensitive environment variable values from the logs', async (t) => {
extendEnv: false, extendEnv: false,
}); });
console.log({stderr})
t.regex(stdout, new RegExp(`Console: Exposing token ${escapeRegExp(SECRET_REPLACEMENT)}`)); t.regex(stdout, new RegExp(`Console: Exposing token ${escapeRegExp(SECRET_REPLACEMENT)}`));
t.regex(stdout, new RegExp(`Log: Exposing token ${escapeRegExp(SECRET_REPLACEMENT)}`)); t.regex(stdout, new RegExp(`Log: Exposing token ${escapeRegExp(SECRET_REPLACEMENT)}`));
t.regex(stderr, new RegExp(`Error: Console token ${escapeRegExp(SECRET_REPLACEMENT)}`)); t.regex(stderr, new RegExp(`Error: Console token ${escapeRegExp(SECRET_REPLACEMENT)}`));

View File

@ -1,7 +1,7 @@
const test = require('ava'); import test from 'ava';
const {noop} = require('lodash'); import {noop} from 'lodash-es';
const {stub} = require('sinon'); import {stub} from 'sinon';
const normalize = require('../../lib/plugins/normalize'); import normalize from '../../lib/plugins/normalize.js';
const cwd = process.cwd(); const cwd = process.cwd();
@ -23,37 +23,37 @@ test('Normalize and load plugin from string', async (t) => {
const plugin = await normalize( const plugin = await normalize(
{cwd, options: {}, logger: t.context.logger}, {cwd, options: {}, logger: t.context.logger},
'verifyConditions', 'verifyConditions',
'./test/fixtures/plugin-noop', './test/fixtures/plugin-noop.cjs',
{} {}
); );
t.is(plugin.pluginName, './test/fixtures/plugin-noop'); t.is(plugin.pluginName, './test/fixtures/plugin-noop.cjs');
t.is(typeof plugin, 'function'); t.is(typeof plugin, 'function');
t.deepEqual(t.context.success.args[0], ['Loaded plugin "verifyConditions" from "./test/fixtures/plugin-noop"']); t.deepEqual(t.context.success.args[0], ['Loaded plugin "verifyConditions" from "./test/fixtures/plugin-noop.cjs"']);
}); });
test('Normalize and load plugin from object', async (t) => { test('Normalize and load plugin from object', async (t) => {
const plugin = await normalize( const plugin = await normalize(
{cwd, options: {}, logger: t.context.logger}, {cwd, options: {}, logger: t.context.logger},
'publish', 'publish',
{path: './test/fixtures/plugin-noop'}, {path: './test/fixtures/plugin-noop.cjs'},
{} {}
); );
t.is(plugin.pluginName, './test/fixtures/plugin-noop'); t.is(plugin.pluginName, './test/fixtures/plugin-noop.cjs');
t.is(typeof plugin, 'function'); t.is(typeof plugin, 'function');
t.deepEqual(t.context.success.args[0], ['Loaded plugin "publish" from "./test/fixtures/plugin-noop"']); t.deepEqual(t.context.success.args[0], ['Loaded plugin "publish" from "./test/fixtures/plugin-noop.cjs"']);
}); });
test('Normalize and load plugin from a base file path', async (t) => { test('Normalize and load plugin from a base file path', async (t) => {
const plugin = await normalize({cwd, options: {}, logger: t.context.logger}, 'verifyConditions', './plugin-noop', { const plugin = await normalize({cwd, options: {}, logger: t.context.logger}, 'verifyConditions', './plugin-noop.cjs', {
'./plugin-noop': './test/fixtures', './plugin-noop.cjs': './test/fixtures',
}); });
t.is(plugin.pluginName, './plugin-noop'); t.is(plugin.pluginName, './plugin-noop.cjs');
t.is(typeof plugin, 'function'); t.is(typeof plugin, 'function');
t.deepEqual(t.context.success.args[0], [ t.deepEqual(t.context.success.args[0], [
'Loaded plugin "verifyConditions" from "./plugin-noop" in shareable config "./test/fixtures"', 'Loaded plugin "verifyConditions" from "./plugin-noop.cjs" in shareable config "./test/fixtures"',
]); ]);
}); });
@ -72,7 +72,7 @@ test('Wrap plugin in a function that add the "pluginName" to multiple errors"',
'./plugin-errors': './test/fixtures', './plugin-errors': './test/fixtures',
}); });
const errors = [...(await t.throwsAsync(plugin({options: {}})))]; const errors = [...(await t.throwsAsync(plugin({options: {}}))).errors];
for (const error of errors) { for (const error of errors) {
t.is(error.pluginName, './plugin-errors'); t.is(error.pluginName, './plugin-errors');
} }
@ -90,12 +90,12 @@ test('Normalize and load plugin that retuns multiple functions', async (t) => {
const plugin = await normalize( const plugin = await normalize(
{cwd, options: {}, logger: t.context.logger}, {cwd, options: {}, logger: t.context.logger},
'verifyConditions', 'verifyConditions',
'./test/fixtures/multi-plugin', './test/fixtures/multi-plugin.cjs',
{} {}
); );
t.is(typeof plugin, 'function'); t.is(typeof plugin, 'function');
t.deepEqual(t.context.success.args[0], ['Loaded plugin "verifyConditions" from "./test/fixtures/multi-plugin"']); t.deepEqual(t.context.success.args[0], ['Loaded plugin "verifyConditions" from "./test/fixtures/multi-plugin.cjs"']);
}); });
test('Wrap "analyzeCommits" plugin in a function that validate the output of the plugin', async (t) => { test('Wrap "analyzeCommits" plugin in a function that validate the output of the plugin', async (t) => {
@ -258,7 +258,7 @@ test('Always pass a defined "pluginConfig" for plugin defined with path', async
test('Throws an error if the plugin return an object without the expected plugin function', async (t) => { test('Throws an error if the plugin return an object without the expected plugin function', async (t) => {
const error = await t.throwsAsync(() => const error = await t.throwsAsync(() =>
normalize({cwd, options: {}, logger: t.context.logger}, 'inexistantPlugin', './test/fixtures/multi-plugin', {}) normalize({cwd, options: {}, logger: t.context.logger}, 'nonExistentPlugin', './test/fixtures/multi-plugin.cjs', {})
); );
t.is(error.code, 'EPLUGIN'); t.is(error.code, 'EPLUGIN');
@ -269,7 +269,7 @@ test('Throws an error if the plugin return an object without the expected plugin
test('Throws an error if the plugin is not found', async (t) => { test('Throws an error if the plugin is not found', async (t) => {
await t.throwsAsync( await t.throwsAsync(
() => normalize({cwd, options: {}, logger: t.context.logger}, 'inexistantPlugin', 'non-existing-path', {}), () => normalize({cwd, options: {}, logger: t.context.logger}, 'nonExistentPlugin', 'non-existing-path', {}),
{ {
message: /Cannot find module 'non-existing-path'/, message: /Cannot find module 'non-existing-path'/,
code: 'MODULE_NOT_FOUND', code: 'MODULE_NOT_FOUND',

View File

@ -1,7 +1,7 @@
const test = require('ava'); import test from 'ava';
const {stub} = require('sinon'); import {stub} from 'sinon';
const AggregateError = require('aggregate-error'); import AggregateError from 'aggregate-error';
const pipeline = require('../../lib/plugins/pipeline'); import pipeline from '../../lib/plugins/pipeline.js';
test('Execute each function in series passing the same input', async (t) => { test('Execute each function in series passing the same input', async (t) => {
const step1 = stub().resolves(1); const step1 = stub().resolves(1);
@ -116,9 +116,9 @@ test('Throw all errors from the first step throwing an AggregateError', async (t
const step2 = stub().rejects(new AggregateError([error1, error2])); const step2 = stub().rejects(new AggregateError([error1, error2]));
const step3 = stub().resolves(3); const step3 = stub().resolves(3);
const errors = await t.throwsAsync(pipeline([step1, step2, step3])(0)); const error = await t.throwsAsync(pipeline([step1, step2, step3])(0));
t.deepEqual([...errors], [error1, error2]); t.deepEqual([...error.errors], [error1, error2]);
t.true(step1.calledWith(0)); t.true(step1.calledWith(0));
t.true(step2.calledWith(0)); t.true(step2.calledWith(0));
t.true(step3.notCalled); t.true(step3.notCalled);
@ -131,9 +131,9 @@ test('Execute all even if a Promise rejects', async (t) => {
const step2 = stub().rejects(error1); const step2 = stub().rejects(error1);
const step3 = stub().rejects(error2); const step3 = stub().rejects(error2);
const errors = await t.throwsAsync(pipeline([step1, step2, step3], {settleAll: true})(0)); const error = await t.throwsAsync(pipeline([step1, step2, step3], {settleAll: true})(0));
t.deepEqual([...errors], [error1, error2]); t.deepEqual([...error.errors], [error1, error2]);
t.true(step1.calledWith(0)); t.true(step1.calledWith(0));
t.true(step2.calledWith(0)); t.true(step2.calledWith(0));
t.true(step3.calledWith(0)); t.true(step3.calledWith(0));
@ -147,9 +147,9 @@ test('Throw all errors from all steps throwing an AggregateError', async (t) =>
const step1 = stub().rejects(new AggregateError([error1, error2])); const step1 = stub().rejects(new AggregateError([error1, error2]));
const step2 = stub().rejects(new AggregateError([error3, error4])); const step2 = stub().rejects(new AggregateError([error3, error4]));
const errors = await t.throwsAsync(pipeline([step1, step2], {settleAll: true})(0)); const error = await t.throwsAsync(pipeline([step1, step2], {settleAll: true})(0));
t.deepEqual([...errors], [error1, error2, error3, error4]); t.deepEqual([...error.errors], [error1, error2, error3, error4]);
t.true(step1.calledWith(0)); t.true(step1.calledWith(0));
t.true(step2.calledWith(0)); t.true(step2.calledWith(0));
}); });
@ -163,9 +163,9 @@ test('Execute each function in series passing a transformed input even if a step
const step4 = stub().resolves(4); const step4 = stub().resolves(4);
const getNextInput = (previousResult, result) => previousResult + result; const getNextInput = (previousResult, result) => previousResult + result;
const errors = await t.throwsAsync(pipeline([step1, step2, step3, step4], {settleAll: true, getNextInput})(0)); const error = await t.throwsAsync(pipeline([step1, step2, step3, step4], {settleAll: true, getNextInput})(0));
t.deepEqual([...errors], [error2, error3]); t.deepEqual([...error.errors], [error2, error3]);
t.true(step1.calledWith(0)); t.true(step1.calledWith(0));
t.true(step2.calledWith(0 + 1)); t.true(step2.calledWith(0 + 1));
t.true(step3.calledWith(0 + 1 + error2)); t.true(step3.calledWith(0 + 1 + error2));

View File

@ -1,11 +1,11 @@
const path = require('path'); import path from 'path';
const test = require('ava'); import test from 'ava';
const {copy, outputFile} = require('fs-extra'); import {copy, outputFile} from 'fs-extra';
const {stub} = require('sinon'); import {stub} from 'sinon';
const tempy = require('tempy'); import {temporaryDirectory} from 'tempy';
const getPlugins = require('../../lib/plugins'); import getPlugins from '../../lib/plugins/index.js';
// Save the current working diretory // Save the current working directory
const cwd = process.cwd(); const cwd = process.cwd();
test.beforeEach((t) => { test.beforeEach((t) => {
@ -35,9 +35,9 @@ test('Export plugins based on steps config', async (t) => {
cwd, cwd,
logger: t.context.logger, logger: t.context.logger,
options: { options: {
verifyConditions: ['./test/fixtures/plugin-noop', {path: './test/fixtures/plugin-noop'}], verifyConditions: ['./test/fixtures/plugin-noop.cjs', {path: './test/fixtures/plugin-noop.cjs'}],
generateNotes: './test/fixtures/plugin-noop', generateNotes: './test/fixtures/plugin-noop.cjs',
analyzeCommits: {path: './test/fixtures/plugin-noop'}, analyzeCommits: {path: './test/fixtures/plugin-noop.cjs'},
verifyRelease: () => {}, verifyRelease: () => {},
}, },
}, },
@ -137,9 +137,9 @@ test('Unknown steps of plugins configured in "plugins" are ignored', async (t) =
}); });
test('Export plugins loaded from the dependency of a shareable config module', async (t) => { test('Export plugins loaded from the dependency of a shareable config module', async (t) => {
const cwd = tempy.directory(); const cwd = temporaryDirectory();
await copy( await copy(
'./test/fixtures/plugin-noop.js', './test/fixtures/plugin-noop.cjs',
path.resolve(cwd, 'node_modules/shareable-config/node_modules/custom-plugin/index.js') path.resolve(cwd, 'node_modules/shareable-config/node_modules/custom-plugin/index.js')
); );
await outputFile(path.resolve(cwd, 'node_modules/shareable-config/index.js'), ''); await outputFile(path.resolve(cwd, 'node_modules/shareable-config/index.js'), '');
@ -170,8 +170,8 @@ test('Export plugins loaded from the dependency of a shareable config module', a
}); });
test('Export plugins loaded from the dependency of a shareable config file', async (t) => { test('Export plugins loaded from the dependency of a shareable config file', async (t) => {
const cwd = tempy.directory(); const cwd = temporaryDirectory();
await copy('./test/fixtures/plugin-noop.js', path.resolve(cwd, 'plugin/plugin-noop.js')); await copy('./test/fixtures/plugin-noop.cjs', path.resolve(cwd, 'plugin/plugin-noop.cjs'));
await outputFile(path.resolve(cwd, 'shareable-config.js'), ''); await outputFile(path.resolve(cwd, 'shareable-config.js'), '');
const plugins = await getPlugins( const plugins = await getPlugins(
@ -179,9 +179,9 @@ test('Export plugins loaded from the dependency of a shareable config file', asy
cwd, cwd,
logger: t.context.logger, logger: t.context.logger,
options: { options: {
verifyConditions: ['./plugin/plugin-noop', {path: './plugin/plugin-noop'}], verifyConditions: ['./plugin/plugin-noop.cjs', {path: './plugin/plugin-noop.cjs'}],
generateNotes: './plugin/plugin-noop', generateNotes: './plugin/plugin-noop.cjs',
analyzeCommits: {path: './plugin/plugin-noop'}, analyzeCommits: {path: './plugin/plugin-noop.cjs'},
verifyRelease: () => {}, verifyRelease: () => {},
}, },
}, },
@ -269,7 +269,7 @@ test('Throw an error for each invalid plugin configuration', async (t) => {
}, },
{} {}
) )
)), )).errors,
]; ];
t.is(errors[0].name, 'SemanticReleaseError'); t.is(errors[0].name, 'SemanticReleaseError');
@ -289,11 +289,11 @@ test('Throw EPLUGINSCONF error if the "plugins" option contains an old plugin de
{ {
cwd, cwd,
logger: t.context.logger, logger: t.context.logger,
options: {plugins: ['./test/fixtures/multi-plugin', './test/fixtures/plugin-noop', () => {}]}, options: {plugins: ['./test/fixtures/multi-plugin.cjs', './test/fixtures/plugin-noop.cjs', () => {}]},
}, },
{} {}
) )
)), )).errors,
]; ];
t.is(errors[0].name, 'SemanticReleaseError'); t.is(errors[0].name, 'SemanticReleaseError');
@ -306,7 +306,7 @@ test('Throw EPLUGINSCONF error for each invalid definition if the "plugins" opti
const errors = [ const errors = [
...(await t.throwsAsync(() => ...(await t.throwsAsync(() =>
getPlugins({cwd, logger: t.context.logger, options: {plugins: [1, {path: 1}, [() => {}, {}, {}]]}}, {}) getPlugins({cwd, logger: t.context.logger, options: {plugins: [1, {path: 1}, [() => {}, {}, {}]]}}, {})
)), )).errors,
]; ];
t.is(errors[0].name, 'SemanticReleaseError'); t.is(errors[0].name, 'SemanticReleaseError');

View File

@ -1,5 +1,5 @@
const test = require('ava'); import test from 'ava';
const {validatePlugin, validateStep, loadPlugin, parseConfig} = require('../../lib/plugins/utils'); import {loadPlugin, parseConfig, validatePlugin, validateStep} from '../../lib/plugins/utils.js';
test('validatePlugin', (t) => { test('validatePlugin', (t) => {
const path = 'plugin-module'; const path = 'plugin-module';
@ -193,10 +193,10 @@ test('loadPlugin', async (t) => {
const cwd = process.cwd(); const cwd = process.cwd();
const func = () => {}; const func = () => {};
t.is(require('../fixtures/plugin-noop'), await loadPlugin({cwd: './test/fixtures'}, './plugin-noop', {}), 'From cwd'); t.is((await import('../fixtures/plugin-noop.cjs')).default, await loadPlugin({cwd: './test/fixtures'}, './plugin-noop.cjs', {}), 'From cwd');
t.is( t.is(
require('../fixtures/plugin-noop'), (await import('../fixtures/plugin-noop.cjs')).default,
await loadPlugin({cwd}, './plugin-noop', {'./plugin-noop': './test/fixtures'}), await loadPlugin({cwd}, './plugin-noop.cjs', {'./plugin-noop.cjs': './test/fixtures'}),
'From a shareable config context' 'From a shareable config context'
); );
t.is(func, await loadPlugin({cwd}, func, {}), 'Defined as a function'); t.is(func, await loadPlugin({cwd}, func, {}), 'Defined as a function');

View File

@ -1,21 +1,21 @@
const test = require('ava'); import test from 'ava';
const AggregateError = require('aggregate-error'); import AggregateError from 'aggregate-error';
const { import {
extractErrors, extractErrors,
tagsToVersions,
isMajorRange,
isMaintenanceRange,
getUpperBound,
getLowerBound,
highest,
lowest,
getLatestVersion,
getEarliestVersion, getEarliestVersion,
getFirstVersion, getFirstVersion,
getLatestVersion,
getLowerBound,
getRange, getRange,
makeTag, getUpperBound,
highest,
isMaintenanceRange,
isMajorRange,
isSameChannel, isSameChannel,
} = require('../lib/utils'); lowest,
makeTag,
tagsToVersions
} from '../lib/utils.js';
test('extractErrors', (t) => { test('extractErrors', (t) => {
const errors = [new Error('Error 1'), new Error('Error 2')]; const errors = [new Error('Error 1'), new Error('Error 2')];

View File

@ -1,13 +1,13 @@
const test = require('ava'); import test from 'ava';
const tempy = require('tempy'); import {temporaryDirectory} from 'tempy';
const verify = require('../lib/verify'); import verify from '../lib/verify.js';
const {gitRepo} = require('./helpers/git-utils'); import {gitRepo} from './helpers/git-utils.js';
test('Throw a AggregateError', async (t) => { test('Throw a AggregateError', async (t) => {
const {cwd} = await gitRepo(); const {cwd} = await gitRepo();
const options = {branches: [{name: 'master'}, {name: ''}]}; const options = {branches: [{name: 'master'}, {name: ''}]};
const errors = [...(await t.throwsAsync(verify({cwd, options})))]; const errors = [...(await t.throwsAsync(verify({cwd, options}))).errors];
t.is(errors[0].name, 'SemanticReleaseError'); t.is(errors[0].name, 'SemanticReleaseError');
t.is(errors[0].code, 'ENOREPOURL'); t.is(errors[0].code, 'ENOREPOURL');
@ -28,10 +28,10 @@ test('Throw a AggregateError', async (t) => {
}); });
test('Throw a SemanticReleaseError if does not run on a git repository', async (t) => { test('Throw a SemanticReleaseError if does not run on a git repository', async (t) => {
const cwd = tempy.directory(); const cwd = temporaryDirectory();
const options = {branches: []}; const options = {branches: []};
const errors = [...(await t.throwsAsync(verify({cwd, options})))]; const errors = [...(await t.throwsAsync(verify({cwd, options}))).errors];
t.is(errors[0].name, 'SemanticReleaseError'); t.is(errors[0].name, 'SemanticReleaseError');
t.is(errors[0].code, 'ENOGITREPO'); t.is(errors[0].code, 'ENOGITREPO');
@ -43,7 +43,7 @@ test('Throw a SemanticReleaseError if the "tagFormat" is not valid', async (t) =
const {cwd, repositoryUrl} = await gitRepo(true); const {cwd, repositoryUrl} = await gitRepo(true);
const options = {repositoryUrl, tagFormat: `?\${version}`, branches: []}; const options = {repositoryUrl, tagFormat: `?\${version}`, branches: []};
const errors = [...(await t.throwsAsync(verify({cwd, options})))]; const errors = [...(await t.throwsAsync(verify({cwd, options}))).errors];
t.is(errors[0].name, 'SemanticReleaseError'); t.is(errors[0].name, 'SemanticReleaseError');
t.is(errors[0].code, 'EINVALIDTAGFORMAT'); t.is(errors[0].code, 'EINVALIDTAGFORMAT');
@ -55,7 +55,7 @@ test('Throw a SemanticReleaseError if the "tagFormat" does not contains the "ver
const {cwd, repositoryUrl} = await gitRepo(true); const {cwd, repositoryUrl} = await gitRepo(true);
const options = {repositoryUrl, tagFormat: 'test', branches: []}; const options = {repositoryUrl, tagFormat: 'test', branches: []};
const errors = [...(await t.throwsAsync(verify({cwd, options})))]; const errors = [...(await t.throwsAsync(verify({cwd, options}))).errors];
t.is(errors[0].name, 'SemanticReleaseError'); t.is(errors[0].name, 'SemanticReleaseError');
t.is(errors[0].code, 'ETAGNOVERSION'); t.is(errors[0].code, 'ETAGNOVERSION');
@ -67,7 +67,7 @@ test('Throw a SemanticReleaseError if the "tagFormat" contains multiple "version
const {cwd, repositoryUrl} = await gitRepo(true); const {cwd, repositoryUrl} = await gitRepo(true);
const options = {repositoryUrl, tagFormat: `\${version}v\${version}`, branches: []}; const options = {repositoryUrl, tagFormat: `\${version}v\${version}`, branches: []};
const errors = [...(await t.throwsAsync(verify({cwd, options})))]; const errors = [...(await t.throwsAsync(verify({cwd, options}))).errors];
t.is(errors[0].name, 'SemanticReleaseError'); t.is(errors[0].name, 'SemanticReleaseError');
t.is(errors[0].code, 'ETAGNOVERSION'); t.is(errors[0].code, 'ETAGNOVERSION');
@ -83,7 +83,7 @@ test('Throw a SemanticReleaseError for each invalid branch', async (t) => {
branches: [{name: ''}, {name: ' '}, {name: 1}, {}, {name: ''}, 1, 'master'], branches: [{name: ''}, {name: ' '}, {name: 1}, {}, {name: ''}, 1, 'master'],
}; };
const errors = [...(await t.throwsAsync(verify({cwd, options})))]; const errors = [...(await t.throwsAsync(verify({cwd, options}))).errors];
t.is(errors[0].name, 'SemanticReleaseError'); t.is(errors[0].name, 'SemanticReleaseError');
t.is(errors[0].code, 'EINVALIDBRANCH'); t.is(errors[0].code, 'EINVALIDBRANCH');