feat(esm): convert to esm (#2569)

for #2543

BREAKING CHANGE: semantic-release is now ESM-only. Since it is used through its own executable, the impact on consuming projects should be minimal.
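For the few projects that invoke semantic-release programmatically rather than through the CLI, the package now has to be loaded with ESM syntax, or via a dynamic import() from CommonJS. A minimal sketch, assuming the default export is still called with an options object as before (the dryRun option here is only illustrative):

// ESM consumer ("type": "module" in package.json)
import semanticRelease from 'semantic-release';

// resolves to the release result object, or false when no release was made
const result = await semanticRelease({dryRun: true});
console.log(result ? result.nextRelease.version : 'no release published');

// CommonJS consumers can no longer require('semantic-release');
// a dynamic import works instead:
// const {default: semanticRelease} = await import('semantic-release');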

BREAKING CHANGE: references to plugin files in configs need to include the file extension, because plugins are now loaded in an ESM context
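For example (the local plugin path below is hypothetical), a .releaserc.json that previously referenced "./plugins/my-plugin" now has to spell out the extension so the ESM resolver can locate the file:

{
  "plugins": [
    "@semantic-release/commit-analyzer",
    "@semantic-release/release-notes-generator",
    "./plugins/my-plugin.js"
  ]
}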
Matt Travi 2022-11-11 09:24:06 -06:00 committed by GitHub
parent 4012f75386
commit 9eab1adb9d
67 changed files with 3001 additions and 1761 deletions

View File

@ -1,20 +1,22 @@
#!/usr/bin/env node
// Bad news: We have to write plain ES5 in this file
// Good news: It's the only file of the entire project
/* eslint-disable no-var */
var semver = require('semver');
var execa = require('execa');
var findVersions = require('find-versions');
var pkg = require('../package.json');
import semver from 'semver';
import { execa } from 'execa';
import findVersions from 'find-versions';
import cli from '../cli.js';
import {createRequire} from 'node:module';
var MIN_GIT_VERSION = '2.7.1';
const require = createRequire(import.meta.url);
const { engines } = require('../package.json');
const { satisfies, lt } = semver;
if (!semver.satisfies(process.version, pkg.engines.node)) {
const MIN_GIT_VERSION = '2.7.1';
if (!satisfies(process.version, engines.node)) {
console.error(
`[semantic-release]: node version ${pkg.engines.node} is required. Found ${process.version}.
`[semantic-release]: node version ${engines.node} is required. Found ${process.version}.
See https://github.com/semantic-release/semantic-release/blob/master/docs/support/node-version.md for more details and solutions.`
);
@ -23,8 +25,8 @@ See https://github.com/semantic-release/semantic-release/blob/master/docs/suppor
execa('git', ['--version'])
.then(({stdout}) => {
var gitVersion = findVersions(stdout)[0];
if (semver.lt(gitVersion, MIN_GIT_VERSION)) {
const gitVersion = findVersions(stdout)[0];
if (lt(gitVersion, MIN_GIT_VERSION)) {
console.error(`[semantic-release]: Git version ${MIN_GIT_VERSION} is required. Found ${gitVersion}.`);
process.exit(1);
}
@ -36,7 +38,7 @@ execa('git', ['--version'])
});
// Node 10+ from this point on
require('../cli')()
cli()
.then((exitCode) => {
process.exitCode = exitCode;
})

22
cli.js
View File

@ -1,6 +1,7 @@
const {argv, env, stderr} = require('process'); // eslint-disable-line node/prefer-global/process
const util = require('util');
const hideSensitive = require('./lib/hide-sensitive');
import util from 'node:util';
import yargs from 'yargs';
import {hideBin} from 'yargs/helpers';
import hideSensitive from './lib/hide-sensitive.js';
const stringList = {
type: 'string',
@ -11,8 +12,8 @@ const stringList = {
: values.reduce((values, value) => values.concat(value.split(',').map((value) => value.trim())), []),
};
module.exports = async () => {
const cli = require('yargs')
export default async () => {
const cli = yargs(hideBin(process.argv))
.command('$0', 'Run automated package publishing', (yargs) => {
yargs.demandCommand(0, 0).usage(`Run automated package publishing
@ -36,12 +37,11 @@ Usage:
.option('debug', {describe: 'Output debugging information', type: 'boolean', group: 'Options'})
.option('d', {alias: 'dry-run', describe: 'Skip publishing', type: 'boolean', group: 'Options'})
.option('h', {alias: 'help', group: 'Options'})
.option('v', {alias: 'version', group: 'Options'})
.strict(false)
.exitProcess(false);
try {
const {help, version, ...options} = cli.parse(argv.slice(2));
const {help, version, ...options} = cli.parse(process.argv.slice(2));
if (Boolean(help) || Boolean(version)) {
return 0;
@ -49,16 +49,16 @@ Usage:
if (options.debug) {
// Debug must be enabled before other requires in order to work
require('debug').enable('semantic-release:*');
(await import('debug')).default.enable('semantic-release:*');
}
await require('.')(options);
await (await import('./index.js')).default(options);
return 0;
} catch (error) {
if (error.name !== 'YError') {
stderr.write(hideSensitive(env)(util.inspect(error, {colors: true})));
process.stderr.write(hideSensitive(process.env)(util.inspect(error, {colors: true})));
}
return 1;
}
};
}

View File

@ -1,24 +1,27 @@
const {pick} = require('lodash');
const marked = require('marked');
const envCi = require('env-ci');
const hookStd = require('hook-std');
const semver = require('semver');
const AggregateError = require('aggregate-error');
import {createRequire} from 'node:module';
import {pick} from 'lodash-es';
import * as marked from 'marked';
import envCi from 'env-ci';
import {hookStdout} from 'hook-std';
import semver from 'semver';
import AggregateError from 'aggregate-error';
import hideSensitive from './lib/hide-sensitive.js';
import getConfig from './lib/get-config.js';
import verify from './lib/verify.js';
import getNextVersion from './lib/get-next-version.js';
import getCommits from './lib/get-commits.js';
import getLastRelease from './lib/get-last-release.js';
import getReleaseToAdd from './lib/get-release-to-add.js';
import {extractErrors, makeTag} from './lib/utils.js';
import getGitAuthUrl from './lib/get-git-auth-url.js';
import getBranches from './lib/branches/index.js';
import getLogger from './lib/get-logger.js';
import {addNote, getGitHead, getTagHead, isBranchUpToDate, push, pushNotes, tag, verifyAuth} from './lib/git.js';
import getError from './lib/get-error.js';
import {COMMIT_EMAIL, COMMIT_NAME} from './lib/definitions/constants.js';
const require = createRequire(import.meta.url);
const pkg = require('./package.json');
const hideSensitive = require('./lib/hide-sensitive');
const getConfig = require('./lib/get-config');
const verify = require('./lib/verify');
const getNextVersion = require('./lib/get-next-version');
const getCommits = require('./lib/get-commits');
const getLastRelease = require('./lib/get-last-release');
const getReleaseToAdd = require('./lib/get-release-to-add');
const {extractErrors, makeTag} = require('./lib/utils');
const getGitAuthUrl = require('./lib/get-git-auth-url');
const getBranches = require('./lib/branches');
const getLogger = require('./lib/get-logger');
const {verifyAuth, isBranchUpToDate, getGitHead, tag, push, pushNotes, getTagHead, addNote} = require('./lib/git');
const getError = require('./lib/get-error');
const {COMMIT_NAME, COMMIT_EMAIL} = require('./lib/definitions/constants');
let markedOptionsSet = false;
async function terminalOutput(text) {
@ -41,7 +44,7 @@ async function run(context, plugins) {
logger.warn('This run was not triggered in a known CI environment, running in dry-run mode.');
options.dryRun = true;
} else {
// When running on CI, set the commits author and commiter info and prevent the `git` CLI to prompt for username/password. See #703.
// When running on CI, set the commits author and committer info and prevent the `git` CLI to prompt for username/password. See #703.
Object.assign(env, {
GIT_AUTHOR_NAME: COMMIT_NAME,
GIT_AUTHOR_EMAIL: COMMIT_EMAIL,
@ -247,8 +250,8 @@ async function callFail(context, plugins, err) {
}
}
module.exports = async (cliOptions = {}, {cwd = process.cwd(), env = process.env, stdout, stderr} = {}) => {
const {unhook} = hookStd(
export default async (cliOptions = {}, {cwd = process.cwd(), env = process.env, stdout, stderr} = {}) => {
const {unhook} = hookStdout(
{silent: false, streams: [process.stdout, process.stderr, stdout, stderr].filter(Boolean)},
hideSensitive(env)
);
@ -278,4 +281,4 @@ module.exports = async (cliOptions = {}, {cwd = process.cwd(), env = process.env
unhook();
throw error;
}
};
}

View File

@ -1,8 +1,8 @@
const {isString, remove, omit, mapValues, template} = require('lodash');
const micromatch = require('micromatch');
const {getBranches} = require('../git');
import {isString, mapValues, omit, remove, template} from 'lodash-es';
import micromatch from 'micromatch';
import {getBranches} from '../git.js';
module.exports = async (repositoryUrl, {cwd}, branches) => {
export default async (repositoryUrl, {cwd}, branches) => {
const gitBranches = await getBranches(repositoryUrl, {cwd});
return branches.reduce(
@ -15,4 +15,4 @@ module.exports = async (repositoryUrl, {cwd}, branches) => {
],
[]
);
};
}

View File

@ -1,10 +1,13 @@
const {template, escapeRegExp} = require('lodash');
const semver = require('semver');
const pReduce = require('p-reduce');
const debug = require('debug')('semantic-release:get-tags');
const {getTags, getNote} = require('../../lib/git');
import {escapeRegExp, template} from 'lodash-es';
import semver from 'semver';
import pReduce from 'p-reduce';
import debugTags from 'debug';
import {getNote, getTags} from '../../lib/git.js';
module.exports = async ({cwd, env, options: {tagFormat}}, branches) => {
const debug = debugTags('semantic-release:get-tags');
export default async ({cwd, env, options: {tagFormat}}, branches) => {
// Generate a regex to parse tags formatted with `tagFormat`
// by replacing the `version` variable in the template by `(.+)`.
// The `tagFormat` is compiled with space as the `version` as it's an invalid tag character,
@ -30,4 +33,4 @@ module.exports = async ({cwd, env, options: {tagFormat}}, branches) => {
},
[]
);
};
}

View File

@ -1,14 +1,14 @@
const {isString, isRegExp} = require('lodash');
const AggregateError = require('aggregate-error');
const pEachSeries = require('p-each-series');
const DEFINITIONS = require('../definitions/branches');
const getError = require('../get-error');
const {fetch, fetchNotes, verifyBranchName} = require('../git');
const expand = require('./expand');
const getTags = require('./get-tags');
const normalize = require('./normalize');
import {isRegExp, isString} from 'lodash-es';
import AggregateError from 'aggregate-error';
import pEachSeries from 'p-each-series';
import * as DEFINITIONS from '../definitions/branches.js';
import getError from '../get-error.js';
import {fetch, fetchNotes, verifyBranchName} from '../git.js';
import expand from './expand.js';
import getTags from './get-tags.js';
import * as normalize from './normalize.js';
module.exports = async (repositoryUrl, ciBranch, context) => {
export default async (repositoryUrl, ciBranch, context) => {
const {cwd, env} = context;
const remoteBranches = await expand(
@ -68,4 +68,4 @@ module.exports = async (repositoryUrl, ciBranch, context) => {
}
return [...result.maintenance, ...result.release, ...result.prerelease];
};
}

View File

@ -1,19 +1,18 @@
const {sortBy, isNil} = require('lodash');
const semverDiff = require('semver-diff');
const {FIRST_RELEASE, RELEASE_TYPE} = require('../definitions/constants');
const {
tagsToVersions,
isMajorRange,
getUpperBound,
getLowerBound,
highest,
lowest,
getLatestVersion,
import {isNil, sortBy} from 'lodash-es';
import semverDiff from 'semver-diff';
import {FIRST_RELEASE, RELEASE_TYPE} from '../definitions/constants.js';
import {
getFirstVersion,
getRange,
} = require('../utils');
getLatestVersion,
getLowerBound, getRange,
getUpperBound,
highest,
isMajorRange,
lowest,
tagsToVersions
} from '../utils.js';
function maintenance({maintenance, release}) {
export function maintenance({maintenance, release}) {
return sortBy(
maintenance.map(({name, range, channel, ...rest}) => ({
...rest,
@ -55,7 +54,7 @@ function maintenance({maintenance, release}) {
});
}
function release({release}) {
export function release({release}) {
if (release.length === 0) {
return release;
}
@ -89,7 +88,7 @@ function release({release}) {
});
}
function prerelease({prerelease}) {
export function prerelease({prerelease}) {
return prerelease.map(({name, prerelease, channel, tags, ...rest}) => {
const preid = prerelease === true ? name : prerelease;
return {
@ -102,5 +101,3 @@ function prerelease({prerelease}) {
};
});
}
module.exports = {maintenance, release, prerelease};

View File

@ -1,24 +1,22 @@
const {isNil, uniqBy} = require('lodash');
const semver = require('semver');
const {isMaintenanceRange} = require('../utils');
import {isNil, uniqBy} from 'lodash-es';
import semver from 'semver';
import {isMaintenanceRange} from '../utils.js';
const maintenance = {
export const maintenance = {
filter: ({name, range}) => (!isNil(range) && range !== false) || isMaintenanceRange(name),
branchValidator: ({range}) => (isNil(range) ? true : isMaintenanceRange(range)),
branchesValidator: (branches) => uniqBy(branches, ({range}) => semver.validRange(range)).length === branches.length,
};
const prerelease = {
export const prerelease = {
filter: ({prerelease}) => !isNil(prerelease) && prerelease !== false,
branchValidator: ({name, prerelease}) =>
Boolean(prerelease) && Boolean(semver.valid(`1.0.0-${prerelease === true ? name : prerelease}.1`)),
branchesValidator: (branches) => uniqBy(branches, 'prerelease').length === branches.length,
};
const release = {
export const release = {
// eslint-disable-next-line unicorn/no-fn-reference-in-iterator
filter: (branch) => !maintenance.filter(branch) && !prerelease.filter(branch),
branchesValidator: (branches) => branches.length <= 3 && branches.length > 0,
};
module.exports = {maintenance, prerelease, release};

View File

@ -1,29 +1,17 @@
const RELEASE_TYPE = ['patch', 'minor', 'major'];
export const RELEASE_TYPE = ['patch', 'minor', 'major'];
const FIRST_RELEASE = '1.0.0';
export const FIRST_RELEASE = '1.0.0';
const FIRSTPRERELEASE = '1';
export const FIRSTPRERELEASE = '1';
const COMMIT_NAME = 'semantic-release-bot';
export const COMMIT_NAME = 'semantic-release-bot';
const COMMIT_EMAIL = 'semantic-release-bot@martynus.net';
export const COMMIT_EMAIL = 'semantic-release-bot@martynus.net';
const RELEASE_NOTES_SEPARATOR = '\n\n';
export const RELEASE_NOTES_SEPARATOR = '\n\n';
const SECRET_REPLACEMENT = '[secure]';
export const SECRET_REPLACEMENT = '[secure]';
const SECRET_MIN_SIZE = 5;
export const SECRET_MIN_SIZE = 5;
const GIT_NOTE_REF = 'semantic-release';
module.exports = {
RELEASE_TYPE,
FIRST_RELEASE,
FIRSTPRERELEASE,
COMMIT_NAME,
COMMIT_EMAIL,
RELEASE_NOTES_SEPARATOR,
SECRET_REPLACEMENT,
SECRET_MIN_SIZE,
GIT_NOTE_REF,
};
export const GIT_NOTE_REF = 'semantic-release';

View File

@ -1,7 +1,10 @@
const {inspect} = require('util');
const {toLower, isString, trim} = require('lodash');
import {inspect} from 'node:util';
import {createRequire} from 'node:module';
import {isString, toLower, trim} from 'lodash-es';
import {RELEASE_TYPE} from './constants.js';
const require = createRequire(import.meta.url);
const pkg = require('../../package.json');
const {RELEASE_TYPE} = require('./constants');
const [homepage] = pkg.homepage.split('#');
const stringify = (object) =>
@ -10,16 +13,19 @@ const linkify = (file) => `${homepage}/blob/master/${file}`;
const wordsList = (words) =>
`${words.slice(0, -1).join(', ')}${words.length > 1 ? ` or ${words[words.length - 1]}` : trim(words[0])}`;
module.exports = {
ENOGITREPO: ({cwd}) => ({
export function ENOGITREPO({cwd}) {
return {
message: 'Not running from a git repository.',
details: `The \`semantic-release\` command must be executed from a Git repository.
The current working directory is \`${cwd}\`.
Please verify your CI configuration to make sure the \`semantic-release\` command is executed from the root of the cloned repository.`,
}),
ENOREPOURL: () => ({
};
}
export function ENOREPOURL() {
return {
message: 'The `repositoryUrl` option is required.',
details: `The [repositoryUrl option](${linkify(
'docs/usage/configuration.md#repositoryurl'
@ -28,8 +34,11 @@ Please verify your CI configuration to make sure the \`semantic-release\` comman
Please make sure to add the \`repositoryUrl\` to the [semantic-release configuration] (${linkify(
'docs/usage/configuration.md'
)}).`,
}),
EGITNOPERMISSION: ({options: {repositoryUrl}, branch: {name}}) => ({
};
}
export function EGITNOPERMISSION({options: {repositoryUrl}, branch: {name}}) {
return {
message: 'Cannot push to the Git repository.',
details: `**semantic-release** cannot push the version tag to the branch \`${name}\` on the remote Git repository with URL \`${repositoryUrl}\`.
@ -39,40 +48,55 @@ This can be caused by:
- or missing push permission for the user configured via the [Git credentials on your CI environment](${linkify(
'docs/usage/ci-configuration.md#authentication'
)})`,
}),
EINVALIDTAGFORMAT: ({options: {tagFormat}}) => ({
};
}
export function EINVALIDTAGFORMAT({options: {tagFormat}}) {
return {
message: 'Invalid `tagFormat` option.',
details: `The [tagFormat](${linkify(
'docs/usage/configuration.md#tagformat'
)}) must compile to a [valid Git reference](https://git-scm.com/docs/git-check-ref-format#_description).
Your configuration for the \`tagFormat\` option is \`${stringify(tagFormat)}\`.`,
}),
ETAGNOVERSION: ({options: {tagFormat}}) => ({
};
}
export function ETAGNOVERSION({options: {tagFormat}}) {
return {
message: 'Invalid `tagFormat` option.',
details: `The [tagFormat](${linkify(
'docs/usage/configuration.md#tagformat'
)}) option must contain the variable \`version\` exactly once.
Your configuration for the \`tagFormat\` option is \`${stringify(tagFormat)}\`.`,
}),
EPLUGINCONF: ({type, required, pluginConf}) => ({
};
}
export function EPLUGINCONF({type, required, pluginConf}) {
return {
message: `The \`${type}\` plugin configuration is invalid.`,
details: `The [${type} plugin configuration](${linkify(`docs/usage/plugins.md#${toLower(type)}-plugin`)}) ${
required ? 'is required and ' : ''
} must be a single or an array of plugins definition. A plugin definition is an npm module name, optionally wrapped in an array with an object.
Your configuration for the \`${type}\` plugin is \`${stringify(pluginConf)}\`.`,
}),
EPLUGINSCONF: ({plugin}) => ({
};
}
export function EPLUGINSCONF({plugin}) {
return {
message: 'The `plugins` configuration is invalid.',
details: `The [plugins](${linkify(
'docs/usage/configuration.md#plugins'
)}) option must be an array of plugin definitions. A plugin definition is an npm module name, optionally wrapped in an array with an object.
The invalid configuration is \`${stringify(plugin)}\`.`,
}),
EPLUGIN: ({pluginName, type}) => ({
};
}
export function EPLUGIN({pluginName, type}) {
return {
message: `A plugin configured in the step ${type} is not a valid semantic-release plugin.`,
details: `A valid \`${type}\` **semantic-release** plugin must be a function or an object with a function in the property \`${type}\`.
@ -81,8 +105,11 @@ The plugin \`${pluginName}\` doesn't have the property \`${type}\` and cannot be
Please refer to the \`${pluginName}\` and [semantic-release plugins configuration](${linkify(
'docs/usage/plugins.md'
)}) documentation for more details.`,
}),
EANALYZECOMMITSOUTPUT: ({result, pluginName}) => ({
};
}
export function EANALYZECOMMITSOUTPUT({result, pluginName}) {
return {
message: 'The `analyzeCommits` plugin returned an invalid value. It must return a valid semver release type.',
details: `The \`analyzeCommits\` plugin must return a valid [semver](https://semver.org) release type. The valid values are: ${RELEASE_TYPE.map(
(type) => `\`${type}\``
@ -97,8 +124,11 @@ We recommend to report the issue to the \`${pluginName}\` authors, providing the
- A link to the **semantic-release** plugin developer guide: [${linkify('docs/developer-guide/plugin.md')}](${linkify(
'docs/developer-guide/plugin.md'
)})`,
}),
EGENERATENOTESOUTPUT: ({result, pluginName}) => ({
};
}
export function EGENERATENOTESOUTPUT({result, pluginName}) {
return {
message: 'The `generateNotes` plugin returned an invalid value. It must return a `String`.',
details: `The \`generateNotes\` plugin must return a \`String\`.
@ -111,8 +141,11 @@ We recommend to report the issue to the \`${pluginName}\` authors, providing the
- A link to the **semantic-release** plugin developer guide: [${linkify('docs/developer-guide/plugin.md')}](${linkify(
'docs/developer-guide/plugin.md'
)})`,
}),
EPUBLISHOUTPUT: ({result, pluginName}) => ({
};
}
export function EPUBLISHOUTPUT({result, pluginName}) {
return {
message: 'A `publish` plugin returned an invalid value. It must return an `Object`.',
details: `The \`publish\` plugins must return an \`Object\`.
@ -125,8 +158,11 @@ We recommend to report the issue to the \`${pluginName}\` authors, providing the
- A link to the **semantic-release** plugin developer guide: [${linkify('docs/developer-guide/plugin.md')}](${linkify(
'docs/developer-guide/plugin.md'
)})`,
}),
EADDCHANNELOUTPUT: ({result, pluginName}) => ({
};
}
export function EADDCHANNELOUTPUT({result, pluginName}) {
return {
message: 'A `addChannel` plugin returned an invalid value. It must return an `Object`.',
details: `The \`addChannel\` plugins must return an \`Object\`.
@ -139,48 +175,66 @@ We recommend to report the issue to the \`${pluginName}\` authors, providing the
- A link to the **semantic-release** plugin developer guide: [${linkify('docs/developer-guide/plugin.md')}](${linkify(
'docs/developer-guide/plugin.md'
)})`,
}),
EINVALIDBRANCH: ({branch}) => ({
};
}
export function EINVALIDBRANCH({branch}) {
return {
message: 'A branch is invalid in the `branches` configuration.',
details: `Each branch in the [branches configuration](${linkify(
'docs/usage/configuration.md#branches'
)}) must be either a string, a regexp or an object with a \`name\` property.
Your configuration for the problematic branch is \`${stringify(branch)}\`.`,
}),
EINVALIDBRANCHNAME: ({branch}) => ({
};
}
export function EINVALIDBRANCHNAME({branch}) {
return {
message: 'A branch name is invalid in the `branches` configuration.',
details: `Each branch in the [branches configuration](${linkify(
'docs/usage/configuration.md#branches'
)}) must be a [valid Git reference](https://git-scm.com/docs/git-check-ref-format#_description).
Your configuration for the problematic branch is \`${stringify(branch)}\`.`,
}),
EDUPLICATEBRANCHES: ({duplicates}) => ({
};
}
export function EDUPLICATEBRANCHES({duplicates}) {
return {
message: 'The `branches` configuration has duplicate branches.',
details: `Each branch in the [branches configuration](${linkify(
'docs/usage/configuration.md#branches'
)}) must have a unique name.
Your configuration contains duplicates for the following branch names: \`${stringify(duplicates)}\`.`,
}),
EMAINTENANCEBRANCH: ({branch}) => ({
};
}
export function EMAINTENANCEBRANCH({branch}) {
return {
message: 'A maintenance branch is invalid in the `branches` configuration.',
details: `Each maintenance branch in the [branches configuration](${linkify(
'docs/usage/configuration.md#branches'
)}) must have a \`range\` property formatted like \`N.x\`, \`N.x.x\` or \`N.N.x\` (\`N\` is a number).
Your configuration for the problematic branch is \`${stringify(branch)}\`.`,
}),
EMAINTENANCEBRANCHES: ({branches}) => ({
};
}
export function EMAINTENANCEBRANCHES({branches}) {
return {
message: 'The maintenance branches are invalid in the `branches` configuration.',
details: `Each maintenance branch in the [branches configuration](${linkify(
'docs/usage/configuration.md#branches'
)}) must have a unique \`range\` property.
Your configuration for the problematic branches is \`${stringify(branches)}\`.`,
}),
ERELEASEBRANCHES: ({branches}) => ({
};
}
export function ERELEASEBRANCHES({branches}) {
return {
message: 'The release branches are invalid in the `branches` configuration.',
details: `A minimum of 1 and a maximum of 3 release branches are required in the [branches configuration](${linkify(
'docs/usage/configuration.md#branches'
@ -189,24 +243,33 @@ Your configuration for the problematic branches is \`${stringify(branches)}\`.`,
This may occur if your repository does not have a release branch, such as \`master\`.
Your configuration for the problematic branches is \`${stringify(branches)}\`.`,
}),
EPRERELEASEBRANCH: ({branch}) => ({
};
}
export function EPRERELEASEBRANCH({branch}) {
return {
message: 'A pre-release branch configuration is invalid in the `branches` configuration.',
details: `Each pre-release branch in the [branches configuration](${linkify(
'docs/usage/configuration.md#branches'
)}) must have a \`prerelease\` property valid per the [Semantic Versioning Specification](https://semver.org/#spec-item-9). If the \`prerelease\` property is set to \`true\`, then the \`name\` property is used instead.
Your configuration for the problematic branch is \`${stringify(branch)}\`.`,
}),
EPRERELEASEBRANCHES: ({branches}) => ({
};
}
export function EPRERELEASEBRANCHES({branches}) {
return {
message: 'The pre-release branches are invalid in the `branches` configuration.',
details: `Each pre-release branch in the [branches configuration](${linkify(
'docs/usage/configuration.md#branches'
)}) must have a unique \`prerelease\` property. If the \`prerelease\` property is set to \`true\`, then the \`name\` property is used instead.
Your configuration for the problematic branches is \`${stringify(branches)}\`.`,
}),
EINVALIDNEXTVERSION: ({nextRelease: {version}, branch: {name, range}, commits, validBranches}) => ({
};
}
export function EINVALIDNEXTVERSION({nextRelease: {version}, branch: {name, range}, commits, validBranches}) {
return {
message: `The release \`${version}\` on branch \`${name}\` cannot be published as it is out of range.`,
details: `Based on the releases published on other branches, only versions within the range \`${range}\` can be published from branch \`${name}\`.
@ -220,13 +283,16 @@ ${
A valid branch could be ${wordsList(validBranches.map(({name}) => `\`${name}\``))}.
See the [workflow configuration documentation](${linkify('docs/usage/workflow-configuration.md')}) for more details.`,
}),
EINVALIDMAINTENANCEMERGE: ({nextRelease: {channel, gitTag, version}, branch: {mergeRange, name}}) => ({
};
}
export function EINVALIDMAINTENANCEMERGE({nextRelease: {channel, gitTag, version}, branch: {mergeRange, name}}) {
return {
message: `The release \`${version}\` on branch \`${name}\` cannot be published as it is out of range.`,
details: `Only releases within the range \`${mergeRange}\` can be merged into the maintenance branch \`${name}\` and published to the \`${channel}\` distribution channel.
The branch \`${name}\` head should be [reset](https://git-scm.com/docs/git-reset) to a previous commit so the commit with tag \`${gitTag}\` is removed from the branch history.
See the [workflow configuration documentation](${linkify('docs/usage/workflow-configuration.md')}) for more details.`,
}),
};
}

View File

@ -1,12 +1,12 @@
/* eslint require-atomic-updates: off */
const {isString, isPlainObject} = require('lodash');
const {getGitHead} = require('../git');
const hideSensitive = require('../hide-sensitive');
const {hideSensitiveValues} = require('../utils');
const {RELEASE_TYPE, RELEASE_NOTES_SEPARATOR} = require('./constants');
import {isPlainObject, isString} from 'lodash-es';
import {getGitHead} from '../git.js';
import hideSensitive from '../hide-sensitive.js';
import {hideSensitiveValues} from '../utils.js';
import {RELEASE_NOTES_SEPARATOR, RELEASE_TYPE} from './constants.js';
module.exports = {
export default {
verifyConditions: {
required: false,
dryRun: true,

View File

@ -1,5 +1,7 @@
const debug = require('debug')('semantic-release:get-commits');
const {getCommits} = require('./git');
import debugCommits from 'debug';
import {getCommits} from './git.js';
const debug = debugCommits('semantic-release:get-commits');
/**
* Retrieve the list of commits on the current branch since the commit sha associated with the last release, or all the commits of the current branch if there is no last released version.
@ -8,7 +10,7 @@ const {getCommits} = require('./git');
*
* @return {Promise<Array<Object>>} The list of commits on the branch `branch` since the last release.
*/
module.exports = async ({cwd, env, lastRelease: {gitHead: from}, nextRelease: {gitHead: to = 'HEAD'} = {}, logger}) => {
export default async ({cwd, env, lastRelease: {gitHead: from}, nextRelease: {gitHead: to = 'HEAD'} = {}, logger}) => {
if (from) {
debug('Use from: %s', from);
} else {
@ -20,4 +22,4 @@ module.exports = async ({cwd, env, lastRelease: {gitHead: from}, nextRelease: {g
logger.log(`Found ${commits.length} commits since last release`);
debug('Parsed commits: %o', commits);
return commits;
};
}

View File

@ -1,16 +1,24 @@
const {castArray, pickBy, isNil, isString, isPlainObject} = require('lodash');
const readPkgUp = require('read-pkg-up');
const {cosmiconfig} = require('cosmiconfig');
const resolveFrom = require('resolve-from');
const debug = require('debug')('semantic-release:config');
const {repoUrl} = require('./git');
const PLUGINS_DEFINITIONS = require('./definitions/plugins');
const plugins = require('./plugins');
const {validatePlugin, parseConfig} = require('./plugins/utils');
import {dirname, resolve} from 'node:path';
import {fileURLToPath} from 'node:url';
import {createRequire} from 'node:module';
import {castArray, isNil, isPlainObject, isString, pickBy} from 'lodash-es';
import {readPackageUp} from 'read-pkg-up';
import {cosmiconfig} from 'cosmiconfig';
import resolveFrom from 'resolve-from';
import debugConfig from 'debug';
import {repoUrl} from './git.js';
import PLUGINS_DEFINITIONS from './definitions/plugins.js';
import plugins from './plugins/index.js';
import {parseConfig, validatePlugin} from './plugins/utils.js';
const debug = debugConfig('semantic-release:config');
const __dirname = dirname(fileURLToPath(import.meta.url));
const require = createRequire(import.meta.url);
const CONFIG_NAME = 'release';
module.exports = async (context, cliOptions) => {
export default async (context, cliOptions) => {
const {cwd, env} = context;
const {config, filepath} = (await cosmiconfig(CONFIG_NAME).search(cwd)) || {};
@ -25,11 +33,12 @@ module.exports = async (context, cliOptions) => {
if (extendPaths) {
// If `extends` is defined, load and merge each shareable config with `options`
options = {
...castArray(extendPaths).reduce((result, extendPath) => {
...await (castArray(extendPaths).reduce(async(eventualResult, extendPath) => {
const result = await eventualResult;
const extendsOptions = require(resolveFrom.silent(__dirname, extendPath) || resolveFrom(cwd, extendPath));
// For each plugin defined in a shareable config, save in `pluginsPath` the extendable config path,
// so those plugin will be loaded relatively to the config file
// so those plugins will be loaded relative to the config file
Object.entries(extendsOptions)
.filter(([, value]) => Boolean(value))
.reduce((pluginsPath, [option, value]) => {
@ -47,7 +56,7 @@ module.exports = async (context, cliOptions) => {
}, pluginsPath);
return {...result, ...extendsOptions};
}, {}),
}, {})),
...options,
};
}
@ -70,7 +79,7 @@ module.exports = async (context, cliOptions) => {
'@semantic-release/npm',
'@semantic-release/github',
],
// Remove `null` and `undefined` options so they can be replaced with default ones
// Remove `null` and `undefined` options, so they can be replaced with default ones
...pickBy(options, (option) => !isNil(option)),
...(options.branches ? {branches: castArray(options.branches)} : {}),
};
@ -82,9 +91,9 @@ module.exports = async (context, cliOptions) => {
debug('options values: %O', options);
return {options, plugins: await plugins({...context, options}, pluginsPath)};
};
}
async function pkgRepoUrl(options) {
const {packageJson} = (await readPkgUp(options)) || {};
const {packageJson} = (await readPackageUp(options)) || {};
return packageJson && (isPlainObject(packageJson.repository) ? packageJson.repository.url : packageJson.repository);
}

View File

@ -1,7 +1,7 @@
const SemanticReleaseError = require('@semantic-release/error');
const ERROR_DEFINITIONS = require('./definitions/errors');
import SemanticReleaseError from '@semantic-release/error';
import * as ERROR_DEFINITIONS from './definitions/errors.js';
module.exports = (code, ctx = {}) => {
export default (code, ctx = {}) => {
const {message, details} = ERROR_DEFINITIONS[code](ctx);
return new SemanticReleaseError(message, code, details);
};
}

View File

@ -1,8 +1,10 @@
const {parse, format} = require('url'); // eslint-disable-line node/no-deprecated-api
const {isNil} = require('lodash');
const hostedGitInfo = require('hosted-git-info');
const {verifyAuth} = require('./git');
const debug = require('debug')('semantic-release:get-git-auth-url');
import {format, parse} from 'node:url';
import {isNil} from 'lodash-es';
import hostedGitInfo from 'hosted-git-info';
import debugAuthUrl from 'debug';
import {verifyAuth} from './git.js';
const debug = debugAuthUrl('semantic-release:get-git-auth-url');
/**
* Machinery to format a repository URL with the given credentials
@ -57,7 +59,7 @@ async function ensureValidAuthUrl({cwd, env, branch}, authUrl) {
*
* @return {String} The formatted Git repository URL.
*/
module.exports = async (context) => {
export default async (context) => {
const {cwd, env, branch} = context;
const GIT_TOKENS = {
GIT_CREDENTIALS: undefined,
@ -119,4 +121,4 @@ module.exports = async (context) => {
}
return repositoryUrl;
};
}

View File

@ -1,6 +1,6 @@
const {isUndefined} = require('lodash');
const semver = require('semver');
const {makeTag, isSameChannel} = require('./utils');
import {isUndefined} from 'lodash-es';
import semver from 'semver';
import {isSameChannel, makeTag} from './utils.js';
/**
* Last release.
@ -18,7 +18,7 @@ const {makeTag, isSameChannel} = require('./utils');
*
* - Filter out the branch tags that are not valid semantic version
* - Sort the versions
* - Retrive the highest version
* - Retrieve the highest version
*
* @param {Object} context semantic-release context.
* @param {Object} params Function parameters.
@ -26,7 +26,7 @@ const {makeTag, isSameChannel} = require('./utils');
*
* @return {LastRelease} The last tagged release or empty object if none is found.
*/
module.exports = ({branch, options: {tagFormat}}, {before} = {}) => {
export default ({branch, options: {tagFormat}}, {before} = {}) => {
const [{version, gitTag, channels} = {}] = branch.tags
.filter(
(tag) =>
@ -41,4 +41,4 @@ module.exports = ({branch, options: {tagFormat}}, {before} = {}) => {
}
return {};
};
}

View File

@ -1,7 +1,9 @@
const {Signale} = require('signale');
const figures = require('figures');
import signale from 'signale';
import figures from 'figures';
module.exports = ({stdout, stderr}) =>
const {Signale} = signale;
export default ({stdout, stderr}) =>
new Signale({
config: {displayTimestamp: true, underlineMessage: false, displayLabel: false},
disabled: false,
@ -13,4 +15,4 @@ module.exports = ({stdout, stderr}) =>
log: {badge: figures.info, color: 'magenta', label: '', stream: [stdout]},
success: {badge: figures.tick, color: 'green', label: '', stream: [stdout]},
},
});
})

View File

@ -1,8 +1,8 @@
const semver = require('semver');
const {FIRST_RELEASE, FIRSTPRERELEASE} = require('./definitions/constants');
const {isSameChannel, getLatestVersion, tagsToVersions, highest} = require('./utils');
import semver from 'semver';
import {FIRST_RELEASE, FIRSTPRERELEASE} from './definitions/constants.js';
import {getLatestVersion, highest, isSameChannel, tagsToVersions} from './utils.js';
module.exports = ({branch, nextRelease: {type, channel}, lastRelease, logger}) => {
export default ({branch, nextRelease: {type, channel}, lastRelease, logger}) => {
let version;
if (lastRelease.version) {
const {major, minor, patch} = semver.parse(lastRelease.version);
@ -32,4 +32,4 @@ module.exports = ({branch, nextRelease: {type, channel}, lastRelease, logger}) =
}
return version;
};
}

View File

@ -1,8 +1,8 @@
const {uniqBy, intersection} = require('lodash');
const semver = require('semver');
const semverDiff = require('semver-diff');
const getLastRelease = require('./get-last-release');
const {makeTag, getLowerBound} = require('./utils');
import {intersection, uniqBy} from 'lodash-es';
import semver from 'semver';
import semverDiff from 'semver-diff';
import getLastRelease from './get-last-release.js';
import {getLowerBound, makeTag} from './utils.js';
/**
* Find releases that have been merged from a higher branch but not added on the channel of the current branch.
@ -11,7 +11,7 @@ const {makeTag, getLowerBound} = require('./utils');
*
* @return {Array<Object>} Last release and next release to be added on the channel of the current branch.
*/
module.exports = (context) => {
export default (context) => {
const {
branch,
branches,
@ -57,4 +57,4 @@ module.exports = (context) => {
},
};
}
};
}

View File

@ -1,8 +1,10 @@
const gitLogParser = require('git-log-parser');
const getStream = require('get-stream');
const execa = require('execa');
const debug = require('debug')('semantic-release:git');
const {GIT_NOTE_REF} = require('./definitions/constants');
import gitLogParser from 'git-log-parser';
import getStream from 'get-stream';
import {execa} from 'execa';
import debugGit from 'debug';
import {GIT_NOTE_REF} from './definitions/constants.js';
const debug = debugGit('semantic-release:git');
Object.assign(gitLogParser.fields, {hash: 'H', message: 'B', gitTags: 'd', committerDate: {key: 'ci', type: Date}});
@ -14,7 +16,7 @@ Object.assign(gitLogParser.fields, {hash: 'H', message: 'B', gitTags: 'd', commi
*
* @return {String} The commit sha of the tag in parameter or `null`.
*/
async function getTagHead(tagName, execaOptions) {
export async function getTagHead(tagName, execaOptions) {
return (await execa('git', ['rev-list', '-1', tagName], execaOptions)).stdout;
}
@ -27,7 +29,7 @@ async function getTagHead(tagName, execaOptions) {
* @return {Array<String>} List of git tags.
* @throws {Error} If the `git` command fails.
*/
async function getTags(branch, execaOptions) {
export async function getTags(branch, execaOptions) {
return (await execa('git', ['tag', '--merged', branch], execaOptions)).stdout
.split('\n')
.map((tag) => tag.trim())
@ -42,7 +44,7 @@ async function getTags(branch, execaOptions) {
* @param {Object} [execaOpts] Options to pass to `execa`.
* @return {Promise<Array<Object>>} The list of commits between `from` and `to`.
*/
async function getCommits(from, to, execaOptions) {
export async function getCommits(from, to, execaOptions) {
return (
await getStream.array(
gitLogParser.parse(
@ -62,7 +64,7 @@ async function getCommits(from, to, execaOptions) {
* @return {Array<String>} List of git branches.
* @throws {Error} If the `git` command fails.
*/
async function getBranches(repositoryUrl, execaOptions) {
export async function getBranches(repositoryUrl, execaOptions) {
return (await execa('git', ['ls-remote', '--heads', repositoryUrl], execaOptions)).stdout
.split('\n')
.filter(Boolean)
@ -77,7 +79,7 @@ async function getBranches(repositoryUrl, execaOptions) {
*
* @return {Boolean} `true` if the reference exists, falsy otherwise.
*/
async function isRefExists(ref, execaOptions) {
export async function isRefExists(ref, execaOptions) {
try {
return (await execa('git', ['rev-parse', '--verify', ref], execaOptions)).exitCode === 0;
} catch (error) {
@ -99,7 +101,7 @@ async function isRefExists(ref, execaOptions) {
* @param {String} branch The repository branch to fetch.
* @param {Object} [execaOpts] Options to pass to `execa`.
*/
async function fetch(repositoryUrl, branch, ciBranch, execaOptions) {
export async function fetch(repositoryUrl, branch, ciBranch, execaOptions) {
const isDetachedHead =
(await execa('git', ['rev-parse', '--abbrev-ref', 'HEAD'], {...execaOptions, reject: false})).stdout === 'HEAD';
@ -137,7 +139,7 @@ async function fetch(repositoryUrl, branch, ciBranch, execaOptions) {
* @param {String} repositoryUrl The remote repository URL.
* @param {Object} [execaOpts] Options to pass to `execa`.
*/
async function fetchNotes(repositoryUrl, execaOptions) {
export async function fetchNotes(repositoryUrl, execaOptions) {
try {
await execa(
'git',
@ -159,7 +161,7 @@ async function fetchNotes(repositoryUrl, execaOptions) {
*
* @return {String} the sha of the HEAD commit.
*/
async function getGitHead(execaOptions) {
export async function getGitHead(execaOptions) {
return (await execa('git', ['rev-parse', 'HEAD'], execaOptions)).stdout;
}
@ -170,7 +172,7 @@ async function getGitHead(execaOptions) {
*
* @return {string} The value of the remote git URL.
*/
async function repoUrl(execaOptions) {
export async function repoUrl(execaOptions) {
try {
return (await execa('git', ['config', '--get', 'remote.origin.url'], execaOptions)).stdout;
} catch (error) {
@ -185,7 +187,7 @@ async function repoUrl(execaOptions) {
*
* @return {Boolean} `true` if the current working directory is in a git repository, falsy otherwise.
*/
async function isGitRepo(execaOptions) {
export async function isGitRepo(execaOptions) {
try {
return (await execa('git', ['rev-parse', '--git-dir'], execaOptions)).exitCode === 0;
} catch (error) {
@ -202,7 +204,7 @@ async function isGitRepo(execaOptions) {
*
* @throws {Error} if not authorized to push.
*/
async function verifyAuth(repositoryUrl, branch, execaOptions) {
export async function verifyAuth(repositoryUrl, branch, execaOptions) {
try {
await execa('git', ['push', '--dry-run', '--no-verify', repositoryUrl, `HEAD:${branch}`], execaOptions);
} catch (error) {
@ -220,7 +222,7 @@ async function verifyAuth(repositoryUrl, branch, execaOptions) {
*
* @throws {Error} if the tag creation failed.
*/
async function tag(tagName, ref, execaOptions) {
export async function tag(tagName, ref, execaOptions) {
await execa('git', ['tag', tagName, ref], execaOptions);
}
@ -232,7 +234,7 @@ async function tag(tagName, ref, execaOptions) {
*
* @throws {Error} if the push failed.
*/
async function push(repositoryUrl, execaOptions) {
export async function push(repositoryUrl, execaOptions) {
await execa('git', ['push', '--tags', repositoryUrl], execaOptions);
}
@ -244,7 +246,7 @@ async function push(repositoryUrl, execaOptions) {
*
* @throws {Error} if the push failed.
*/
async function pushNotes(repositoryUrl, execaOptions) {
export async function pushNotes(repositoryUrl, execaOptions) {
await execa('git', ['push', repositoryUrl, `refs/notes/${GIT_NOTE_REF}`], execaOptions);
}
@ -256,7 +258,7 @@ async function pushNotes(repositoryUrl, execaOptions) {
*
* @return {Boolean} `true` if valid, falsy otherwise.
*/
async function verifyTagName(tagName, execaOptions) {
export async function verifyTagName(tagName, execaOptions) {
try {
return (await execa('git', ['check-ref-format', `refs/tags/${tagName}`], execaOptions)).exitCode === 0;
} catch (error) {
@ -272,7 +274,7 @@ async function verifyTagName(tagName, execaOptions) {
*
* @return {Boolean} `true` if valid, falsy otherwise.
*/
async function verifyBranchName(branch, execaOptions) {
export async function verifyBranchName(branch, execaOptions) {
try {
return (await execa('git', ['check-ref-format', `refs/heads/${branch}`], execaOptions)).exitCode === 0;
} catch (error) {
@ -289,7 +291,7 @@ async function verifyBranchName(branch, execaOptions) {
*
* @return {Boolean} `true` if the HEAD of the current local branch is the same as the HEAD of the remote branch, falsy otherwise.
*/
async function isBranchUpToDate(repositoryUrl, branch, execaOptions) {
export async function isBranchUpToDate(repositoryUrl, branch, execaOptions) {
return (
(await getGitHead(execaOptions)) ===
(await execa('git', ['ls-remote', '--heads', repositoryUrl, branch], execaOptions)).stdout.match(/^(?<ref>\w+)?/)[1]
@ -304,7 +306,7 @@ async function isBranchUpToDate(repositoryUrl, branch, execaOptions) {
*
* @return {Object} the parsed JSON note if there is one, an empty object otherwise.
*/
async function getNote(ref, execaOptions) {
export async function getNote(ref, execaOptions) {
try {
return JSON.parse((await execa('git', ['notes', '--ref', GIT_NOTE_REF, 'show', ref], execaOptions)).stdout);
} catch (error) {
@ -324,28 +326,6 @@ async function getNote(ref, execaOptions) {
* @param {String} ref The Git reference to add the note to.
* @param {Object} [execaOpts] Options to pass to `execa`.
*/
async function addNote(note, ref, execaOptions) {
export async function addNote(note, ref, execaOptions) {
await execa('git', ['notes', '--ref', GIT_NOTE_REF, 'add', '-f', '-m', JSON.stringify(note), ref], execaOptions);
}
module.exports = {
getTagHead,
getTags,
getCommits,
getBranches,
isRefExists,
fetch,
fetchNotes,
getGitHead,
repoUrl,
isGitRepo,
verifyAuth,
tag,
push,
pushNotes,
verifyTagName,
isBranchUpToDate,
verifyBranchName,
getNote,
addNote,
};

View File

@ -1,7 +1,7 @@
const {escapeRegExp, size, isString} = require('lodash');
const {SECRET_REPLACEMENT, SECRET_MIN_SIZE} = require('./definitions/constants');
import {escapeRegExp, isString, size} from 'lodash-es';
import {SECRET_MIN_SIZE, SECRET_REPLACEMENT} from './definitions/constants.js';
module.exports = (env) => {
export default (env) => {
const toReplace = Object.keys(env).filter((envVar) => {
// https://github.com/semantic-release/semantic-release/issues/1558
if (envVar === 'GOPRIVATE') {
@ -17,4 +17,4 @@ module.exports = (env) => {
);
return (output) =>
output && isString(output) && toReplace.length > 0 ? output.toString().replace(regexp, SECRET_REPLACEMENT) : output;
};
}

View File

@ -1,12 +1,12 @@
const {identity, isPlainObject, omit, castArray, isNil, isString} = require('lodash');
const AggregateError = require('aggregate-error');
const getError = require('../get-error');
const PLUGINS_DEFINITIONS = require('../definitions/plugins');
const {validatePlugin, validateStep, loadPlugin, parseConfig} = require('./utils');
const pipeline = require('./pipeline');
const normalize = require('./normalize');
import {castArray, identity, isNil, isPlainObject, isString, omit} from 'lodash-es';
import AggregateError from 'aggregate-error';
import getError from '../get-error.js';
import PLUGINS_DEFINITIONS from '../definitions/plugins.js';
import {loadPlugin, parseConfig, validatePlugin, validateStep} from './utils.js';
import pipeline from './pipeline.js';
import normalize from './normalize.js';
module.exports = async (context, pluginsPath) => {
export default async (context, pluginsPath) => {
let {options, logger} = context;
const errors = [];
@ -100,4 +100,4 @@ module.exports = async (context, pluginsPath) => {
}
return pluginsConfig;
};
}

View File

@ -1,11 +1,13 @@
const {isPlainObject, isFunction, noop, cloneDeep, omit} = require('lodash');
const debug = require('debug')('semantic-release:plugins');
const getError = require('../get-error');
const {extractErrors} = require('../utils');
const PLUGINS_DEFINITIONS = require('../definitions/plugins');
const {loadPlugin, parseConfig} = require('./utils');
import {cloneDeep, isFunction, isPlainObject, noop, omit} from 'lodash-es';
import debugPlugins from 'debug';
import getError from '../get-error.js';
import {extractErrors} from '../utils.js';
import PLUGINS_DEFINITIONS from '../definitions/plugins.js';
import {loadPlugin, parseConfig} from './utils.js';
module.exports = async (context, type, pluginOpt, pluginsPath) => {
const debug = debugPlugins('semantic-release:plugins');
export default async (context, type, pluginOpt, pluginsPath) => {
const {stdout, stderr, options, logger} = context;
if (!pluginOpt) {
return noop;
@ -64,4 +66,4 @@ module.exports = async (context, type, pluginOpt, pluginsPath) => {
}
return validator;
};
}

View File

@ -1,7 +1,7 @@
const {identity} = require('lodash');
const pReduce = require('p-reduce');
const AggregateError = require('aggregate-error');
const {extractErrors} = require('../utils');
import {identity} from 'lodash-es';
import pReduce from 'p-reduce';
import AggregateError from 'aggregate-error';
import {extractErrors} from '../utils.js';
/**
* A Function that executes a list of functions sequentially. If at least one Function in the pipeline throws an Error or rejects, the pipeline function rejects as well.
@ -25,7 +25,7 @@ const {extractErrors} = require('../utils');
*
* @return {Pipeline} A Function that executes the `steps` sequentially
*/
module.exports = (steps, {settleAll = false, getNextInput = identity, transform = identity} = {}) => async (input) => {
export default (steps, {settleAll = false, getNextInput = identity, transform = identity} = {}) => async (input) => {
const results = [];
const errors = [];
await pReduce(
@ -55,4 +55,4 @@ module.exports = (steps, {settleAll = false, getNextInput = identity, transform
}
return results;
};
}

View File

@ -1,6 +1,9 @@
const {dirname} = require('path');
const {isString, isFunction, castArray, isArray, isPlainObject, isNil} = require('lodash');
const resolveFrom = require('resolve-from');
import {dirname} from 'node:path';
import {fileURLToPath} from 'node:url';
import {castArray, isArray, isFunction, isNil, isPlainObject, isString} from 'lodash-es';
import resolveFrom from 'resolve-from';
const __dirname = dirname(fileURLToPath(import.meta.url));
const validateSteps = (conf) => {
return conf.every((conf) => {
@ -24,7 +27,7 @@ const validateSteps = (conf) => {
});
};
function validatePlugin(conf) {
export function validatePlugin(conf) {
return (
isString(conf) ||
(isArray(conf) &&
@ -35,7 +38,7 @@ function validatePlugin(conf) {
);
}
function validateStep({required}, conf) {
export function validateStep({required}, conf) {
conf = castArray(conf).filter(Boolean);
if (required) {
return conf.length >= 1 && validateSteps(conf);
@ -44,7 +47,7 @@ function validateStep({required}, conf) {
return conf.length === 0 || validateSteps(conf);
}
async function loadPlugin({cwd}, name, pluginsPath) {
export async function loadPlugin({cwd}, name, pluginsPath) {
const basePath = pluginsPath[name]
? dirname(resolveFrom.silent(__dirname, pluginsPath[name]) || resolveFrom(cwd, pluginsPath[name]))
: __dirname;
@ -54,7 +57,7 @@ async function loadPlugin({cwd}, name, pluginsPath) {
return isFunction(name) ? name : (await import(resolveFrom.silent(basePath, name) || resolveFrom(cwd, name))).default;
}
function parseConfig(plugin) {
export function parseConfig(plugin) {
let path;
let config;
if (isArray(plugin)) {
@ -67,5 +70,3 @@ function parseConfig(plugin) {
return [path, config || {}];
}
module.exports = {validatePlugin, validateStep, loadPlugin, parseConfig};

View File

@ -1,12 +1,12 @@
const {isFunction, union, template} = require('lodash');
const semver = require('semver');
const hideSensitive = require('./hide-sensitive');
import {isFunction, template, union} from 'lodash-es';
import semver from 'semver';
import hideSensitive from './hide-sensitive.js';
function extractErrors(err) {
return err && isFunction(err[Symbol.iterator]) ? [...err] : [err];
export function extractErrors(err) {
return err && err.errors ? [...err.errors] : [err];
}
function hideSensitiveValues(env, objs) {
export function hideSensitiveValues(env, objs) {
const hideFunction = hideSensitive(env);
return objs.map((object) => {
Object.getOwnPropertyNames(object).forEach((prop) => {
@ -18,19 +18,19 @@ function hideSensitiveValues(env, objs) {
});
}
function tagsToVersions(tags) {
export function tagsToVersions(tags) {
return tags.map(({version}) => version);
}
function isMajorRange(range) {
export function isMajorRange(range) {
return /^\d+\.x(?:\.x)?$/i.test(range);
}
function isMaintenanceRange(range) {
export function isMaintenanceRange(range) {
return /^\d+\.(?:\d+|x)(?:\.x)?$/i.test(range);
}
function getUpperBound(range) {
export function getUpperBound(range) {
const result = semver.valid(range)
? range
: ((semver.validRange(range) || '').match(/<(?<upperBound>\d+\.\d+\.\d+(-\d+)?)$/) || [])[1];
@ -41,27 +41,27 @@ function getUpperBound(range) {
: result;
}
function getLowerBound(range) {
export function getLowerBound(range) {
return ((semver.validRange(range) || '').match(/(?<lowerBound>\d+\.\d+\.\d+)/) || [])[1];
}
function highest(version1, version2) {
export function highest(version1, version2) {
return version1 && version2 ? (semver.gt(version1, version2) ? version1 : version2) : version1 || version2;
}
function lowest(version1, version2) {
export function lowest(version1, version2) {
return version1 && version2 ? (semver.lt(version1, version2) ? version1 : version2) : version1 || version2;
}
function getLatestVersion(versions, {withPrerelease} = {}) {
export function getLatestVersion(versions, {withPrerelease} = {}) {
return versions.filter((version) => withPrerelease || !semver.prerelease(version)).sort(semver.rcompare)[0];
}
function getEarliestVersion(versions, {withPrerelease} = {}) {
export function getEarliestVersion(versions, {withPrerelease} = {}) {
return versions.filter((version) => withPrerelease || !semver.prerelease(version)).sort(semver.compare)[0];
}
function getFirstVersion(versions, lowerBranches) {
export function getFirstVersion(versions, lowerBranches) {
const lowerVersion = union(...lowerBranches.map(({tags}) => tagsToVersions(tags))).sort(semver.rcompare);
if (lowerVersion[0]) {
return versions.sort(semver.compare).find((version) => semver.gt(version, lowerVersion[0]));
@ -70,32 +70,14 @@ function getFirstVersion(versions, lowerBranches) {
return getEarliestVersion(versions);
}
function getRange(min, max) {
export function getRange(min, max) {
return `>=${min}${max ? ` <${max}` : ''}`;
}
function makeTag(tagFormat, version) {
export function makeTag(tagFormat, version) {
return template(tagFormat)({version});
}
function isSameChannel(channel, otherChannel) {
export function isSameChannel(channel, otherChannel) {
return channel === otherChannel || (!channel && !otherChannel);
}
module.exports = {
extractErrors,
hideSensitiveValues,
tagsToVersions,
isMajorRange,
isMaintenanceRange,
getUpperBound,
getLowerBound,
highest,
lowest,
getLatestVersion,
getEarliestVersion,
getFirstVersion,
getRange,
makeTag,
isSameChannel,
};

View File

@ -1,9 +1,9 @@
const {template, isString, isPlainObject} = require('lodash');
const AggregateError = require('aggregate-error');
const {isGitRepo, verifyTagName} = require('./git');
const getError = require('./get-error');
import {isPlainObject, isString, template} from 'lodash-es';
import AggregateError from 'aggregate-error';
import {isGitRepo, verifyTagName} from './git.js';
import getError from './get-error.js';
module.exports = async (context) => {
export default async (context) => {
const {
cwd,
env,
@ -40,4 +40,4 @@ module.exports = async (context) => {
if (errors.length > 0) {
throw new AggregateError(errors);
}
};
}

2618
package-lock.json generated

File diff suppressed because it is too large

View File

@ -2,11 +2,16 @@
"name": "semantic-release",
"description": "Automated semver compliant package publishing",
"version": "0.0.0-development",
"type": "module",
"author": "Stephan Bönnemann <stephan@boennemann.me> (http://boennemann.me)",
"ava": {
"files": [
"test/**/*.test.js"
],
"nodeArguments": [
"--loader=testdouble",
"--no-warnings"
],
"timeout": "2m"
},
"bin": {
@ -17,7 +22,8 @@
},
"contributors": [
"Gregor Martynus (https://twitter.com/gr2m)",
"Pierre Vanduynslager (https://twitter.com/@pvdlg_)"
"Pierre Vanduynslager (https://twitter.com/@pvdlg_)",
"Matt Travi <npm@travi.org> (https://matt.travi.org/)"
],
"dependencies": {
"@semantic-release/commit-analyzer": "^9.0.2",
@ -25,29 +31,29 @@
"@semantic-release/github": "^8.0.0",
"@semantic-release/npm": "^9.0.0",
"@semantic-release/release-notes-generator": "^10.0.0",
"aggregate-error": "^3.0.0",
"aggregate-error": "^4.0.1",
"cosmiconfig": "^7.0.0",
"debug": "^4.0.0",
"env-ci": "^5.0.0",
"execa": "^5.0.0",
"figures": "^3.0.0",
"find-versions": "^4.0.0",
"env-ci": "8.0.0-beta.1",
"execa": "^6.1.0",
"figures": "^5.0.0",
"find-versions": "^5.1.0",
"get-stream": "^6.0.0",
"git-log-parser": "^1.2.0",
"hook-std": "^2.0.0",
"hosted-git-info": "^4.0.0",
"lodash": "^4.17.21",
"marked": "^4.0.10",
"marked-terminal": "^5.0.0",
"hook-std": "^3.0.0",
"hosted-git-info": "^5.1.0",
"lodash-es": "^4.17.21",
"marked": "^4.1.0",
"marked-terminal": "^5.1.1",
"micromatch": "^4.0.2",
"p-each-series": "^2.1.0",
"p-reduce": "^2.0.0",
"read-pkg-up": "^7.0.0",
"p-each-series": "^3.0.0",
"p-reduce": "^3.0.0",
"read-pkg-up": "^9.1.0",
"resolve-from": "^5.0.0",
"semver": "^7.3.2",
"semver-diff": "^3.1.1",
"signale": "^1.2.1",
"yargs": "^16.2.0"
"yargs": "^17.5.1"
},
"devDependencies": {
"ava": "4.3.3",
@ -56,16 +62,16 @@
"codecov": "3.8.3",
"delay": "5.0.0",
"dockerode": "3.3.4",
"file-url": "3.0.0",
"fs-extra": "9.1.0",
"got": "11.8.5",
"file-url": "^4.0.0",
"fs-extra": "^10.1.0",
"got": "^12.5.0",
"js-yaml": "4.1.0",
"mockserver-client": "5.14.0",
"nock": "13.2.9",
"p-retry": "4.6.2",
"p-retry": "^5.1.1",
"sinon": "14.0.0",
"stream-buffers": "3.0.2",
"tempy": "1.0.1",
"tempy": "^3.0.0",
"testdouble": "3.16.6",
"xo": "0.32.1"
},
@ -122,8 +128,8 @@
"lint": "xo",
"pretest": "npm run lint",
"semantic-release": "./bin/semantic-release.js",
"test": "c8 ava -v",
"test:ci": "c8 ava -v"
"test": "c8 ava --verbose",
"test:ci": "c8 ava --verbose"
},
"xo": {
"prettier": true,

View File

@ -1,7 +1,7 @@
const test = require('ava');
const {union} = require('lodash');
const semver = require('semver');
const td = require('testdouble');
import test from 'ava';
import {union} from 'lodash-es';
import semver from 'semver';
import * as td from 'testdouble';
const getBranch = (branches, branch) => branches.find(({name}) => name === branch);
const release = (branches, name, version) => getBranch(branches, name).tags.push({version});
@ -11,8 +11,21 @@ const merge = (branches, source, target, tag) => {
getBranch(branches, target).tags
);
};
const remoteBranches = [];
const repositoryUrl = 'repositoryUrl';
let expand, getTags, getBranches;
test('Enforce ranges with branching release workflow', async (t) => {
test.beforeEach(async (t) => {
getTags = (await td.replaceEsm('../../lib/branches/get-tags.js')).default;
expand = (await td.replaceEsm('../../lib/branches/expand.js')).default;
getBranches = (await import('../../lib/branches/index.js')).default;
});
test.afterEach.always((t) => {
td.reset();
});
test.serial('Enforce ranges with branching release workflow', async (t) => {
const branches = [
{name: '1.x', tags: []},
{name: '1.0.x', tags: []},
@ -22,14 +35,11 @@ test('Enforce ranges with branching release workflow', async (t) => {
{name: 'beta', prerelease: true, tags: []},
{name: 'alpha', prerelease: true, tags: []},
];
td.replace('../../lib/branches/get-tags', () => branches);
td.replace('../../lib/branches/expand', () => []);
const getBranches = require('../../lib/branches');
const context = {options: {branches}};
td.when(expand(repositoryUrl, context, branches)).thenResolve(remoteBranches);
td.when(getTags(context, remoteBranches)).thenResolve(branches);
let result = (await getBranches('repositoryUrl', 'master', {options: {branches}})).map(({name, range}) => ({
name,
range,
}));
let result = (await getBranches(repositoryUrl, 'master', context)).map(({name, range}) => ({name, range}));
t.is(getBranch(result, '1.0.x').range, '>=1.0.0 <1.0.0', 'Cannot release on 1.0.x before a releasing on master');
t.is(getBranch(result, '1.x').range, '>=1.1.0 <1.0.0', 'Cannot release on 1.x before a releasing on master');
t.is(getBranch(result, 'master').range, '>=1.0.0');
@ -37,10 +47,7 @@ test('Enforce ranges with branching release workflow', async (t) => {
t.is(getBranch(result, 'next-major').range, '>=1.0.0');
release(branches, 'master', '1.0.0');
result = (await getBranches('repositoryUrl', 'master', {options: {branches}})).map(({name, range}) => ({
name,
range,
}));
result = (await getBranches('repositoryUrl', 'master', context)).map(({name, range}) => ({name, range}));
t.is(getBranch(result, '1.0.x').range, '>=1.0.0 <1.0.0', 'Cannot release on 1.0.x before a releasing on master');
t.is(getBranch(result, '1.x').range, '>=1.1.0 <1.0.0', 'Cannot release on 1.x before a releasing on master');
t.is(getBranch(result, 'master').range, '>=1.0.0');
@ -191,7 +198,7 @@ test('Enforce ranges with branching release workflow', async (t) => {
t.is(getBranch(result, '1.x').range, '>=1.2.0 <2.0.0', 'Can release on 1.x only within range');
});
test('Throw SemanticReleaseError for invalid configurations', async (t) => {
test.serial('Throw SemanticReleaseError for invalid configurations', async (t) => {
const branches = [
{name: '123', range: '123', tags: []},
{name: '1.x', tags: []},
@ -201,10 +208,12 @@ test('Throw SemanticReleaseError for invalid configurations', async (t) => {
{name: 'alpha', prerelease: 'alpha', tags: []},
{name: 'preview', prerelease: 'alpha', tags: []},
];
td.replace('../../lib/branches/get-tags', () => branches);
td.replace('../../lib/branches/expand', () => []);
const getBranches = require('../../lib/branches');
const errors = [...(await t.throwsAsync(getBranches('repositoryUrl', 'master', {options: {branches}})))];
const context = {options: {branches}};
td.when(expand(repositoryUrl, context, branches)).thenResolve(remoteBranches);
td.when(getTags(context, remoteBranches)).thenResolve(branches);
const error = await t.throwsAsync(getBranches(repositoryUrl, 'master', context));
const errors = [...error.errors];
t.is(errors[0].name, 'SemanticReleaseError');
t.is(errors[0].code, 'EMAINTENANCEBRANCH');
@ -228,16 +237,16 @@ test('Throw SemanticReleaseError for invalid configurations', async (t) => {
t.truthy(errors[4].details);
});
test('Throw a SemanticReleaseError if there is duplicate branches', async (t) => {
test.serial('Throw a SemanticReleaseError if there is duplicate branches', async (t) => {
const branches = [
{name: 'master', tags: []},
{name: 'master', tags: []},
];
td.replace('../../lib/branches/get-tags', () => branches);
td.replace('../../lib/branches/expand', () => []);
const getBranches = require('../../lib/branches');
const context = {options: {branches}};
td.when(expand(repositoryUrl, context, branches)).thenResolve(remoteBranches);
td.when(getTags(context, remoteBranches)).thenResolve(branches);
const errors = [...(await t.throwsAsync(getBranches('repositoryUrl', 'master', {options: {branches}})))];
const errors = [...(await t.throwsAsync(getBranches(repositoryUrl, 'master', context))).errors];
t.is(errors[0].name, 'SemanticReleaseError');
t.is(errors[0].code, 'EDUPLICATEBRANCHES');
@ -245,16 +254,17 @@ test('Throw a SemanticReleaseError if there is duplicate branches', async (t) =>
t.truthy(errors[0].details);
});
test('Throw a SemanticReleaseError for each invalid branch name', async (t) => {
test.serial('Throw a SemanticReleaseError for each invalid branch name', async (t) => {
const branches = [
{name: '~master', tags: []},
{name: '^master', tags: []},
];
td.replace('../../lib/branches/get-tags', () => branches);
td.replace('../../lib/branches/expand', () => []);
const getBranches = require('../../lib/branches');
const context = {options: {branches}};
const remoteBranches = [];
td.when(expand(repositoryUrl, context, branches)).thenResolve(remoteBranches);
td.when(getTags(context, remoteBranches)).thenResolve(branches);
const errors = [...(await t.throwsAsync(getBranches('repositoryUrl', 'master', {options: {branches}})))];
const errors = [...(await t.throwsAsync(getBranches(repositoryUrl, 'master', context))).errors];
t.is(errors[0].name, 'SemanticReleaseError');
t.is(errors[0].code, 'EINVALIDBRANCHNAME');
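With testdouble in place of plain sinon stubs, expectations are expressed as argument-matched interactions: `td.when(...).thenResolve(...)` stubs an async call and `td.verify(...)` asserts that a call happened. Because `td.replaceEsm()` swaps a module for the whole process, the affected tests are also marked `test.serial` and reset in `afterEach.always`. A standalone illustration of the two calls, separate from the project's tests:

import * as td from 'testdouble';

const fetchTags = td.func('fetchTags');              // standalone test-double functions
const log = td.func('log');

td.when(fetchTags('main')).thenResolve(['v1.0.0']);  // argument-matched async stub
console.log(await fetchTags('main'));                // -> ['v1.0.0']

log('released v1.0.0');
td.verify(log('released v1.0.0'));                   // assert that exact call was made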

View File

@ -1,6 +1,6 @@
const test = require('ava');
const expand = require('../../lib/branches/expand');
const {gitRepo, gitCommits, gitCheckout, gitPush} = require('../helpers/git-utils');
import test from 'ava';
import expand from '../../lib/branches/expand.js';
import {gitCheckout, gitCommits, gitPush, gitRepo} from '../helpers/git-utils.js';
test('Expand branches defined with globs', async (t) => {
const {cwd, repositoryUrl} = await gitRepo(true);

View File

@ -1,6 +1,6 @@
const test = require('ava');
const getTags = require('../../lib/branches/get-tags');
const {gitRepo, gitCommits, gitTagVersion, gitCheckout, gitAddNote} = require('../helpers/git-utils');
import test from 'ava';
import getTags from '../../lib/branches/get-tags.js';
import {gitAddNote, gitCheckout, gitCommits, gitRepo, gitTagVersion} from '../helpers/git-utils.js';
test('Get the valid tags', async (t) => {
const {cwd} = await gitRepo();

View File

@ -1,5 +1,5 @@
const test = require('ava');
const normalize = require('../../lib/branches/normalize');
import test from 'ava';
import * as normalize from '../../lib/branches/normalize.js';
const toTags = (versions) => versions.map((version) => ({version}));

View File

@ -1,8 +1,8 @@
const test = require('ava');
const {escapeRegExp} = require('lodash');
const td = require('testdouble');
const {stub} = require('sinon');
const {SECRET_REPLACEMENT} = require('../lib/definitions/constants');
import test from 'ava';
import {escapeRegExp} from 'lodash-es';
import * as td from 'testdouble';
import {stub} from 'sinon';
import {SECRET_REPLACEMENT} from '../lib/definitions/constants.js';
let previousArgv;
let previousEnv;
@ -27,10 +27,11 @@ test.afterEach.always((t) => {
process.argv = previousArgv;
process.env = previousEnv;
td.reset();
});
test.serial('Pass options to semantic-release API', async (t) => {
const run = stub().resolves(true);
const argv = [
'',
'',
@ -72,33 +73,49 @@ test.serial('Pass options to semantic-release API', async (t) => {
'--debug',
'-d',
];
td.replace('..', run);
const index = await td.replaceEsm('../index.js');
process.argv = argv;
const cli = require('../cli');
const cli = (await import('../cli.js')).default;
const exitCode = await cli();
t.deepEqual(run.args[0][0].branches, ['master', 'next']);
t.is(run.args[0][0].repositoryUrl, 'https://github/com/owner/repo.git');
t.is(run.args[0][0].tagFormat, `v\${version}`);
t.deepEqual(run.args[0][0].plugins, ['plugin1', 'plugin2']);
t.deepEqual(run.args[0][0].extends, ['config1', 'config2']);
t.deepEqual(run.args[0][0].verifyConditions, ['condition1', 'condition2']);
t.is(run.args[0][0].analyzeCommits, 'analyze');
t.deepEqual(run.args[0][0].verifyRelease, ['verify1', 'verify2']);
t.deepEqual(run.args[0][0].generateNotes, ['notes']);
t.deepEqual(run.args[0][0].prepare, ['prepare1', 'prepare2']);
t.deepEqual(run.args[0][0].publish, ['publish1', 'publish2']);
t.deepEqual(run.args[0][0].success, ['success1', 'success2']);
t.deepEqual(run.args[0][0].fail, ['fail1', 'fail2']);
t.is(run.args[0][0].debug, true);
t.is(run.args[0][0].dryRun, true);
td.verify(index.default({
branches: ['master', 'next'],
b: ['master', 'next'],
'repository-url': 'https://github/com/owner/repo.git',
repositoryUrl: 'https://github/com/owner/repo.git',
r: 'https://github/com/owner/repo.git',
'tag-format': `v\${version}`,
tagFormat: `v\${version}`,
t: `v\${version}`,
plugins: ['plugin1', 'plugin2'],
p: ['plugin1', 'plugin2'],
extends: ['config1', 'config2'],
e: ['config1', 'config2'],
'dry-run': true,
dryRun: true,
d: true,
verifyConditions: ['condition1', 'condition2'],
'verify-conditions': ['condition1', 'condition2'],
analyzeCommits: 'analyze',
'analyze-commits': 'analyze',
verifyRelease: ['verify1', 'verify2'],
'verify-release': ['verify1', 'verify2'],
generateNotes: ['notes'],
'generate-notes': ['notes'],
prepare: ['prepare1', 'prepare2'],
publish: ['publish1', 'publish2'],
success: ['success1', 'success2'],
fail: ['fail1', 'fail2'],
debug: true,
_: [],
'$0': ''
}));
t.is(exitCode, 0);
});
test.serial('Pass options to semantic-release API with alias arguments', async (t) => {
const run = stub().resolves(true);
const argv = [
'',
'',
@ -116,48 +133,65 @@ test.serial('Pass options to semantic-release API with alias arguments', async (
'config2',
'--dry-run',
];
td.replace('..', run);
const index = await td.replaceEsm('../index.js');
process.argv = argv;
const cli = require('../cli');
const cli = (await import('../cli.js')).default;
const exitCode = await cli();
t.deepEqual(run.args[0][0].branches, ['master']);
t.is(run.args[0][0].repositoryUrl, 'https://github/com/owner/repo.git');
t.is(run.args[0][0].tagFormat, `v\${version}`);
t.deepEqual(run.args[0][0].plugins, ['plugin1', 'plugin2']);
t.deepEqual(run.args[0][0].extends, ['config1', 'config2']);
t.is(run.args[0][0].dryRun, true);
td.verify(index.default({
branches: ['master'],
b: ['master'],
'repository-url': 'https://github/com/owner/repo.git',
repositoryUrl: 'https://github/com/owner/repo.git',
r: 'https://github/com/owner/repo.git',
'tag-format': `v\${version}`,
tagFormat: `v\${version}`,
t: `v\${version}`,
plugins: ['plugin1', 'plugin2'],
p: ['plugin1', 'plugin2'],
extends: ['config1', 'config2'],
e: ['config1', 'config2'],
'dry-run': true,
dryRun: true,
d: true,
_: [],
'$0': ''
}));
t.is(exitCode, 0);
});
test.serial('Pass unknown options to semantic-release API', async (t) => {
const run = stub().resolves(true);
const argv = ['', '', '--bool', '--first-option', 'value1', '--second-option', 'value2', '--second-option', 'value3'];
td.replace('..', run);
const index = await td.replaceEsm('../index.js');
process.argv = argv;
const cli = require('../cli');
const cli = (await import('../cli.js')).default;
const exitCode = await cli();
t.is(run.args[0][0].bool, true);
t.is(run.args[0][0].firstOption, 'value1');
t.deepEqual(run.args[0][0].secondOption, ['value2', 'value3']);
td.verify(index.default({
bool: true,
firstOption: 'value1',
'first-option': 'value1',
secondOption: ['value2', 'value3'],
'second-option': ['value2', 'value3'],
_: [],
'$0': ''
}));
t.is(exitCode, 0);
});
test.serial('Pass empty Array to semantic-release API for list option set to "false"', async (t) => {
const run = stub().resolves(true);
const argv = ['', '', '--publish', 'false'];
td.replace('..', run);
const index = await td.replaceEsm('../index.js');
process.argv = argv;
const cli = require('../cli');
const cli = (await import('../cli.js')).default;
const exitCode = await cli();
t.deepEqual(run.args[0][0].publish, []);
td.verify(index.default({publish: [], _: [], '$0': ''}));
t.is(exitCode, 0);
});
@ -165,9 +199,9 @@ test.serial('Pass empty Array to semantic-release API for list option set to "fa
test.serial('Do not set properties in option for which arg is not in command line', async (t) => {
const run = stub().resolves(true);
const argv = ['', '', '-b', 'master'];
td.replace('..', run);
await td.replaceEsm('../index.js', null, run);
process.argv = argv;
const cli = require('../cli');
const cli = (await import('../cli.js')).default;
await cli();
@ -184,9 +218,9 @@ test.serial('Do not set properties in option for which arg is not in command lin
test.serial('Display help', async (t) => {
const run = stub().resolves(true);
const argv = ['', '', '--help'];
td.replace('..', run);
await td.replaceEsm('../index.js', null, run);
process.argv = argv;
const cli = require('../cli');
const cli = (await import('../cli.js')).default;
const exitCode = await cli();
@ -197,9 +231,9 @@ test.serial('Display help', async (t) => {
test.serial('Return error exitCode and prints help if called with a command', async (t) => {
const run = stub().resolves(true);
const argv = ['', '', 'pre'];
td.replace('..', run);
await td.replaceEsm('../index.js', null, run);
process.argv = argv;
const cli = require('../cli');
const cli = (await import('../cli.js')).default;
const exitCode = await cli();
@ -211,9 +245,9 @@ test.serial('Return error exitCode and prints help if called with a command', as
test.serial('Return error exitCode if multiple plugin are set for single plugin', async (t) => {
const run = stub().resolves(true);
const argv = ['', '', '--analyze-commits', 'analyze1', 'analyze2'];
td.replace('..', run);
await td.replaceEsm('../index.js', null, run);
process.argv = argv;
const cli = require('../cli');
const cli = (await import('../cli.js')).default;
const exitCode = await cli();
@ -223,11 +257,11 @@ test.serial('Return error exitCode if multiple plugin are set for single plugin'
});
test.serial('Return error exitCode if semantic-release throw error', async (t) => {
const run = stub().rejects(new Error('semantic-release error'));
const argv = ['', ''];
td.replace('..', run);
const index = await td.replaceEsm('../index.js');
td.when(index.default({_: [], '$0': ''})).thenReject(new Error('semantic-release error'));
process.argv = argv;
const cli = require('../cli');
const cli = (await import('../cli.js')).default;
const exitCode = await cli();
@ -237,12 +271,12 @@ test.serial('Return error exitCode if semantic-release throw error', async (t) =
test.serial('Hide sensitive environment variable values from the logs', async (t) => {
const env = {MY_TOKEN: 'secret token'};
const run = stub().rejects(new Error(`Throw error: Exposing token ${env.MY_TOKEN}`));
const argv = ['', ''];
td.replace('..', run);
const index = await td.replaceEsm('../index.js');
td.when(index.default({_: [], '$0': ''})).thenReject(new Error(`Throw error: Exposing token ${env.MY_TOKEN}`));
process.argv = argv;
process.env = {...process.env, ...env};
const cli = require('../cli');
const cli = (await import('../cli.js')).default;
const exitCode = await cli();
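Because the mocked `index.js` default export is now checked with `td.verify(index.default({...}))`, the expected object has to spell out everything yargs puts on the parsed result: each option under both its kebab-case and camelCase names, every short alias, plus the `_` positionals array and `$0`. A small sketch of where those extra keys come from, with option names chosen to mirror the test above and the output abbreviated:

import yargs from 'yargs';
import {hideBin} from 'yargs/helpers';

const argv = yargs(hideBin(['node', 'cli.js', '--dry-run', '-b', 'master', 'next']))
  .option('d', {alias: 'dry-run', type: 'boolean'})
  .option('b', {alias: 'branches', type: 'array'})
  .parse();
// argv now carries d, dryRun and 'dry-run', as well as b and branches,
// plus _ and $0, which is why the td.verify() expectations list every variant.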

View File

@ -1,5 +1,5 @@
const test = require('ava');
const {maintenance, prerelease, release} = require('../../lib/definitions/branches');
import test from 'ava';
import {maintenance, prerelease, release} from '../../lib/definitions/branches.js';
test('A "maintenance" branch is identified by having a "range" property or a "name" formatted like "N.x", "N.x.x" or "N.N.x"', (t) => {
/* eslint-disable unicorn/no-fn-reference-in-iterator */

View File

@ -1,6 +1,6 @@
const test = require('ava');
const plugins = require('../../lib/definitions/plugins');
const {RELEASE_NOTES_SEPARATOR, SECRET_REPLACEMENT} = require('../../lib/definitions/constants');
import test from 'ava';
import plugins from '../../lib/definitions/plugins.js';
import {RELEASE_NOTES_SEPARATOR, SECRET_REPLACEMENT} from '../../lib/definitions/constants.js';
test('The "analyzeCommits" plugin output must be either undefined or a valid semver release type', (t) => {
t.false(plugins.analyzeCommits.outputValidator('invalid'));

View File

@ -1 +1 @@
module.exports = () => {};
export default () => {}

View File

@ -1,4 +1,4 @@
const SemanticReleaseError = require('@semantic-release/error');
import SemanticReleaseError from '@semantic-release/error';
class InheritedError extends SemanticReleaseError {
constructor(message, code) {
@ -9,6 +9,6 @@ class InheritedError extends SemanticReleaseError {
}
}
module.exports = () => {
export default () => {
throw new InheritedError('Inherited error', 'EINHERITED');
};
}

View File

@ -1,5 +1,5 @@
module.exports = () => {
export default () => {
const error = new Error('a');
error.errorProperty = 'errorProperty';
throw error;
};
}

View File

@ -1,5 +1,5 @@
const AggregateError = require('aggregate-error');
import AggregateError from 'aggregate-error';
module.exports = () => {
export default () => {
throw new AggregateError([new Error('a'), new Error('b')]);
};
}

View File

@ -1 +1 @@
module.exports = (pluginConfig, context) => context;
export default (pluginConfig, context) => context

View File

@ -1,6 +1,6 @@
module.exports = (pluginConfig, {env, logger}) => {
export default (pluginConfig, {env, logger}) => {
console.log(`Console: Exposing token ${env.MY_TOKEN}`);
logger.log(`Log: Exposing token ${env.MY_TOKEN}`);
logger.error(`Error: Console token ${env.MY_TOKEN}`);
throw new Error(`Throw error: Exposing ${env.MY_TOKEN}`);
};
}

View File

@ -1 +1 @@
module.exports = (pluginConfig, context) => ({pluginConfig, context});
export default (pluginConfig, context) => ({pluginConfig, context})
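The plugin fixtures above all switch from `module.exports` to `export default`, keeping a single function per file as the step implementation. A hypothetical plugin file written in the same style:

// hypothetical ./verify-something.js, referenced for a single step in a release config
export default (pluginConfig, context) => {
  if (!pluginConfig.someOption) {
    throw new Error('someOption is required');
  }
};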

View File

@ -1,7 +1,7 @@
const test = require('ava');
const {stub} = require('sinon');
const getCommits = require('../lib/get-commits');
const {gitRepo, gitCommits, gitDetachedHead} = require('./helpers/git-utils');
import test from 'ava';
import {stub} from 'sinon';
import getCommits from '../lib/get-commits.js';
import {gitCommits, gitDetachedHead, gitRepo} from './helpers/git-utils.js';
test.beforeEach((t) => {
// Stub the logger functions

View File

@ -1,12 +1,15 @@
const path = require('path');
const {format} = require('util');
const test = require('ava');
const {writeFile, outputJson} = require('fs-extra');
const {omit} = require('lodash');
const td = require('testdouble');
const {stub} = require('sinon');
const yaml = require('js-yaml');
const {gitRepo, gitTagVersion, gitCommits, gitShallowClone, gitAddConfig} = require('./helpers/git-utils');
import path from 'node:path';
import {format} from 'node:util';
import test from 'ava';
import fsExtra from 'fs-extra';
import {omit} from 'lodash-es';
import * as td from 'testdouble';
import yaml from 'js-yaml';
import {gitAddConfig, gitCommits, gitRepo, gitShallowClone, gitTagVersion} from './helpers/git-utils.js';
const {outputJson, writeFile} = fsExtra;
const pluginsConfig = {foo: 'bar', baz: 'qux'};
let plugins;
const DEFAULT_PLUGINS = [
'@semantic-release/commit-analyzer',
@ -15,10 +18,13 @@ const DEFAULT_PLUGINS = [
'@semantic-release/github',
];
test.beforeEach((t) => {
t.context.plugins = stub().returns({});
td.replace('../lib/plugins', t.context.plugins);
t.context.getConfig = require('../lib/get-config');
test.beforeEach(async (t) => {
plugins = (await td.replaceEsm('../lib/plugins/index.js')).default;
t.context.getConfig = (await import('../lib/get-config.js')).default;
});
test.afterEach.always((t) => {
td.reset();
});
test('Default values, reading repositoryUrl from package.json', async (t) => {
@ -103,7 +109,7 @@ test('Convert "ci" option to "noCi"', async (t) => {
t.is(result.noCi, true);
});
test('Read options from package.json', async (t) => {
test.serial('Read options from package.json', async (t) => {
// Create a git repository, set the current working directory at the root of the repo
const {cwd} = await gitRepo();
const options = {
@ -114,19 +120,18 @@ test('Read options from package.json', async (t) => {
tagFormat: `v\${version}`,
plugins: false,
};
// Verify the plugins module is called with the plugin options from package.json
td.when(plugins({cwd, options}, {})).thenResolve(pluginsConfig);
// Create package.json in repository root
await outputJson(path.resolve(cwd, 'package.json'), {release: options});
const {options: result} = await t.context.getConfig({cwd});
const result = await t.context.getConfig({cwd});
const expected = {...options, branches: ['test_branch']};
// Verify the options contains the plugin config from package.json
t.deepEqual(result, expected);
// Verify the plugins module is called with the plugin options from package.json
t.deepEqual(t.context.plugins.args[0][0], {options: expected, cwd});
t.deepEqual(result, {options, plugins: pluginsConfig});
});
test('Read options from .releaserc.yml', async (t) => {
test.serial('Read options from .releaserc.yml', async (t) => {
// Create a git repository, set the current working directory at the root of the repo
const {cwd} = await gitRepo();
const options = {
@ -138,17 +143,16 @@ test('Read options from .releaserc.yml', async (t) => {
};
// Create package.json in repository root
await writeFile(path.resolve(cwd, '.releaserc.yml'), yaml.dump(options));
const {options: result} = await t.context.getConfig({cwd});
const expected = {...options, branches: ['test_branch']};
// Verify the options contains the plugin config from package.json
t.deepEqual(result, expected);
// Verify the plugins module is called with the plugin options from package.json
t.deepEqual(t.context.plugins.args[0][0], {options: expected, cwd});
td.when(plugins({cwd, options}, {})).thenResolve(pluginsConfig);
const result = await t.context.getConfig({cwd});
// Verify the options contains the plugin config from package.json
t.deepEqual(result, {options, plugins: pluginsConfig});
});
test('Read options from .releaserc.json', async (t) => {
test.serial('Read options from .releaserc.json', async (t) => {
// Create a git repository, set the current working directory at the root of the repo
const {cwd} = await gitRepo();
const options = {
@ -160,17 +164,16 @@ test('Read options from .releaserc.json', async (t) => {
};
// Create package.json in repository root
await outputJson(path.resolve(cwd, '.releaserc.json'), options);
const {options: result} = await t.context.getConfig({cwd});
const expected = {...options, branches: ['test_branch']};
// Verify the options contains the plugin config from package.json
t.deepEqual(result, expected);
// Verify the plugins module is called with the plugin options from package.json
t.deepEqual(t.context.plugins.args[0][0], {options: expected, cwd});
td.when(plugins({cwd, options}, {})).thenResolve(pluginsConfig);
const result = await t.context.getConfig({cwd});
// Verify the options contains the plugin config from package.json
t.deepEqual(result, {options, plugins: pluginsConfig});
});
test('Read options from .releaserc.js', async (t) => {
test.serial('Read options from .releaserc.js', async (t) => {
// Create a git repository, set the current working directory at the root of the repo
const {cwd} = await gitRepo();
const options = {
@ -182,17 +185,16 @@ test('Read options from .releaserc.js', async (t) => {
};
// Create package.json in repository root
await writeFile(path.resolve(cwd, '.releaserc.js'), `module.exports = ${JSON.stringify(options)}`);
const {options: result} = await t.context.getConfig({cwd});
const expected = {...options, branches: ['test_branch']};
// Verify the options contains the plugin config from package.json
t.deepEqual(result, expected);
// Verify the plugins module is called with the plugin options from package.json
t.deepEqual(t.context.plugins.args[0][0], {options: expected, cwd});
td.when(plugins({cwd, options}, {})).thenResolve(pluginsConfig);
const result = await t.context.getConfig({cwd});
// Verify the options contains the plugin config from package.json
t.deepEqual(result, {options, plugins: pluginsConfig});
});
test('Read options from .releaserc.cjs', async (t) => {
test.serial('Read options from .releaserc.cjs', async (t) => {
// Create a git repository, set the current working directory at the root of the repo
const {cwd} = await gitRepo();
const options = {
@ -204,17 +206,16 @@ test('Read options from .releaserc.cjs', async (t) => {
};
// Create .releaserc.cjs in repository root
await writeFile(path.resolve(cwd, '.releaserc.cjs'), `module.exports = ${JSON.stringify(options)}`);
const {options: result} = await t.context.getConfig({cwd});
const expected = {...options, branches: ['test_branch']};
// Verify the options contains the plugin config from .releaserc.cjs
t.deepEqual(result, expected);
// Verify the plugins module is called with the plugin options from .releaserc.cjs
t.deepEqual(t.context.plugins.args[0][0], {options: expected, cwd});
td.when(plugins({cwd, options}, {})).thenResolve(pluginsConfig);
const result = await t.context.getConfig({cwd});
// Verify the options contains the plugin config from .releaserc.cjs
t.deepEqual(result, {options, plugins: pluginsConfig});
});
test('Read options from release.config.js', async (t) => {
test.serial('Read options from release.config.js', async (t) => {
// Create a git repository, set the current working directory at the root of the repo
const {cwd} = await gitRepo();
const options = {
@ -226,17 +227,16 @@ test('Read options from release.config.js', async (t) => {
};
// Create package.json in repository root
await writeFile(path.resolve(cwd, 'release.config.js'), `module.exports = ${JSON.stringify(options)}`);
const {options: result} = await t.context.getConfig({cwd});
const expected = {...options, branches: ['test_branch']};
// Verify the options contains the plugin config from package.json
t.deepEqual(result, expected);
// Verify the plugins module is called with the plugin options from package.json
t.deepEqual(t.context.plugins.args[0][0], {options: expected, cwd});
td.when(plugins({cwd, options}, {})).thenResolve(pluginsConfig);
const result = await t.context.getConfig({cwd});
// Verify the options contains the plugin config from package.json
t.deepEqual(result, {options, plugins: pluginsConfig});
});
test('Read options from release.config.cjs', async (t) => {
test.serial('Read options from release.config.cjs', async (t) => {
// Create a git repository, set the current working directory at the root of the repo
const {cwd} = await gitRepo();
const options = {
@ -246,19 +246,18 @@ test('Read options from release.config.cjs', async (t) => {
tagFormat: `v\${version}`,
plugins: false,
};
// Verify the plugins module is called with the plugin options from release.config.cjs
td.when(plugins({cwd, options}, {})).thenResolve(pluginsConfig);
// Create release.config.cjs in repository root
await writeFile(path.resolve(cwd, 'release.config.cjs'), `module.exports = ${JSON.stringify(options)}`);
const {options: result} = await t.context.getConfig({cwd});
const result = await t.context.getConfig({cwd});
const expected = {...options, branches: ['test_branch']};
// Verify the options contains the plugin config from release.config.cjs
t.deepEqual(result, expected);
// Verify the plugins module is called with the plugin options from release.config.cjs
t.deepEqual(t.context.plugins.args[0][0], {options: expected, cwd});
t.deepEqual(result, {options, plugins: pluginsConfig});
});
test('Prioritise CLI/API parameters over file configuration and git repo', async (t) => {
test.serial('Prioritise CLI/API parameters over file configuration and git repo', async (t) => {
// Create a git repository, set the current working directory at the root of the repo
let {cwd, repositoryUrl} = await gitRepo();
await gitCommits(['First'], {cwd});
@ -275,20 +274,19 @@ test('Prioritise CLI/API parameters over file configuration and git repo', async
tagFormat: `cli\${version}`,
plugins: false,
};
// Verify the plugins module is called with the plugin options from CLI/API
td.when(plugins({cwd, options}, {})).thenResolve(pluginsConfig);
const pkg = {release: pkgOptions, repository: 'git@host.null:owner/module.git'};
// Create package.json in repository root
await outputJson(path.resolve(cwd, 'package.json'), pkg);
const result = await t.context.getConfig({cwd}, options);
const expected = {...options, branches: ['branch_cli']};
// Verify the options contains the plugin config from CLI/API
t.deepEqual(result.options, expected);
// Verify the plugins module is called with the plugin options from CLI/API
t.deepEqual(t.context.plugins.args[0][0], {options: expected, cwd});
t.deepEqual(result, {options, plugins: pluginsConfig});
});
test('Read configuration from file path in "extends"', async (t) => {
test.serial('Read configuration from file path in "extends"', async (t) => {
// Create a git repository, set the current working directory at the root of the repo
const {cwd} = await gitRepo();
const pkgOptions = {extends: './shareable.json'};
@ -303,23 +301,24 @@ test('Read configuration from file path in "extends"', async (t) => {
// Create package.json and shareable.json in repository root
await outputJson(path.resolve(cwd, 'package.json'), {release: pkgOptions});
await outputJson(path.resolve(cwd, 'shareable.json'), options);
const {options: result} = await t.context.getConfig({cwd});
const expected = {...options, branches: ['test_branch']};
// Verify the options contains the plugin config from shareable.json
t.deepEqual(result, expected);
// Verify the plugins module is called with the plugin options from shareable.json
t.deepEqual(t.context.plugins.args[0][0], {options: expected, cwd});
t.deepEqual(t.context.plugins.args[0][1], {
td.when(plugins(
{cwd, options},
{
analyzeCommits: './shareable.json',
generateNotes: './shareable.json',
'plugin-1': './shareable.json',
'plugin-2': './shareable.json',
});
}
)).thenResolve(pluginsConfig);
const result = await t.context.getConfig({cwd});
// Verify the options contains the plugin config from shareable.json
t.deepEqual(result, {options, plugins: pluginsConfig});
});
test('Read configuration from module path in "extends"', async (t) => {
test.serial('Read configuration from module path in "extends"', async (t) => {
// Create a git repository, set the current working directory at the root of the repo
const {cwd} = await gitRepo();
const pkgOptions = {extends: 'shareable'};
@ -334,21 +333,19 @@ test('Read configuration from module path in "extends"', async (t) => {
// Create package.json and shareable.json in repository root
await outputJson(path.resolve(cwd, 'package.json'), {release: pkgOptions});
await outputJson(path.resolve(cwd, 'node_modules/shareable/index.json'), options);
const {options: result} = await t.context.getConfig({cwd});
const expected = {...options, branches: ['test_branch']};
// Verify the options contains the plugin config from shareable.json
t.deepEqual(result, expected);
// Verify the plugins module is called with the plugin options from shareable.json
t.deepEqual(t.context.plugins.args[0][0], {options: expected, cwd});
t.deepEqual(t.context.plugins.args[0][1], {
analyzeCommits: 'shareable',
generateNotes: 'shareable',
});
td.when(plugins(
{cwd, options},
{analyzeCommits: 'shareable', generateNotes: 'shareable'}
)).thenResolve(pluginsConfig);
const result = await t.context.getConfig({cwd});
// Verify the options contains the plugin config from shareable.json
t.deepEqual(result, {options, plugins: pluginsConfig});
});
test('Read configuration from an array of paths in "extends"', async (t) => {
test.serial('Read configuration from an array of paths in "extends"', async (t) => {
// Create a git repository, set the current working directory at the root of the repo
const {cwd} = await gitRepo();
const pkgOptions = {extends: ['./shareable1.json', './shareable2.json']};
@ -370,24 +367,26 @@ test('Read configuration from an array of paths in "extends"', async (t) => {
await outputJson(path.resolve(cwd, 'package.json'), {release: pkgOptions});
await outputJson(path.resolve(cwd, 'shareable1.json'), options1);
await outputJson(path.resolve(cwd, 'shareable2.json'), options2);
const {options: result} = await t.context.getConfig({cwd});
const expected = {...options1, ...options2, branches: ['test_branch']};
// Verify the options contains the plugin config from shareable1.json and shareable2.json
t.deepEqual(result, expected);
const expectedOptions = {...options1, ...options2, branches: ['test_branch']};
// Verify the plugins module is called with the plugin options from shareable1.json and shareable2.json
t.deepEqual(t.context.plugins.args[0][0], {options: expected, cwd});
t.deepEqual(t.context.plugins.args[0][1], {
td.when(plugins(
{options: expectedOptions, cwd},
{
verifyRelease1: './shareable1.json',
verifyRelease2: './shareable2.json',
generateNotes2: './shareable2.json',
analyzeCommits1: './shareable1.json',
analyzeCommits2: './shareable2.json',
});
}
)).thenResolve(pluginsConfig);
const result = await t.context.getConfig({cwd});
// Verify the options contains the plugin config from shareable1.json and shareable2.json
t.deepEqual(result, {options: expectedOptions, plugins: pluginsConfig});
});
test('Prioritize configuration from config file over "extends"', async (t) => {
test.serial('Prioritize configuration from config file over "extends"', async (t) => {
// Create a git repository, set the current working directory at the root of the repo
const {cwd} = await gitRepo();
const pkgOptions = {
@ -408,22 +407,24 @@ test('Prioritize configuration from config file over "extends"', async (t) => {
// Create package.json and shareable.json in repository root
await outputJson(path.resolve(cwd, 'package.json'), {release: pkgOptions});
await outputJson(path.resolve(cwd, 'shareable.json'), options1);
const {options: result} = await t.context.getConfig({cwd});
const expected = omit({...options1, ...pkgOptions, branches: ['test_pkg']}, 'extends');
// Verify the options contains the plugin config from package.json and shareable.json
t.deepEqual(result, expected);
const expectedOptions = omit({...options1, ...pkgOptions, branches: ['test_pkg']}, 'extends');
// Verify the plugins module is called with the plugin options from package.json and shareable.json
t.deepEqual(t.context.plugins.args[0][0], {options: expected, cwd});
t.deepEqual(t.context.plugins.args[0][1], {
td.when(plugins(
{cwd, options: expectedOptions},
{
analyzeCommits: './shareable.json',
generateNotesShareable: './shareable.json',
publishShareable: './shareable.json',
});
}
)).thenResolve(pluginsConfig);
const result = await t.context.getConfig({cwd});
// Verify the options contains the plugin config from package.json and shareable.json
t.deepEqual(result, {options: expectedOptions, plugins: pluginsConfig});
});
test('Prioritize configuration from cli/API options over "extends"', async (t) => {
test.serial('Prioritize configuration from cli/API options over "extends"', async (t) => {
// Create a git repository, set the current working directory at the root of the repo
const {cwd} = await gitRepo();
const cliOptions = {
@ -456,17 +457,20 @@ test('Prioritize configuration from cli/API options over "extends"', async (t) =
await outputJson(path.resolve(cwd, 'package.json'), {release: pkgOptions});
await outputJson(path.resolve(cwd, 'shareable1.json'), options1);
await outputJson(path.resolve(cwd, 'shareable2.json'), options2);
const {options: result} = await t.context.getConfig({cwd}, cliOptions);
const expected = omit({...options2, ...pkgOptions, ...cliOptions, branches: ['branch_opts']}, 'extends');
// Verify the options contains the plugin config from package.json and shareable2.json
t.deepEqual(result, expected);
const expectedOptions = omit({...options2, ...pkgOptions, ...cliOptions, branches: ['branch_opts']}, 'extends');
// Verify the plugins module is called with the plugin options from package.json and shareable2.json
t.deepEqual(t.context.plugins.args[0][0], {options: expected, cwd});
td.when(plugins(
{cwd, options: expectedOptions},
{analyzeCommits2: './shareable2.json', publishShareable: './shareable2.json'}
)).thenResolve(pluginsConfig);
const result = await t.context.getConfig({cwd}, cliOptions);
// Verify the options contains the plugin config from package.json and shareable2.json
t.deepEqual(result, {options: expectedOptions, plugins: pluginsConfig});
});
test('Allow to unset properties defined in shareable config with "null"', async (t) => {
test.serial('Allow to unset properties defined in shareable config with "null"', async (t) => {
// Create a git repository, set the current working directory at the root of the repo
const {cwd} = await gitRepo();
const pkgOptions = {
@ -485,33 +489,40 @@ test('Allow to unset properties defined in shareable config with "null"', async
// Create package.json and shareable.json in repository root
await outputJson(path.resolve(cwd, 'package.json'), {release: pkgOptions});
await outputJson(path.resolve(cwd, 'shareable.json'), options1);
const {options} = await t.context.getConfig({cwd});
// Verify the options contains the plugin config from shareable.json and the default `plugins`
t.deepEqual(options, {
...omit(options1, ['analyzeCommits']),
...omit(pkgOptions, ['extends', 'analyzeCommits']),
plugins: DEFAULT_PLUGINS,
});
// Verify the plugins module is called with the plugin options from shareable.json and the default `plugins`
t.deepEqual(t.context.plugins.args[0][0], {
td.when(plugins(
{
options: {
...omit(options1, 'analyzeCommits'),
...omit(pkgOptions, ['extends', 'analyzeCommits']),
plugins: DEFAULT_PLUGINS,
},
cwd,
});
t.deepEqual(t.context.plugins.args[0][1], {
},
{
generateNotes: './shareable.json',
analyzeCommits: './shareable.json',
'test-plugin': './shareable.json',
});
}
)).thenResolve(pluginsConfig);
const result = await t.context.getConfig({cwd});
// Verify the options contains the plugin config from shareable.json and the default `plugins`
t.deepEqual(
result,
{
options: {
...omit(options1, ['analyzeCommits']),
...omit(pkgOptions, ['extends', 'analyzeCommits']),
plugins: DEFAULT_PLUGINS,
},
plugins: pluginsConfig
}
);
});
test('Allow to unset properties defined in shareable config with "undefined"', async (t) => {
test.serial('Allow to unset properties defined in shareable config with "undefined"', async (t) => {
// Create a git repository, set the current working directory at the root of the repo
const {cwd} = await gitRepo();
const pkgOptions = {
@ -526,25 +537,24 @@ test('Allow to unset properties defined in shareable config with "undefined"', a
tagFormat: `v\${version}`,
plugins: false,
};
// Create package.json and release.config.js in repository root
// Create release.config.js and shareable.json in repository root
await writeFile(path.resolve(cwd, 'release.config.js'), `module.exports = ${format(pkgOptions)}`);
await outputJson(path.resolve(cwd, 'shareable.json'), options1);
const {options: result} = await t.context.getConfig({cwd});
const expected = {
const expectedOptions = {
...omit(options1, 'analyzeCommits'),
...omit(pkgOptions, ['extends', 'analyzeCommits']),
branches: ['test_branch'],
};
// Verify the options contains the plugin config from shareable.json
t.deepEqual(result, expected);
// Verify the plugins module is called with the plugin options from shareable.json
t.deepEqual(t.context.plugins.args[0][0], {options: expected, cwd});
t.deepEqual(t.context.plugins.args[0][1], {
generateNotes: './shareable.json',
analyzeCommits: './shareable.json',
});
td.when(plugins(
{options: expectedOptions, cwd},
{generateNotes: './shareable.json', analyzeCommits: './shareable.json'}
)).thenResolve(pluginsConfig);
const result = await t.context.getConfig({cwd});
// Verify the options contains the plugin config from shareable.json
t.deepEqual(result, {options: expectedOptions, plugins: pluginsConfig});
});
test('Throw an Error if one of the shareable config cannot be found', async (t) => {
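One interop detail worth noting in this file: `fs-extra` is still CommonJS, so it is imported as a single default binding and destructured afterwards, while `lodash-es` exposes real named exports. Node only synthesises named bindings for a CJS package when its static analysis can detect them, so the default-import form is the one that is always safe:

import fsExtra from 'fs-extra';   // CommonJS package: only the default export is guaranteed
import {omit} from 'lodash-es';   // native ESM package: named imports work directly

const {outputJson, writeFile} = fsExtra;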

View File

@ -1,6 +1,6 @@
const test = require('ava');
const getAuthUrl = require('../lib/get-git-auth-url');
const {gitRepo} = require('./helpers/git-utils');
import test from 'ava';
import getAuthUrl from '../lib/get-git-auth-url.js';
import {gitRepo} from './helpers/git-utils.js';
const env = {GIT_ASKPASS: 'echo', GIT_TERMINAL_PROMPT: 0};

View File

@ -1,5 +1,5 @@
const test = require('ava');
const getLastRelease = require('../lib/get-last-release');
import test from 'ava';
import getLastRelease from '../lib/get-last-release.js';
test('Get the highest non-prerelease valid tag', (t) => {
const result = getLastRelease({

View File

@ -1,6 +1,6 @@
const test = require('ava');
const {spy} = require('sinon');
const getLogger = require('../lib/get-logger');
import test from 'ava';
import {spy} from 'sinon';
import getLogger from '../lib/get-logger.js';
test('Expose "error", "success" and "log" functions', (t) => {
const stdout = spy();

View File

@ -1,6 +1,6 @@
const test = require('ava');
const {stub} = require('sinon');
const getNextVersion = require('../lib/get-next-version');
import test from 'ava';
import {stub} from 'sinon';
import getNextVersion from '../lib/get-next-version.js';
test.beforeEach((t) => {
// Stub the logger functions

View File

@ -1,5 +1,5 @@
const test = require('ava');
const getReleaseToAdd = require('../lib/get-release-to-add');
import test from 'ava';
import getReleaseToAdd from '../lib/get-release-to-add.js';
test('Return versions merged from release to maintenance branch, excluding lower than branch start range', (t) => {
const result = getReleaseToAdd({

View File

@ -1,40 +1,40 @@
const test = require('ava');
const tempy = require('tempy');
const {
getTagHead,
isRefExists,
import test from 'ava';
import {temporaryDirectory} from 'tempy';
import {
addNote,
fetch,
fetchNotes,
getBranches,
getGitHead,
getNote,
getTagHead,
getTags,
isBranchUpToDate,
isGitRepo,
isRefExists,
push,
repoUrl,
tag,
push,
getTags,
getBranches,
isGitRepo,
verifyTagName,
isBranchUpToDate,
getNote,
addNote,
fetchNotes,
} = require('../lib/git');
const {
gitRepo,
gitCommits,
gitCheckout,
gitTagVersion,
gitShallowClone,
gitGetCommits,
verifyTagName
} from '../lib/git.js';
import {
gitAddConfig,
gitAddNote,
gitCheckout,
gitCommits,
gitCommitTag,
gitRemoteTagHead,
gitPush,
gitDetachedHead,
gitDetachedHeadFromBranch,
gitAddNote,
gitGetNote,
gitFetch,
initGit,
} = require('./helpers/git-utils');
gitGetCommits,
gitGetNote,
gitPush,
gitRemoteTagHead,
gitRepo,
gitShallowClone,
gitTagVersion,
initGit
} from './helpers/git-utils.js';
test('Get the last commit sha', async (t) => {
// Create a git repository, set the current working directory at the root of the repo
@ -268,7 +268,7 @@ test('Return "true" if in a Git repository', async (t) => {
});
test('Return falsy if not in a Git repository', async (t) => {
const cwd = tempy.directory();
const cwd = temporaryDirectory();
t.falsy(await isGitRepo({cwd}));
});
@ -288,7 +288,7 @@ test('Return falsy for invalid tag names', async (t) => {
});
test('Throws error if obtaining the tags fails', async (t) => {
const cwd = tempy.directory();
const cwd = temporaryDirectory();
await t.throwsAsync(getTags('master', {cwd}));
});
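The `tempy` bump is an API change on top of the move to ESM: newer releases drop the default-exported object with a `directory()` method in favour of named helpers, hence `tempy.directory()` becoming `temporaryDirectory()` here and in the git helpers. For reference, assuming a recent tempy release:

import {temporaryDirectory} from 'tempy';

const cwd = temporaryDirectory();   // absolute path of a fresh temporary directory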

View File

@ -1,10 +1,10 @@
const tempy = require('tempy');
const execa = require('execa');
const fileUrl = require('file-url');
const pEachSeries = require('p-each-series');
const gitLogParser = require('git-log-parser');
const getStream = require('get-stream');
const {GIT_NOTE_REF} = require('../../lib/definitions/constants');
import {temporaryDirectory} from 'tempy';
import {execa} from 'execa';
import fileUrl from 'file-url';
import pEachSeries from 'p-each-series';
import gitLogParser from 'git-log-parser';
import getStream from 'get-stream';
import {GIT_NOTE_REF} from '../../lib/definitions/constants.js';
/**
* Commit message information.
@ -23,8 +23,8 @@ const {GIT_NOTE_REF} = require('../../lib/definitions/constants');
* @param {Boolean} withRemote `true` to create a shallow clone of a bare repository.
* @return {String} The path of the repository
*/
async function initGit(withRemote) {
const cwd = tempy.directory();
export async function initGit(withRemote) {
const cwd = temporaryDirectory();
const args = withRemote ? ['--bare', '--initial-branch=master'] : ['--initial-branch=master'];
await execa('git', ['init', ...args], {cwd}).catch(() => {
@ -45,7 +45,7 @@ async function initGit(withRemote) {
* @param {String} [branch='master'] The branch to initialize.
* @return {String} The path of the clone if `withRemote` is `true`, the path of the repository otherwise.
*/
async function gitRepo(withRemote, branch = 'master') {
export async function gitRepo(withRemote, branch = 'master') {
let {cwd, repositoryUrl} = await initGit(withRemote);
if (withRemote) {
await initBareRepo(repositoryUrl, branch);
@ -70,8 +70,8 @@ async function gitRepo(withRemote, branch = 'master') {
* @param {String} repositoryUrl The URL of the bare repository.
* @param {String} [branch='master'] the branch to initialize.
*/
async function initBareRepo(repositoryUrl, branch = 'master') {
const cwd = tempy.directory();
export async function initBareRepo(repositoryUrl, branch = 'master') {
const cwd = temporaryDirectory();
await execa('git', ['clone', '--no-hardlinks', repositoryUrl, cwd], {cwd});
await gitCheckout(branch, true, {cwd});
await gitCommits(['Initial commit'], {cwd});
@ -86,7 +86,7 @@ async function initBareRepo(repositoryUrl, branch = 'master') {
*
* @returns {Array<Commit>} The created commits, in reverse order (to match `git log` order).
*/
async function gitCommits(messages, execaOptions) {
export async function gitCommits(messages, execaOptions) {
await pEachSeries(
messages,
async (message) =>
@ -103,7 +103,7 @@ async function gitCommits(messages, execaOptions) {
*
* @return {Array<Object>} The list of parsed commits.
*/
async function gitGetCommits(from, execaOptions) {
export async function gitGetCommits(from, execaOptions) {
Object.assign(gitLogParser.fields, {hash: 'H', message: 'B', gitTags: 'd', committerDate: {key: 'ci', type: Date}});
return (
await getStream.array(
@ -126,7 +126,7 @@ async function gitGetCommits(from, execaOptions) {
* @param {Boolean} create `true` to create the branch, `false` to checkout an existing branch.
* @param {Object} [execaOpts] Options to pass to `execa`.
*/
async function gitCheckout(branch, create, execaOptions) {
export async function gitCheckout(branch, create, execaOptions) {
await execa('git', create ? ['checkout', '-b', branch] : ['checkout', branch], execaOptions);
}
@ -136,7 +136,7 @@ async function gitCheckout(branch, create, execaOptions) {
* @param {String} repositoryUrl The repository remote URL.
* @param {Object} [execaOpts] Options to pass to `execa`.
*/
async function gitFetch(repositoryUrl, execaOptions) {
export async function gitFetch(repositoryUrl, execaOptions) {
await execa('git', ['fetch', repositoryUrl], execaOptions);
}
@ -147,7 +147,7 @@ async function gitFetch(repositoryUrl, execaOptions) {
*
* @return {String} The sha of the head commit in the current git repository.
*/
async function gitHead(execaOptions) {
export async function gitHead(execaOptions) {
return (await execa('git', ['rev-parse', 'HEAD'], execaOptions)).stdout;
}
@ -158,7 +158,7 @@ async function gitHead(execaOptions) {
* @param {String} [sha] The commit on which to create the tag. If undefined the tag is created on the last commit.
* @param {Object} [execaOpts] Options to pass to `execa`.
*/
async function gitTagVersion(tagName, sha, execaOptions) {
export async function gitTagVersion(tagName, sha, execaOptions) {
await execa('git', sha ? ['tag', '-f', tagName, sha] : ['tag', tagName], execaOptions);
}
@ -171,8 +171,8 @@ async function gitTagVersion(tagName, sha, execaOptions) {
* @param {Number} [depth=1] The number of commits to clone.
* @return {String} The path of the cloned repository.
*/
async function gitShallowClone(repositoryUrl, branch = 'master', depth = 1) {
const cwd = tempy.directory();
export async function gitShallowClone(repositoryUrl, branch = 'master', depth = 1) {
const cwd = temporaryDirectory();
await execa('git', ['clone', '--no-hardlinks', '--no-tags', '-b', branch, '--depth', depth, repositoryUrl, cwd], {
cwd,
@ -187,8 +187,8 @@ async function gitShallowClone(repositoryUrl, branch = 'master', depth = 1) {
* @param {Number} head A commit sha of the remote repo that will become the detached head of the new one.
* @return {String} The path of the new repository.
*/
async function gitDetachedHead(repositoryUrl, head) {
const cwd = tempy.directory();
export async function gitDetachedHead(repositoryUrl, head) {
const cwd = temporaryDirectory();
await execa('git', ['init'], {cwd});
await execa('git', ['remote', 'add', 'origin', repositoryUrl], {cwd});
@ -197,8 +197,8 @@ async function gitDetachedHead(repositoryUrl, head) {
return cwd;
}
async function gitDetachedHeadFromBranch(repositoryUrl, branch, head) {
const cwd = tempy.directory();
export async function gitDetachedHeadFromBranch(repositoryUrl, branch, head) {
const cwd = temporaryDirectory();
await execa('git', ['init'], {cwd});
await execa('git', ['remote', 'add', 'origin', repositoryUrl], {cwd});
@ -215,7 +215,7 @@ async function gitDetachedHeadFromBranch(repositoryUrl, branch, head) {
* @param {String} value Config value.
* @param {Object} [execaOpts] Options to pass to `execa`.
*/
async function gitAddConfig(name, value, execaOptions) {
export async function gitAddConfig(name, value, execaOptions) {
await execa('git', ['config', '--add', name, value], execaOptions);
}
@ -227,7 +227,7 @@ async function gitAddConfig(name, value, execaOptions) {
*
* @return {String} The sha of the commit associated with `tagName` on the local repository.
*/
async function gitTagHead(tagName, execaOptions) {
export async function gitTagHead(tagName, execaOptions) {
return (await execa('git', ['rev-list', '-1', tagName], execaOptions)).stdout;
}
@ -240,7 +240,7 @@ async function gitTagHead(tagName, execaOptions) {
*
* @return {String} The sha of the commit associated with `tagName` on the remote repository.
*/
async function gitRemoteTagHead(repositoryUrl, tagName, execaOptions) {
export async function gitRemoteTagHead(repositoryUrl, tagName, execaOptions) {
return (await execa('git', ['ls-remote', '--tags', repositoryUrl, tagName], execaOptions)).stdout
.split('\n')
.filter((tag) => Boolean(tag))
@ -255,7 +255,7 @@ async function gitRemoteTagHead(repositoryUrl, tagName, execaOptions) {
*
* @return {String} The tag associated with the given sha, or `null`.
*/
async function gitCommitTag(gitHead, execaOptions) {
export async function gitCommitTag(gitHead, execaOptions) {
return (await execa('git', ['describe', '--tags', '--exact-match', gitHead], execaOptions)).stdout;
}
@ -268,7 +268,7 @@ async function gitCommitTag(gitHead, execaOptions) {
*
* @throws {Error} if the push failed.
*/
async function gitPush(repositoryUrl, branch, execaOptions) {
export async function gitPush(repositoryUrl, branch, execaOptions) {
await execa('git', ['push', '--tags', repositoryUrl, `HEAD:${branch}`], execaOptions);
}
@ -278,7 +278,7 @@ async function gitPush(repositoryUrl, branch, execaOptions) {
* @param {String} ref The ref to merge.
* @param {Object} [execaOpts] Options to pass to `execa`.
*/
async function merge(ref, execaOptions) {
export async function merge(ref, execaOptions) {
await execa('git', ['merge', '--no-ff', ref], execaOptions);
}
@ -288,7 +288,7 @@ async function merge(ref, execaOptions) {
* @param {String} ref The ref to merge.
* @param {Object} [execaOpts] Options to pass to `execa`.
*/
async function mergeFf(ref, execaOptions) {
export async function mergeFf(ref, execaOptions) {
await execa('git', ['merge', '--ff', ref], execaOptions);
}
@ -298,7 +298,7 @@ async function mergeFf(ref, execaOptions) {
* @param {String} ref The ref to merge.
* @param {Object} [execaOpts] Options to pass to `execa`.
*/
async function rebase(ref, execaOptions) {
export async function rebase(ref, execaOptions) {
await execa('git', ['rebase', ref], execaOptions);
}
@ -309,7 +309,7 @@ async function rebase(ref, execaOptions) {
* @param {String} ref The ref to add the note to.
* @param {Object} [execaOpts] Options to pass to `execa`.
*/
async function gitAddNote(note, ref, execaOptions) {
export async function gitAddNote(note, ref, execaOptions) {
await execa('git', ['notes', '--ref', GIT_NOTE_REF, 'add', '-m', note, ref], execaOptions);
}
@ -319,31 +319,6 @@ async function gitAddNote(note, ref, execaOptions) {
* @param {String} ref The ref to get the note from.
* @param {Object} [execaOpts] Options to pass to `execa`.
*/
async function gitGetNote(ref, execaOptions) {
export async function gitGetNote(ref, execaOptions) {
return (await execa('git', ['notes', '--ref', GIT_NOTE_REF, 'show', ref], execaOptions)).stdout;
}
module.exports = {
initGit,
gitRepo,
initBareRepo,
gitCommits,
gitGetCommits,
gitCheckout,
gitFetch,
gitHead,
gitTagVersion,
gitShallowClone,
gitDetachedHead,
gitDetachedHeadFromBranch,
gitAddConfig,
gitTagHead,
gitRemoteTagHead,
gitCommitTag,
gitPush,
merge,
mergeFf,
rebase,
gitAddNote,
gitGetNote,
};
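Dropping the aggregate `module.exports = {...}` block at the bottom in favour of `export` on each declaration is what allows the alphabetised named-import lists seen in the test files. In sketch form, reusing the `gitHead` helper shown above:

import {execa} from 'execa';

export async function gitHead(execaOptions) {
  return (await execa('git', ['rev-parse', 'HEAD'], execaOptions)).stdout;
}
// no trailing export list to keep in sync; consumers write
// import {gitHead} from './git-utils.js';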

View File

@ -1,7 +1,7 @@
const Docker = require('dockerode');
const getStream = require('get-stream');
const pRetry = require('p-retry');
const {initBareRepo, gitShallowClone} = require('./git-utils');
import Docker from 'dockerode';
import getStream from 'get-stream';
import pRetry from 'p-retry';
import {gitShallowClone, initBareRepo} from './git-utils.js';
const IMAGE = 'semanticrelease/docker-gitbox:latest';
const SERVER_PORT = 80;
@ -12,12 +12,12 @@ const GIT_PASSWORD = 'suchsecure';
const docker = new Docker();
let container;
const gitCredential = `${GIT_USERNAME}:${GIT_PASSWORD}`;
export const gitCredential = `${GIT_USERNAME}:${GIT_PASSWORD}`;
/**
* Download the `gitbox` Docker image, create a new container and start it.
*/
async function start() {
export async function start() {
await getStream(await docker.pull(IMAGE));
container = await docker.createContainer({
@ -38,7 +38,7 @@ async function start() {
/**
* Stop and remove the `gitbox` Docker container.
*/
async function stop() {
export async function stop() {
await container.stop();
await container.remove();
}
@ -51,7 +51,7 @@ async function stop() {
* @param {String} [description=`Repository ${name}`] The repository description.
* @return {Object} The `repositoryUrl` (URL without auth) and `authUrl` (URL with auth).
*/
async function createRepo(name, branch = 'master', description = `Repository ${name}`) {
export async function createRepo(name, branch = 'master', description = `Repository ${name}`) {
const exec = await container.exec({
Cmd: ['repo-admin', '-n', name, '-d', description],
AttachStdout: true,
@ -68,5 +68,3 @@ async function createRepo(name, branch = 'master', description = `Repository ${n
return {cwd, repositoryUrl, authUrl};
}
module.exports = {start, stop, gitCredential, createRepo};

View File

@ -1,8 +1,8 @@
const Docker = require('dockerode');
const getStream = require('get-stream');
const got = require('got');
const pRetry = require('p-retry');
const {mockServerClient} = require('mockserver-client');
import Docker from 'dockerode';
import getStream from 'get-stream';
import got from 'got';
import pRetry from 'p-retry';
import {mockServerClient} from 'mockserver-client';
const IMAGE = 'mockserver/mockserver:latest';
const MOCK_SERVER_PORT = 1080;
@ -13,7 +13,7 @@ let container;
/**
* Download the `mockserver` Docker image, create a new container and start it.
*/
async function start() {
export async function start() {
await getStream(await docker.pull(IMAGE));
container = await docker.createContainer({
@ -38,7 +38,7 @@ async function start() {
/**
* Stop and remove the `mockserver` Docker container.
*/
async function stop() {
export async function stop() {
await container.stop();
await container.remove();
}
@ -50,7 +50,7 @@ const client = mockServerClient(MOCK_SERVER_HOST, MOCK_SERVER_PORT);
/**
* @type {string} the url of the `mockserver` instance
*/
const url = `http://${MOCK_SERVER_HOST}:${MOCK_SERVER_PORT}`;
export const url = `http://${MOCK_SERVER_HOST}:${MOCK_SERVER_PORT}`;
/**
* Set up the `mockserver` instance response for a specific request.
@ -65,7 +65,7 @@ const url = `http://${MOCK_SERVER_HOST}:${MOCK_SERVER_PORT}`;
* @param {Object} response.body The JSON object to respond in the response body.
* @return {Object} An object representation of the expectation. Pass it to the `verify` function to validate that the `mockserver` has been called with a `request` matching the expectations.
*/
async function mock(
export async function mock(
path,
{body: requestBody, headers: requestHeaders},
{method = 'POST', statusCode = 200, body: responseBody}
@ -96,8 +96,6 @@ async function mock(
 * @param {Object} expectation The expectation created with the `mock` function.
 * @return {Promise} A Promise that resolves if the expectation is met and rejects otherwise.
*/
function verify(expectation) {
export function verify(expectation) {
return client.verify(expectation);
}
module.exports = {start, stop, mock, verify, url};
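A hedged sketch (not part of the commit) of the mock-then-verify flow these exports support; the path, payloads and headers are illustrative:

import * as mockServer from './helpers/mockserver.js';

await mockServer.start();
// method and statusCode default to 'POST' and 200 when omitted
const expectation = await mockServer.mock(
  '/repos/owner/example/releases',
  {body: {tag_name: 'v1.0.0'}, headers: []},
  {body: {html_url: `${mockServer.url}/release`}}
);
// ...run the code expected to call the mocked endpoint...
await mockServer.verify(expectation);
await mockServer.stop();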

View File

@ -1,9 +1,10 @@
const Docker = require('dockerode');
const getStream = require('get-stream');
const got = require('got');
const path = require('path');
const delay = require('delay');
const pRetry = require('p-retry');
import path, {dirname} from 'node:path';
import {fileURLToPath} from 'node:url';
import Docker from 'dockerode';
import getStream from 'get-stream';
import got from 'got';
import delay from 'delay';
import pRetry from 'p-retry';
const IMAGE = 'verdaccio/verdaccio:4';
const REGISTRY_PORT = 4873;
@ -12,12 +13,13 @@ const NPM_USERNAME = 'integration';
const NPM_PASSWORD = 'suchsecure';
const NPM_EMAIL = 'integration@test.com';
const docker = new Docker();
const __dirname = dirname(fileURLToPath(import.meta.url));
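// Editorial note (hedged): ES modules do not expose the CommonJS `__dirname`, so an equivalent is
// rebuilt from `import.meta.url` for the path handling below.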
let container;
/**
* Download the `npm-registry-docker` Docker image, create a new container and start it.
*/
async function start() {
export async function start() {
await getStream(await docker.pull(IMAGE));
container = await docker.createContainer({
@ -55,9 +57,9 @@ async function start() {
});
}
const url = `http://${REGISTRY_HOST}:${REGISTRY_PORT}/`;
export const url = `http://${REGISTRY_HOST}:${REGISTRY_PORT}/`;
const authEnv = {
export const authEnv = {
npm_config_registry: url, // eslint-disable-line camelcase
NPM_USERNAME,
NPM_PASSWORD,
@ -67,9 +69,7 @@ const authEnv = {
/**
 * Stop and remove the `npm-registry-docker` Docker container.
*/
async function stop() {
export async function stop() {
await container.stop();
await container.remove();
}
module.exports = {start, stop, authEnv, url};
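A hedged sketch (not part of the commit) of the lifecycle these exports support; the environment handling is illustrative:

import * as npmRegistry from './helpers/npm-registry.js';

await npmRegistry.start();
// authEnv carries the registry URL and credentials to pass to child processes under test
const testEnv = {...process.env, ...npmRegistry.authEnv};
// ...run the code under test against npmRegistry.url with testEnv...
await npmRegistry.stop();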

View File

@ -1,7 +1,5 @@
const execa = require('execa');
import {execa} from 'execa';
async function npmView(packageName, env) {
export async function npmView(packageName, env) {
return JSON.parse((await execa('npm', ['view', packageName, '--json'], {env})).stdout);
}
module.exports = {npmView};
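And a hedged sketch (not part of the commit) pairing `npmView` with the registry helper above; the package name is illustrative:

import {npmView} from './helpers/npm-utils.js';
import {authEnv} from './helpers/npm-registry.js';

// npmView shells out to `npm view <name> --json` and returns the parsed metadata
const viewResult = await npmView('example-package', authEnv);
console.log(viewResult['dist-tags']);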

View File

@ -1,7 +1,7 @@
const test = require('ava');
const {repeat} = require('lodash');
const hideSensitive = require('../lib/hide-sensitive');
const {SECRET_REPLACEMENT, SECRET_MIN_SIZE} = require('../lib/definitions/constants');
import test from 'ava';
import {repeat} from 'lodash-es';
import hideSensitive from '../lib/hide-sensitive.js';
import {SECRET_MIN_SIZE, SECRET_REPLACEMENT} from '../lib/definitions/constants.js';
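// Editorial note (hedged): `lodash-es` is the ES-module distribution of lodash, which keeps named
// imports such as `repeat` usable from this now-ESM test file.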
test('Replace multiple sensitive environment variable values', (t) => {
const env = {SOME_PASSWORD: 'password', SOME_TOKEN: 'secret'};

View File

@ -1,29 +1,28 @@
const test = require('ava');
const {escapeRegExp, isString, sortBy, omit} = require('lodash');
const td = require('testdouble');
const {spy, stub} = require('sinon');
const {WritableStreamBuffer} = require('stream-buffers');
const AggregateError = require('aggregate-error');
const SemanticReleaseError = require('@semantic-release/error');
const {COMMIT_NAME, COMMIT_EMAIL, SECRET_REPLACEMENT} = require('../lib/definitions/constants');
const {
gitHead: getGitHead,
import test from 'ava';
import {escapeRegExp, isString, omit, sortBy} from 'lodash-es';
import * as td from 'testdouble';
import {spy, stub} from 'sinon';
import {WritableStreamBuffer} from 'stream-buffers';
import AggregateError from 'aggregate-error';
import SemanticReleaseError from '@semantic-release/error';
import {COMMIT_EMAIL, COMMIT_NAME, SECRET_REPLACEMENT} from '../lib/definitions/constants.js';
import {
gitAddNote,
gitCheckout,
gitTagHead,
gitRepo,
gitCommits,
gitTagVersion,
gitRemoteTagHead,
gitGetNote,
gitHead as getGitHead,
gitPush,
gitRemoteTagHead,
gitRepo,
gitShallowClone,
gitTagHead,
gitTagVersion,
merge,
mergeFf,
rebase,
gitAddNote,
gitGetNote,
} = require('./helpers/git-utils');
const pluginNoop = require.resolve('./fixtures/plugin-noop');
rebase
} from './helpers/git-utils.js';
import pluginNoop from './fixtures/plugin-noop.cjs';
test.beforeEach((t) => {
// Stub the logger functions
@ -143,9 +142,9 @@ test('Plugins are called with expected values', async (t) => {
{...nextRelease, notes: `${notes1}\n\n${notes2}\n\n${notes3}`, pluginName: pluginNoop},
];
td.replace('../lib/get-logger', () => t.context.logger);
td.replace('env-ci', () => envCi);
const semanticRelease = require('..');
await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
await td.replaceEsm('env-ci', null, () => envCi);
const semanticRelease = (await import('../index.js')).default;
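// Editorial note (hedged): testdouble's ESM replacement is asynchronous and must complete before the
// module under test is loaded, hence the switch from a synchronous require to replaceEsm followed by a
// dynamic import resolved to its default export.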
const result = await semanticRelease(options, {
cwd,
env,
@ -418,9 +417,9 @@ test('Use custom tag format', async (t) => {
fail: stub().resolves(),
};
td.replace('../lib/get-logger', () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..');
await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = (await import('../index.js')).default;
t.truthy(
await semanticRelease(options, {
cwd,
@ -476,9 +475,9 @@ test('Use new gitHead, and recreate release notes if a prepare plugin create a c
fail: stub().resolves(),
};
td.replace('../lib/get-logger', () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..');
await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = (await import('../index.js')).default;
t.truthy(
await semanticRelease(options, {
@ -542,9 +541,9 @@ test('Make a new release when a commit is forward-ported to an upper branch', as
success,
};
td.replace('../lib/get-logger', () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..');
await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = (await import('../index.js')).default;
t.truthy(await semanticRelease(options, {cwd, env: {}, stdout: {write: () => {}}, stderr: {write: () => {}}}));
t.is(addChannel.callCount, 0);
@ -576,9 +575,9 @@ test('Publish a pre-release version', async (t) => {
fail: stub().resolves(),
};
td.replace('../lib/get-logger', () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'beta', isPr: false}));
const semanticRelease = require('..');
await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'beta', isPr: false}));
const semanticRelease = (await import('../index.js')).default;
let {releases} = await semanticRelease(options, {cwd, env: {}, stdout: {write: () => {}}, stderr: {write: () => {}}});
t.is(releases.length, 1);
@ -628,9 +627,9 @@ test('Publish releases from different branch on the same channel', async (t) =>
fail: stub().resolves(),
};
td.replace('../lib/get-logger', () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'next', isPr: false}));
let semanticRelease = require('..');
await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'next', isPr: false}));
let semanticRelease = (await import('../index.js')).default;
let {releases} = await semanticRelease(options, {cwd, env: {}, stdout: {write: () => {}}, stderr: {write: () => {}}});
t.is(releases.length, 1);
@ -653,9 +652,9 @@ test('Publish releases from different branch on the same channel', async (t) =>
await merge('next', {cwd});
await gitPush('origin', 'master', {cwd});
td.replace('../lib/get-logger', () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false}));
semanticRelease = require('..');
await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
semanticRelease = (await import('../index.js')).default;
t.falsy(await semanticRelease(options, {cwd, env: {}, stdout: {write: () => {}}, stderr: {write: () => {}}}));
t.is(addChannel.callCount, 0);
@ -686,9 +685,9 @@ test('Publish pre-releases the same channel as regular releases', async (t) => {
fail: stub().resolves(),
};
td.replace('../lib/get-logger', () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'beta', isPr: false}));
const semanticRelease = require('..');
await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'beta', isPr: false}));
const semanticRelease = (await import('../index.js')).default;
let {releases} = await semanticRelease(options, {cwd, env: {}, stdout: {write: () => {}}, stderr: {write: () => {}}});
t.is(releases.length, 1);
@ -751,9 +750,9 @@ test('Do not add pre-releases to a different channel', async (t) => {
success,
};
td.replace('../lib/get-logger', () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..');
await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = (await import('../index.js')).default;
t.truthy(await semanticRelease(options, {cwd, env: {}, stdout: {write: () => {}}, stderr: {write: () => {}}}));
t.is(addChannel.callCount, 0);
@ -819,9 +818,9 @@ async function addChannelMacro(t, mergeFunction) {
gitHead: commits[2].hash,
};
td.replace('../lib/get-logger', () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..');
await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = (await import('../index.js')).default;
const result = await semanticRelease(options, {cwd, env: {}, stdout: {write: () => {}}, stderr: {write: () => {}}});
t.deepEqual(result.releases, [
@ -885,9 +884,9 @@ test('Call all "success" plugins even if one errors out', async (t) => {
success: [success1, success2],
};
td.replace('../lib/get-logger', () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..');
await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = (await import('../index.js')).default;
await t.throwsAsync(
semanticRelease(options, {cwd, env: {}, stdout: new WritableStreamBuffer(), stderr: new WritableStreamBuffer()})
@ -929,9 +928,9 @@ test('Log all "verifyConditions" errors', async (t) => {
fail,
};
td.replace('../lib/get-logger', () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..');
await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = (await import('../index.js')).default;
const errors = [
...(await t.throwsAsync(
semanticRelease(options, {cwd, env: {}, stdout: new WritableStreamBuffer(), stderr: new WritableStreamBuffer()})
@ -973,9 +972,9 @@ test('Log all "verifyRelease" errors', async (t) => {
fail,
};
td.replace('../lib/get-logger', () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..');
await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = (await import('../index.js')).default;
const errors = [
...(await t.throwsAsync(
semanticRelease(options, {cwd, env: {}, stdout: new WritableStreamBuffer(), stderr: new WritableStreamBuffer()})
@ -1026,9 +1025,9 @@ test('Dry-run skips addChannel, prepare, publish and success', async (t) => {
success,
};
td.replace('../lib/get-logger', () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..');
await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = (await import('../index.js')).default;
t.truthy(
await semanticRelease(options, {
cwd,
@ -1078,9 +1077,9 @@ test('Dry-run skips fail', async (t) => {
fail,
};
td.replace('../lib/get-logger', () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..');
await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = (await import('../index.js')).default;
const errors = [
...(await t.throwsAsync(
semanticRelease(options, {cwd, env: {}, stdout: new WritableStreamBuffer(), stderr: new WritableStreamBuffer()})
@ -1137,9 +1136,9 @@ test('Force a dry-run if not on a CI and "noCi" is not explicitly set', async (t
fail: stub().resolves(),
};
td.replace('../lib/get-logger', () => t.context.logger);
td.replace('env-ci', () => ({isCi: false, branch: 'master'}));
const semanticRelease = require('..');
await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
await td.replaceEsm('env-ci', null, () => ({isCi: false, branch: 'master'}));
const semanticRelease = (await import('../index.js')).default;
t.truthy(
await semanticRelease(options, {
cwd,
@ -1186,9 +1185,9 @@ test('Dry-run does not print changelog if "generateNotes" return "undefined"', a
success: false,
};
td.replace('../lib/get-logger', () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..');
await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = (await import('../index.js')).default;
t.truthy(
await semanticRelease(options, {
cwd,
@ -1244,9 +1243,9 @@ test('Allow local releases with "noCi" option', async (t) => {
fail: stub().resolves(),
};
td.replace('../lib/get-logger', () => t.context.logger);
td.replace('env-ci', () => ({isCi: false, branch: 'master', isPr: false}));
const semanticRelease = require('..');
await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
await td.replaceEsm('env-ci', null, () => ({isCi: false, branch: 'master', isPr: false}));
const semanticRelease = (await import('../index.js')).default;
t.truthy(
await semanticRelease(options, {
cwd,
@ -1313,9 +1312,9 @@ test('Accept "undefined" value returned by "generateNotes" and "false" by "publi
fail: stub().resolves(),
};
td.replace('../lib/get-logger', () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..');
await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = (await import('../index.js')).default;
t.truthy(
await semanticRelease(options, {
cwd,
@ -1341,9 +1340,9 @@ test('Returns false if triggered by a PR', async (t) => {
// Create a git repository, set the current working directory at the root of the repo
const {cwd, repositoryUrl} = await gitRepo(true);
td.replace('../lib/get-logger', () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', prBranch: 'patch-1', isPr: true}));
const semanticRelease = require('..');
await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', prBranch: 'patch-1', isPr: true}));
const semanticRelease = (await import('../index.js')).default;
t.false(
await semanticRelease(
@ -1393,9 +1392,9 @@ test('Throws "EINVALIDNEXTVERSION" if next release is out of range of the curren
success,
};
td.replace('../lib/get-logger', () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: '1.x', isPr: false}));
const semanticRelease = require('..');
await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: '1.x', isPr: false}));
const semanticRelease = (await import('../index.js')).default;
const error = await t.throwsAsync(
semanticRelease(options, {cwd, env: {}, stdout: {write: () => {}}, stderr: {write: () => {}}})
@ -1444,9 +1443,9 @@ test('Throws "EINVALIDNEXTVERSION" if next release is out of range of the curren
success,
};
td.replace('../lib/get-logger', () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..');
await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = (await import('../index.js')).default;
const error = await t.throwsAsync(
semanticRelease(options, {cwd, env: {}, stdout: {write: () => {}}, stderr: {write: () => {}}})
@ -1503,9 +1502,9 @@ test('Throws "EINVALIDMAINTENANCEMERGE" if merge an out of range release in a ma
fail,
};
td.replace('../lib/get-logger', () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: '1.1.x', isPr: false}));
const semanticRelease = require('..');
await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: '1.1.x', isPr: false}));
const semanticRelease = (await import('../index.js')).default;
const errors = [
...(await t.throwsAsync(
semanticRelease(options, {cwd, env: {}, stdout: {write: () => {}}, stderr: {write: () => {}}})
@ -1539,9 +1538,9 @@ test('Returns false value if triggered on an outdated clone', async (t) => {
await gitCommits(['Third'], {cwd});
await gitPush(repositoryUrl, 'master', {cwd});
td.replace('../lib/get-logger', () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..');
await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = (await import('../index.js')).default;
t.false(
await semanticRelease(
@ -1571,9 +1570,9 @@ test('Returns false if not running from the configured branch', async (t) => {
fail: stub().resolves(),
};
td.replace('../lib/get-logger', () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'other-branch', isPr: false}));
const semanticRelease = require('..');
await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'other-branch', isPr: false}));
const semanticRelease = (await import('../index.js')).default;
t.false(
await semanticRelease(options, {
@ -1615,9 +1614,9 @@ test('Returns false if there is no relevant changes', async (t) => {
fail: stub().resolves(),
};
td.replace('../lib/get-logger', () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..');
await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = (await import('../index.js')).default;
t.false(
await semanticRelease(options, {
@ -1670,9 +1669,9 @@ test('Exclude commits with [skip release] or [release skip] from analysis', asyn
fail: stub().resolves(),
};
td.replace('../lib/get-logger', () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..');
await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = (await import('../index.js')).default;
await semanticRelease(options, {
cwd,
env: {},
@ -1697,9 +1696,9 @@ test('Log both plugins errors and errors thrown by "fail" plugin', async (t) =>
verifyConditions: stub().rejects(pluginError),
fail: [stub().rejects(failError1), stub().rejects(failError2)],
};
td.replace('../lib/get-logger', () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..');
await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = (await import('../index.js')).default;
await t.throwsAsync(
semanticRelease(options, {cwd, env: {}, stdout: new WritableStreamBuffer(), stderr: new WritableStreamBuffer()})
@ -1721,9 +1720,9 @@ test('Call "fail" only if a plugin returns a SemanticReleaseError', async (t) =>
verifyConditions: stub().rejects(pluginError),
fail,
};
td.replace('../lib/get-logger', () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..');
await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = (await import('../index.js')).default;
await t.throwsAsync(
semanticRelease(options, {cwd, env: {}, stdout: new WritableStreamBuffer(), stderr: new WritableStreamBuffer()})
@ -1737,9 +1736,9 @@ test('Throw SemanticReleaseError if repositoryUrl is not set and cannot be found
// Create a git repository, set the current working directory at the root of the repo
const {cwd} = await gitRepo();
td.replace('../lib/get-logger', () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..');
await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = (await import('../index.js')).default;
const errors = [
...(await t.throwsAsync(
semanticRelease({}, {cwd, env: {}, stdout: new WritableStreamBuffer(), stderr: new WritableStreamBuffer()})
@ -1776,9 +1775,9 @@ test('Throw an Error if plugin returns an unexpected value', async (t) => {
fail: stub().resolves(),
};
td.replace('../lib/get-logger', () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..');
await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = (await import('../index.js')).default;
const error = await t.throwsAsync(
semanticRelease(options, {cwd, env: {}, stdout: new WritableStreamBuffer(), stderr: new WritableStreamBuffer()}),
{instanceOf: SemanticReleaseError}
@ -1805,9 +1804,9 @@ test('Hide sensitive information passed to "fail" plugin', async (t) => {
fail,
};
td.replace('../lib/get-logger', () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..');
await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = (await import('../index.js')).default;
await t.throwsAsync(
semanticRelease(options, {cwd, env, stdout: new WritableStreamBuffer(), stderr: new WritableStreamBuffer()})
);
@ -1849,9 +1848,9 @@ test('Hide sensitive information passed to "success" plugin', async (t) => {
fail: stub().resolves(),
};
td.replace('../lib/get-logger', () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..');
await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = (await import('../index.js')).default;
await semanticRelease(options, {cwd, env, stdout: new WritableStreamBuffer(), stderr: new WritableStreamBuffer()});
const release = success.args[0][1].releases[0];
@ -1898,9 +1897,9 @@ test('Get all commits including the ones not in the shallow clone', async (t) =>
fail: stub().resolves(),
};
td.replace('../lib/get-logger', () => t.context.logger);
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..');
await td.replaceEsm('../lib/get-logger.js', null, () => t.context.logger);
await td.replaceEsm('env-ci', null, () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = (await import('../index.js')).default;
t.truthy(
await semanticRelease(options, {
cwd,

View File

@ -1,28 +1,31 @@
const path = require('path');
const test = require('ava');
const td = require('testdouble');
const {escapeRegExp} = require('lodash');
const {writeJson, readJson} = require('fs-extra');
const execa = require('execa');
const {WritableStreamBuffer} = require('stream-buffers');
const delay = require('delay');
const getAuthUrl = require('../lib/get-git-auth-url');
const {SECRET_REPLACEMENT} = require('../lib/definitions/constants');
const {
gitHead,
gitTagHead,
gitRepo,
gitCommits,
gitRemoteTagHead,
gitPush,
import path from 'path';
import test from 'ava';
import * as td from 'testdouble';
import {escapeRegExp} from 'lodash-es';
import fsExtra from 'fs-extra';
import {execa} from 'execa';
import {WritableStreamBuffer} from 'stream-buffers';
import delay from 'delay';
import getAuthUrl from '../lib/get-git-auth-url.js';
import {SECRET_REPLACEMENT} from '../lib/definitions/constants.js';
import {
gitCheckout,
merge,
gitCommits,
gitGetNote,
} = require('./helpers/git-utils');
const {npmView} = require('./helpers/npm-utils');
const gitbox = require('./helpers/gitbox');
const mockServer = require('./helpers/mockserver');
const npmRegistry = require('./helpers/npm-registry');
gitHead,
gitPush,
gitRemoteTagHead,
gitRepo,
gitTagHead,
merge
} from './helpers/git-utils.js';
import {npmView} from './helpers/npm-utils.js';
import * as gitbox from './helpers/gitbox.js';
import * as mockServer from './helpers/mockserver.js';
import * as npmRegistry from './helpers/npm-registry.js';
const {readJson, writeJson} = fsExtra;
/* eslint camelcase: ["error", {properties: "never"}] */
@ -47,10 +50,10 @@ const npmTestEnv = {
LEGACY_TOKEN: Buffer.from(`${env.NPM_USERNAME}:${env.NPM_PASSWORD}`, 'utf8').toString('base64'),
};
const cli = require.resolve('../bin/semantic-release');
const pluginError = require.resolve('./fixtures/plugin-error');
const pluginInheritedError = require.resolve('./fixtures/plugin-error-inherited');
const pluginLogEnv = require.resolve('./fixtures/plugin-log-env');
const cli = path.resolve('./bin/semantic-release.js');
const pluginError = path.resolve('./test/fixtures/plugin-error');
const pluginInheritedError = path.resolve('./test/fixtures/plugin-error-inherited');
const pluginLogEnv = path.resolve('./test/fixtures/plugin-log-env');
test.before(async () => {
await Promise.all([gitbox.start(), npmRegistry.start(), mockServer.start()]);
@ -509,7 +512,7 @@ test('Pass options via CLI arguments', async (t) => {
test('Run via JS API', async (t) => {
td.replace('../lib/logger', {log: () => {}, error: () => {}, stdout: () => {}});
td.replace('env-ci', () => ({isCi: true, branch: 'master', isPr: false}));
const semanticRelease = require('..');
const semanticRelease = (await import('../index.js')).default;
const packageName = 'test-js-api';
const owner = 'git';
// Create a git repository, set the current working directory at the root of the repo
@ -656,6 +659,8 @@ test('Hide sensitive environment variable values from the logs', async (t) => {
extendEnv: false,
});
console.log({stderr})
t.regex(stdout, new RegExp(`Console: Exposing token ${escapeRegExp(SECRET_REPLACEMENT)}`));
t.regex(stdout, new RegExp(`Log: Exposing token ${escapeRegExp(SECRET_REPLACEMENT)}`));
t.regex(stderr, new RegExp(`Error: Console token ${escapeRegExp(SECRET_REPLACEMENT)}`));

View File

@ -1,7 +1,7 @@
const test = require('ava');
const {noop} = require('lodash');
const {stub} = require('sinon');
const normalize = require('../../lib/plugins/normalize');
import test from 'ava';
import {noop} from 'lodash-es';
import {stub} from 'sinon';
import normalize from '../../lib/plugins/normalize.js';
const cwd = process.cwd();
@ -23,37 +23,37 @@ test('Normalize and load plugin from string', async (t) => {
const plugin = await normalize(
{cwd, options: {}, logger: t.context.logger},
'verifyConditions',
'./test/fixtures/plugin-noop',
'./test/fixtures/plugin-noop.cjs',
{}
);
t.is(plugin.pluginName, './test/fixtures/plugin-noop');
t.is(plugin.pluginName, './test/fixtures/plugin-noop.cjs');
t.is(typeof plugin, 'function');
t.deepEqual(t.context.success.args[0], ['Loaded plugin "verifyConditions" from "./test/fixtures/plugin-noop"']);
t.deepEqual(t.context.success.args[0], ['Loaded plugin "verifyConditions" from "./test/fixtures/plugin-noop.cjs"']);
});
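For orientation, a hedged sketch (not part of the commit) of a local plugin reference written the way these updated tests spell their paths, extension included; the file, branch and config-file names are illustrative:

// release.config.cjs (file name assumed for illustration)
module.exports = {
  branches: ['master'],
  plugins: [
    './tools/my-plugin.cjs', // hypothetical local plugin file, extension spelled out as in the paths above
    '@semantic-release/commit-analyzer',
  ],
};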
test('Normalize and load plugin from object', async (t) => {
const plugin = await normalize(
{cwd, options: {}, logger: t.context.logger},
'publish',
{path: './test/fixtures/plugin-noop'},
{path: './test/fixtures/plugin-noop.cjs'},
{}
);
t.is(plugin.pluginName, './test/fixtures/plugin-noop');
t.is(plugin.pluginName, './test/fixtures/plugin-noop.cjs');
t.is(typeof plugin, 'function');
t.deepEqual(t.context.success.args[0], ['Loaded plugin "publish" from "./test/fixtures/plugin-noop"']);
t.deepEqual(t.context.success.args[0], ['Loaded plugin "publish" from "./test/fixtures/plugin-noop.cjs"']);
});
test('Normalize and load plugin from a base file path', async (t) => {
const plugin = await normalize({cwd, options: {}, logger: t.context.logger}, 'verifyConditions', './plugin-noop', {
'./plugin-noop': './test/fixtures',
const plugin = await normalize({cwd, options: {}, logger: t.context.logger}, 'verifyConditions', './plugin-noop.cjs', {
'./plugin-noop.cjs': './test/fixtures',
});
t.is(plugin.pluginName, './plugin-noop');
t.is(plugin.pluginName, './plugin-noop.cjs');
t.is(typeof plugin, 'function');
t.deepEqual(t.context.success.args[0], [
'Loaded plugin "verifyConditions" from "./plugin-noop" in shareable config "./test/fixtures"',
'Loaded plugin "verifyConditions" from "./plugin-noop.cjs" in shareable config "./test/fixtures"',
]);
});
@ -72,7 +72,7 @@ test('Wrap plugin in a function that add the "pluginName" to multiple errors"',
'./plugin-errors': './test/fixtures',
});
const errors = [...(await t.throwsAsync(plugin({options: {}})))];
const errors = [...(await t.throwsAsync(plugin({options: {}}))).errors];
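// Editorial note (hedged): with the ESM-only major of `aggregate-error`, the thrown error is no longer
// iterable itself; the wrapped errors are exposed on its `.errors` property, which is why these spreads
// now read from `.errors`.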
for (const error of errors) {
t.is(error.pluginName, './plugin-errors');
}
@ -90,12 +90,12 @@ test('Normalize and load plugin that retuns multiple functions', async (t) => {
const plugin = await normalize(
{cwd, options: {}, logger: t.context.logger},
'verifyConditions',
'./test/fixtures/multi-plugin',
'./test/fixtures/multi-plugin.cjs',
{}
);
t.is(typeof plugin, 'function');
t.deepEqual(t.context.success.args[0], ['Loaded plugin "verifyConditions" from "./test/fixtures/multi-plugin"']);
t.deepEqual(t.context.success.args[0], ['Loaded plugin "verifyConditions" from "./test/fixtures/multi-plugin.cjs"']);
});
test('Wrap "analyzeCommits" plugin in a function that validate the output of the plugin', async (t) => {
@ -258,7 +258,7 @@ test('Always pass a defined "pluginConfig" for plugin defined with path', async
test('Throws an error if the plugin return an object without the expected plugin function', async (t) => {
const error = await t.throwsAsync(() =>
normalize({cwd, options: {}, logger: t.context.logger}, 'inexistantPlugin', './test/fixtures/multi-plugin', {})
normalize({cwd, options: {}, logger: t.context.logger}, 'nonExistentPlugin', './test/fixtures/multi-plugin.cjs', {})
);
t.is(error.code, 'EPLUGIN');
@ -269,7 +269,7 @@ test('Throws an error if the plugin return an object without the expected plugin
test('Throws an error if the plugin is not found', async (t) => {
await t.throwsAsync(
() => normalize({cwd, options: {}, logger: t.context.logger}, 'inexistantPlugin', 'non-existing-path', {}),
() => normalize({cwd, options: {}, logger: t.context.logger}, 'nonExistentPlugin', 'non-existing-path', {}),
{
message: /Cannot find module 'non-existing-path'/,
code: 'MODULE_NOT_FOUND',

View File

@ -1,7 +1,7 @@
const test = require('ava');
const {stub} = require('sinon');
const AggregateError = require('aggregate-error');
const pipeline = require('../../lib/plugins/pipeline');
import test from 'ava';
import {stub} from 'sinon';
import AggregateError from 'aggregate-error';
import pipeline from '../../lib/plugins/pipeline.js';
test('Execute each function in series passing the same input', async (t) => {
const step1 = stub().resolves(1);
@ -116,9 +116,9 @@ test('Throw all errors from the first step throwing an AggregateError', async (t
const step2 = stub().rejects(new AggregateError([error1, error2]));
const step3 = stub().resolves(3);
const errors = await t.throwsAsync(pipeline([step1, step2, step3])(0));
const error = await t.throwsAsync(pipeline([step1, step2, step3])(0));
t.deepEqual([...errors], [error1, error2]);
t.deepEqual([...error.errors], [error1, error2]);
t.true(step1.calledWith(0));
t.true(step2.calledWith(0));
t.true(step3.notCalled);
@ -131,9 +131,9 @@ test('Execute all even if a Promise rejects', async (t) => {
const step2 = stub().rejects(error1);
const step3 = stub().rejects(error2);
const errors = await t.throwsAsync(pipeline([step1, step2, step3], {settleAll: true})(0));
const error = await t.throwsAsync(pipeline([step1, step2, step3], {settleAll: true})(0));
t.deepEqual([...errors], [error1, error2]);
t.deepEqual([...error.errors], [error1, error2]);
t.true(step1.calledWith(0));
t.true(step2.calledWith(0));
t.true(step3.calledWith(0));
@ -147,9 +147,9 @@ test('Throw all errors from all steps throwing an AggregateError', async (t) =>
const step1 = stub().rejects(new AggregateError([error1, error2]));
const step2 = stub().rejects(new AggregateError([error3, error4]));
const errors = await t.throwsAsync(pipeline([step1, step2], {settleAll: true})(0));
const error = await t.throwsAsync(pipeline([step1, step2], {settleAll: true})(0));
t.deepEqual([...errors], [error1, error2, error3, error4]);
t.deepEqual([...error.errors], [error1, error2, error3, error4]);
t.true(step1.calledWith(0));
t.true(step2.calledWith(0));
});
@ -163,9 +163,9 @@ test('Execute each function in series passing a transformed input even if a step
const step4 = stub().resolves(4);
const getNextInput = (previousResult, result) => previousResult + result;
const errors = await t.throwsAsync(pipeline([step1, step2, step3, step4], {settleAll: true, getNextInput})(0));
const error = await t.throwsAsync(pipeline([step1, step2, step3, step4], {settleAll: true, getNextInput})(0));
t.deepEqual([...errors], [error2, error3]);
t.deepEqual([...error.errors], [error2, error3]);
t.true(step1.calledWith(0));
t.true(step2.calledWith(0 + 1));
t.true(step3.calledWith(0 + 1 + error2));

View File

@ -1,11 +1,11 @@
const path = require('path');
const test = require('ava');
const {copy, outputFile} = require('fs-extra');
const {stub} = require('sinon');
const tempy = require('tempy');
const getPlugins = require('../../lib/plugins');
import path from 'path';
import test from 'ava';
import {copy, outputFile} from 'fs-extra';
import {stub} from 'sinon';
import {temporaryDirectory} from 'tempy';
import getPlugins from '../../lib/plugins/index.js';
// Save the current working diretory
// Save the current working directory
const cwd = process.cwd();
test.beforeEach((t) => {
@ -35,9 +35,9 @@ test('Export plugins based on steps config', async (t) => {
cwd,
logger: t.context.logger,
options: {
verifyConditions: ['./test/fixtures/plugin-noop', {path: './test/fixtures/plugin-noop'}],
generateNotes: './test/fixtures/plugin-noop',
analyzeCommits: {path: './test/fixtures/plugin-noop'},
verifyConditions: ['./test/fixtures/plugin-noop.cjs', {path: './test/fixtures/plugin-noop.cjs'}],
generateNotes: './test/fixtures/plugin-noop.cjs',
analyzeCommits: {path: './test/fixtures/plugin-noop.cjs'},
verifyRelease: () => {},
},
},
@ -137,9 +137,9 @@ test('Unknown steps of plugins configured in "plugins" are ignored', async (t) =
});
test('Export plugins loaded from the dependency of a shareable config module', async (t) => {
const cwd = tempy.directory();
const cwd = temporaryDirectory();
await copy(
'./test/fixtures/plugin-noop.js',
'./test/fixtures/plugin-noop.cjs',
path.resolve(cwd, 'node_modules/shareable-config/node_modules/custom-plugin/index.js')
);
await outputFile(path.resolve(cwd, 'node_modules/shareable-config/index.js'), '');
@ -170,8 +170,8 @@ test('Export plugins loaded from the dependency of a shareable config module', a
});
test('Export plugins loaded from the dependency of a shareable config file', async (t) => {
const cwd = tempy.directory();
await copy('./test/fixtures/plugin-noop.js', path.resolve(cwd, 'plugin/plugin-noop.js'));
const cwd = temporaryDirectory();
await copy('./test/fixtures/plugin-noop.cjs', path.resolve(cwd, 'plugin/plugin-noop.cjs'));
await outputFile(path.resolve(cwd, 'shareable-config.js'), '');
const plugins = await getPlugins(
@ -179,9 +179,9 @@ test('Export plugins loaded from the dependency of a shareable config file', asy
cwd,
logger: t.context.logger,
options: {
verifyConditions: ['./plugin/plugin-noop', {path: './plugin/plugin-noop'}],
generateNotes: './plugin/plugin-noop',
analyzeCommits: {path: './plugin/plugin-noop'},
verifyConditions: ['./plugin/plugin-noop.cjs', {path: './plugin/plugin-noop.cjs'}],
generateNotes: './plugin/plugin-noop.cjs',
analyzeCommits: {path: './plugin/plugin-noop.cjs'},
verifyRelease: () => {},
},
},
@ -269,7 +269,7 @@ test('Throw an error for each invalid plugin configuration', async (t) => {
},
{}
)
)),
)).errors,
];
t.is(errors[0].name, 'SemanticReleaseError');
@ -289,11 +289,11 @@ test('Throw EPLUGINSCONF error if the "plugins" option contains an old plugin de
{
cwd,
logger: t.context.logger,
options: {plugins: ['./test/fixtures/multi-plugin', './test/fixtures/plugin-noop', () => {}]},
options: {plugins: ['./test/fixtures/multi-plugin.cjs', './test/fixtures/plugin-noop.cjs', () => {}]},
},
{}
)
)),
)).errors,
];
t.is(errors[0].name, 'SemanticReleaseError');
@ -306,7 +306,7 @@ test('Throw EPLUGINSCONF error for each invalid definition if the "plugins" opti
const errors = [
...(await t.throwsAsync(() =>
getPlugins({cwd, logger: t.context.logger, options: {plugins: [1, {path: 1}, [() => {}, {}, {}]]}}, {})
)),
)).errors,
];
t.is(errors[0].name, 'SemanticReleaseError');

View File

@ -1,5 +1,5 @@
const test = require('ava');
const {validatePlugin, validateStep, loadPlugin, parseConfig} = require('../../lib/plugins/utils');
import test from 'ava';
import {loadPlugin, parseConfig, validatePlugin, validateStep} from '../../lib/plugins/utils.js';
test('validatePlugin', (t) => {
const path = 'plugin-module';
@ -193,10 +193,10 @@ test('loadPlugin', async (t) => {
const cwd = process.cwd();
const func = () => {};
t.is(require('../fixtures/plugin-noop'), await loadPlugin({cwd: './test/fixtures'}, './plugin-noop', {}), 'From cwd');
t.is((await import('../fixtures/plugin-noop.cjs')).default, await loadPlugin({cwd: './test/fixtures'}, './plugin-noop.cjs', {}), 'From cwd');
t.is(
require('../fixtures/plugin-noop'),
await loadPlugin({cwd}, './plugin-noop', {'./plugin-noop': './test/fixtures'}),
(await import('../fixtures/plugin-noop.cjs')).default,
await loadPlugin({cwd}, './plugin-noop.cjs', {'./plugin-noop.cjs': './test/fixtures'}),
'From a shareable config context'
);
t.is(func, await loadPlugin({cwd}, func, {}), 'Defined as a function');

View File

@ -1,21 +1,21 @@
const test = require('ava');
const AggregateError = require('aggregate-error');
const {
import test from 'ava';
import AggregateError from 'aggregate-error';
import {
extractErrors,
tagsToVersions,
isMajorRange,
isMaintenanceRange,
getUpperBound,
getLowerBound,
highest,
lowest,
getLatestVersion,
getEarliestVersion,
getFirstVersion,
getLatestVersion,
getLowerBound,
getRange,
makeTag,
getUpperBound,
highest,
isMaintenanceRange,
isMajorRange,
isSameChannel,
} = require('../lib/utils');
lowest,
makeTag,
tagsToVersions
} from '../lib/utils.js';
test('extractErrors', (t) => {
const errors = [new Error('Error 1'), new Error('Error 2')];

View File

@ -1,13 +1,13 @@
const test = require('ava');
const tempy = require('tempy');
const verify = require('../lib/verify');
const {gitRepo} = require('./helpers/git-utils');
import test from 'ava';
import {temporaryDirectory} from 'tempy';
import verify from '../lib/verify.js';
import {gitRepo} from './helpers/git-utils.js';
test('Throw a AggregateError', async (t) => {
const {cwd} = await gitRepo();
const options = {branches: [{name: 'master'}, {name: ''}]};
const errors = [...(await t.throwsAsync(verify({cwd, options})))];
const errors = [...(await t.throwsAsync(verify({cwd, options}))).errors];
t.is(errors[0].name, 'SemanticReleaseError');
t.is(errors[0].code, 'ENOREPOURL');
@ -28,10 +28,10 @@ test('Throw a AggregateError', async (t) => {
});
test('Throw a SemanticReleaseError if does not run on a git repository', async (t) => {
const cwd = tempy.directory();
const cwd = temporaryDirectory();
const options = {branches: []};
const errors = [...(await t.throwsAsync(verify({cwd, options})))];
const errors = [...(await t.throwsAsync(verify({cwd, options}))).errors];
t.is(errors[0].name, 'SemanticReleaseError');
t.is(errors[0].code, 'ENOGITREPO');
@ -43,7 +43,7 @@ test('Throw a SemanticReleaseError if the "tagFormat" is not valid', async (t) =
const {cwd, repositoryUrl} = await gitRepo(true);
const options = {repositoryUrl, tagFormat: `?\${version}`, branches: []};
const errors = [...(await t.throwsAsync(verify({cwd, options})))];
const errors = [...(await t.throwsAsync(verify({cwd, options}))).errors];
t.is(errors[0].name, 'SemanticReleaseError');
t.is(errors[0].code, 'EINVALIDTAGFORMAT');
@ -55,7 +55,7 @@ test('Throw a SemanticReleaseError if the "tagFormat" does not contains the "ver
const {cwd, repositoryUrl} = await gitRepo(true);
const options = {repositoryUrl, tagFormat: 'test', branches: []};
const errors = [...(await t.throwsAsync(verify({cwd, options})))];
const errors = [...(await t.throwsAsync(verify({cwd, options}))).errors];
t.is(errors[0].name, 'SemanticReleaseError');
t.is(errors[0].code, 'ETAGNOVERSION');
@ -67,7 +67,7 @@ test('Throw a SemanticReleaseError if the "tagFormat" contains multiple "version
const {cwd, repositoryUrl} = await gitRepo(true);
const options = {repositoryUrl, tagFormat: `\${version}v\${version}`, branches: []};
const errors = [...(await t.throwsAsync(verify({cwd, options})))];
const errors = [...(await t.throwsAsync(verify({cwd, options}))).errors];
t.is(errors[0].name, 'SemanticReleaseError');
t.is(errors[0].code, 'ETAGNOVERSION');
@ -83,7 +83,7 @@ test('Throw a SemanticReleaseError for each invalid branch', async (t) => {
branches: [{name: ''}, {name: ' '}, {name: 1}, {}, {name: ''}, 1, 'master'],
};
const errors = [...(await t.throwsAsync(verify({cwd, options})))];
const errors = [...(await t.throwsAsync(verify({cwd, options}))).errors];
t.is(errors[0].name, 'SemanticReleaseError');
t.is(errors[0].code, 'EINVALIDBRANCH');