Compare commits

...

7 Commits

Author SHA1 Message Date
Gregor Martynus
86e8f013a7 test(unit): import ESM plugin with named exports 2023-01-14 13:58:42 -08:00
Gregor Martynus
f6c978cd9e fix: handle import of ESM plugins 2023-01-14 13:49:42 -08:00
Gregor Martynus
0ff4dee493 test(integration): ESM Plugin with named exports 2023-01-14 13:49:26 -08:00
Gregor Martynus
e36f0278f6 build(git-blame-ignore-revs): b06c9bbe4c6be121c5561b356d8c465c1cadffba 2023-01-13 15:56:51 -08:00
Gregor Martynus
b06c9bbe4c style: prettier 2023-01-13 15:55:40 -08:00
Gregor Martynus
12a981c020 build(package): lock file 2023-01-13 13:00:53 -08:00
Gregor Martynus
01c9caee0b fix(deps): bump @semantic-release/npm to ^10.0.0-beta.1 2023-01-13 13:00:52 -08:00
39 changed files with 1686 additions and 1453 deletions

2
.git-blame-ignore-revs Normal file
View File

@ -0,0 +1,2 @@
# style: prettier (#2670)
b06c9bbe4c6be121c5561b356d8c465c1cadffba

View File

@ -1,6 +1,6 @@
import {isString, mapValues, omit, remove, template} from 'lodash-es'; import { isString, mapValues, omit, remove, template } from "lodash-es";
import micromatch from 'micromatch'; import micromatch from "micromatch";
import {getBranches} from '../git.js'; import { getBranches } from "../git.js";
export default async (repositoryUrl, { cwd }, branches) => { export default async (repositoryUrl, { cwd }, branches) => {
const gitBranches = await getBranches(repositoryUrl, { cwd }); const gitBranches = await getBranches(repositoryUrl, { cwd });
@ -10,9 +10,9 @@ export default async (repositoryUrl, {cwd}, branches) => {
...branches, ...branches,
...remove(gitBranches, (name) => micromatch(gitBranches, branch.name).includes(name)).map((name) => ({ ...remove(gitBranches, (name) => micromatch(gitBranches, branch.name).includes(name)).map((name) => ({
name, name,
...mapValues(omit(branch, 'name'), (value) => (isString(value) ? template(value)({name}) : value)), ...mapValues(omit(branch, "name"), (value) => (isString(value) ? template(value)({ name }) : value)),
})), })),
], ],
[] []
); );
} };

View File

@ -1,18 +1,17 @@
import {escapeRegExp, template} from 'lodash-es'; import { escapeRegExp, template } from "lodash-es";
import semver from 'semver'; import semver from "semver";
import pReduce from 'p-reduce'; import pReduce from "p-reduce";
import debugTags from 'debug'; import debugTags from "debug";
import {getNote, getTags} from '../../lib/git.js'; import { getNote, getTags } from "../../lib/git.js";
const debug = debugTags('semantic-release:get-tags');
const debug = debugTags("semantic-release:get-tags");
export default async ({ cwd, env, options: { tagFormat } }, branches) => { export default async ({ cwd, env, options: { tagFormat } }, branches) => {
// Generate a regex to parse tags formatted with `tagFormat` // Generate a regex to parse tags formatted with `tagFormat`
// by replacing the `version` variable in the template by `(.+)`. // by replacing the `version` variable in the template by `(.+)`.
// The `tagFormat` is compiled with space as the `version` as it's an invalid tag character, // The `tagFormat` is compiled with space as the `version` as it's an invalid tag character,
// so it's guaranteed to not be present in the `tagFormat`, // so it's guaranteed to not be present in the `tagFormat`.
const tagRegexp = `^${escapeRegExp(template(tagFormat)({version: ' '})).replace(' ', '(.+)')}`; const tagRegexp = `^${escapeRegExp(template(tagFormat)({ version: " " })).replace(" ", "(.+)")}`;
return pReduce( return pReduce(
branches, branches,
@ -28,9 +27,9 @@ export default async ({cwd, env, options: {tagFormat}}, branches) => {
[] []
); );
debug('found tags for branch %s: %o', branch.name, branchTags); debug("found tags for branch %s: %o", branch.name, branchTags);
return [...branches, { ...branch, tags: branchTags }]; return [...branches, { ...branch, tags: branchTags }];
}, },
[] []
); );
} };

View File

@ -1,12 +1,12 @@
import {isRegExp, isString} from 'lodash-es'; import { isRegExp, isString } from "lodash-es";
import AggregateError from 'aggregate-error'; import AggregateError from "aggregate-error";
import pEachSeries from 'p-each-series'; import pEachSeries from "p-each-series";
import * as DEFINITIONS from '../definitions/branches.js'; import * as DEFINITIONS from "../definitions/branches.js";
import getError from '../get-error.js'; import getError from "../get-error.js";
import {fetch, fetchNotes, verifyBranchName} from '../git.js'; import { fetch, fetchNotes, verifyBranchName } from "../git.js";
import expand from './expand.js'; import expand from "./expand.js";
import getTags from './get-tags.js'; import getTags from "./get-tags.js";
import * as normalize from './normalize.js'; import * as normalize from "./normalize.js";
export default async (repositoryUrl, ciBranch, context) => { export default async (repositoryUrl, ciBranch, context) => {
const { cwd, env } = context; const { cwd, env } = context;
@ -54,12 +54,12 @@ export default async (repositoryUrl, ciBranch, context) => {
.filter((_, idx, array) => array[idx] === array[idx + 1] && array[idx] !== array[idx - 1]); .filter((_, idx, array) => array[idx] === array[idx + 1] && array[idx] !== array[idx - 1]);
if (duplicates.length > 0) { if (duplicates.length > 0) {
errors.push(getError('EDUPLICATEBRANCHES', {duplicates})); errors.push(getError("EDUPLICATEBRANCHES", { duplicates }));
} }
await pEachSeries(branches, async (branch) => { await pEachSeries(branches, async (branch) => {
if (!(await verifyBranchName(branch.name))) { if (!(await verifyBranchName(branch.name))) {
errors.push(getError('EINVALIDBRANCHNAME', branch)); errors.push(getError("EINVALIDBRANCHNAME", branch));
} }
}); });
@ -68,4 +68,4 @@ export default async (repositoryUrl, ciBranch, context) => {
} }
return [...result.maintenance, ...result.release, ...result.prerelease]; return [...result.maintenance, ...result.release, ...result.prerelease];
} };

View File

@ -1,16 +1,17 @@
import {isNil, sortBy} from 'lodash-es'; import { isNil, sortBy } from "lodash-es";
import semverDiff from 'semver-diff'; import semverDiff from "semver-diff";
import {FIRST_RELEASE, RELEASE_TYPE} from '../definitions/constants.js'; import { FIRST_RELEASE, RELEASE_TYPE } from "../definitions/constants.js";
import { import {
getFirstVersion, getFirstVersion,
getLatestVersion, getLatestVersion,
getLowerBound, getRange, getLowerBound,
getRange,
getUpperBound, getUpperBound,
highest, highest,
isMajorRange, isMajorRange,
lowest, lowest,
tagsToVersions tagsToVersions,
} from '../utils.js'; } from "../utils.js";
export function maintenance({ maintenance, release }) { export function maintenance({ maintenance, release }) {
return sortBy( return sortBy(
@ -20,7 +21,7 @@ export function maintenance({maintenance, release}) {
range: range || name, range: range || name,
channel: isNil(channel) ? name : channel, channel: isNil(channel) ? name : channel,
})), })),
'range' "range"
).map(({ name, range, tags, ...rest }, idx, branches) => { ).map(({ name, range, tags, ...rest }, idx, branches) => {
const versions = tagsToVersions(tags); const versions = tagsToVersions(tags);
// Find the lower bound based on Maintenance branches // Find the lower bound based on Maintenance branches
@ -44,7 +45,7 @@ export function maintenance({maintenance, release}) {
const diff = semverDiff(min, max); const diff = semverDiff(min, max);
return { return {
...rest, ...rest,
type: 'maintenance', type: "maintenance",
name, name,
tags, tags,
range: getRange(min, max), range: getRange(min, max),
@ -79,7 +80,7 @@ export function release({release}) {
...rest, ...rest,
channel: idx === 0 ? channel : isNil(channel) ? name : channel, channel: idx === 0 ? channel : isNil(channel) ? name : channel,
tags, tags,
type: 'release', type: "release",
name, name,
range: getRange(lastVersion, bound), range: getRange(lastVersion, bound),
accept: bound ? RELEASE_TYPE.slice(0, RELEASE_TYPE.indexOf(diff)) : RELEASE_TYPE, accept: bound ? RELEASE_TYPE.slice(0, RELEASE_TYPE.indexOf(diff)) : RELEASE_TYPE,
@ -94,7 +95,7 @@ export function prerelease({prerelease}) {
return { return {
...rest, ...rest,
channel: isNil(channel) ? name : channel, channel: isNil(channel) ? name : channel,
type: 'prerelease', type: "prerelease",
name, name,
prerelease: preid, prerelease: preid,
tags, tags,

View File

@ -1,6 +1,6 @@
import {isNil, uniqBy} from 'lodash-es'; import { isNil, uniqBy } from "lodash-es";
import semver from 'semver'; import semver from "semver";
import {isMaintenanceRange} from '../utils.js'; import { isMaintenanceRange } from "../utils.js";
export const maintenance = { export const maintenance = {
filter: ({ name, range }) => (!isNil(range) && range !== false) || isMaintenanceRange(name), filter: ({ name, range }) => (!isNil(range) && range !== false) || isMaintenanceRange(name),
@ -12,7 +12,7 @@ export const prerelease = {
filter: ({ prerelease }) => !isNil(prerelease) && prerelease !== false, filter: ({ prerelease }) => !isNil(prerelease) && prerelease !== false,
branchValidator: ({ name, prerelease }) => branchValidator: ({ name, prerelease }) =>
Boolean(prerelease) && Boolean(semver.valid(`1.0.0-${prerelease === true ? name : prerelease}.1`)), Boolean(prerelease) && Boolean(semver.valid(`1.0.0-${prerelease === true ? name : prerelease}.1`)),
branchesValidator: (branches) => uniqBy(branches, 'prerelease').length === branches.length, branchesValidator: (branches) => uniqBy(branches, "prerelease").length === branches.length,
}; };
export const release = { export const release = {

View File

@ -1,17 +1,17 @@
export const RELEASE_TYPE = ['patch', 'minor', 'major']; export const RELEASE_TYPE = ["patch", "minor", "major"];
export const FIRST_RELEASE = '1.0.0'; export const FIRST_RELEASE = "1.0.0";
export const FIRSTPRERELEASE = '1'; export const FIRSTPRERELEASE = "1";
export const COMMIT_NAME = 'semantic-release-bot'; export const COMMIT_NAME = "semantic-release-bot";
export const COMMIT_EMAIL = 'semantic-release-bot@martynus.net'; export const COMMIT_EMAIL = "semantic-release-bot@martynus.net";
export const RELEASE_NOTES_SEPARATOR = '\n\n'; export const RELEASE_NOTES_SEPARATOR = "\n\n";
export const SECRET_REPLACEMENT = '[secure]'; export const SECRET_REPLACEMENT = "[secure]";
export const SECRET_MIN_SIZE = 5; export const SECRET_MIN_SIZE = 5;
export const GIT_NOTE_REF = 'semantic-release'; export const GIT_NOTE_REF = "semantic-release";

View File

@ -1,21 +1,21 @@
import {inspect} from 'node:util'; import { inspect } from "node:util";
import {createRequire} from 'node:module'; import { createRequire } from "node:module";
import {isString, toLower, trim} from 'lodash-es'; import { isString, toLower, trim } from "lodash-es";
import {RELEASE_TYPE} from './constants.js'; import { RELEASE_TYPE } from "./constants.js";
const require = createRequire(import.meta.url); const require = createRequire(import.meta.url);
const pkg = require('../../package.json'); const pkg = require("../../package.json");
const [homepage] = pkg.homepage.split('#'); const [homepage] = pkg.homepage.split("#");
const stringify = (object) => const stringify = (object) =>
isString(object) ? object : inspect(object, { breakLength: Infinity, depth: 2, maxArrayLength: 5 }); isString(object) ? object : inspect(object, { breakLength: Infinity, depth: 2, maxArrayLength: 5 });
const linkify = (file) => `${homepage}/blob/master/${file}`; const linkify = (file) => `${homepage}/blob/master/${file}`;
const wordsList = (words) => const wordsList = (words) =>
`${words.slice(0, -1).join(', ')}${words.length > 1 ? ` or ${words[words.length - 1]}` : trim(words[0])}`; `${words.slice(0, -1).join(", ")}${words.length > 1 ? ` or ${words[words.length - 1]}` : trim(words[0])}`;
export function ENOGITREPO({ cwd }) { export function ENOGITREPO({ cwd }) {
return { return {
message: 'Not running from a git repository.', message: "Not running from a git repository.",
details: `The \`semantic-release\` command must be executed from a Git repository. details: `The \`semantic-release\` command must be executed from a Git repository.
The current working directory is \`${cwd}\`. The current working directory is \`${cwd}\`.
@ -26,36 +26,36 @@ Please verify your CI configuration to make sure the \`semantic-release\` comman
export function ENOREPOURL() { export function ENOREPOURL() {
return { return {
message: 'The `repositoryUrl` option is required.', message: "The `repositoryUrl` option is required.",
details: `The [repositoryUrl option](${linkify( details: `The [repositoryUrl option](${linkify(
'docs/usage/configuration.md#repositoryurl' "docs/usage/configuration.md#repositoryurl"
)}) cannot be determined from the semantic-release configuration, the \`package.json\` nor the [git origin url](https://git-scm.com/book/en/v2/Git-Basics-Working-with-Remotes). )}) cannot be determined from the semantic-release configuration, the \`package.json\` nor the [git origin url](https://git-scm.com/book/en/v2/Git-Basics-Working-with-Remotes).
Please make sure to add the \`repositoryUrl\` to the [semantic-release configuration](${linkify( Please make sure to add the \`repositoryUrl\` to the [semantic-release configuration](${linkify(
'docs/usage/configuration.md' "docs/usage/configuration.md"
)}).`, )}).`,
}; };
} }
export function EGITNOPERMISSION({ options: { repositoryUrl }, branch: { name } }) { export function EGITNOPERMISSION({ options: { repositoryUrl }, branch: { name } }) {
return { return {
message: 'Cannot push to the Git repository.', message: "Cannot push to the Git repository.",
details: `**semantic-release** cannot push the version tag to the branch \`${name}\` on the remote Git repository with URL \`${repositoryUrl}\`. details: `**semantic-release** cannot push the version tag to the branch \`${name}\` on the remote Git repository with URL \`${repositoryUrl}\`.
This can be caused by: This can be caused by:
- a misconfiguration of the [repositoryUrl](${linkify('docs/usage/configuration.md#repositoryurl')}) option - a misconfiguration of the [repositoryUrl](${linkify("docs/usage/configuration.md#repositoryurl")}) option
- the repository being unavailable - the repository being unavailable
- or missing push permission for the user configured via the [Git credentials on your CI environment](${linkify( - or missing push permission for the user configured via the [Git credentials on your CI environment](${linkify(
'docs/usage/ci-configuration.md#authentication' "docs/usage/ci-configuration.md#authentication"
)})`, )})`,
}; };
} }
export function EINVALIDTAGFORMAT({ options: { tagFormat } }) { export function EINVALIDTAGFORMAT({ options: { tagFormat } }) {
return { return {
message: 'Invalid `tagFormat` option.', message: "Invalid `tagFormat` option.",
details: `The [tagFormat](${linkify( details: `The [tagFormat](${linkify(
'docs/usage/configuration.md#tagformat' "docs/usage/configuration.md#tagformat"
)}) must compile to a [valid Git reference](https://git-scm.com/docs/git-check-ref-format#_description). )}) must compile to a [valid Git reference](https://git-scm.com/docs/git-check-ref-format#_description).
Your configuration for the \`tagFormat\` option is \`${stringify(tagFormat)}\`.`, Your configuration for the \`tagFormat\` option is \`${stringify(tagFormat)}\`.`,
@ -64,9 +64,9 @@ Your configuration for the \`tagFormat\` option is \`${stringify(tagFormat)}\`.`
export function ETAGNOVERSION({ options: { tagFormat } }) { export function ETAGNOVERSION({ options: { tagFormat } }) {
return { return {
message: 'Invalid `tagFormat` option.', message: "Invalid `tagFormat` option.",
details: `The [tagFormat](${linkify( details: `The [tagFormat](${linkify(
'docs/usage/configuration.md#tagformat' "docs/usage/configuration.md#tagformat"
)}) option must contain the variable \`version\` exactly once. )}) option must contain the variable \`version\` exactly once.
Your configuration for the \`tagFormat\` option is \`${stringify(tagFormat)}\`.`, Your configuration for the \`tagFormat\` option is \`${stringify(tagFormat)}\`.`,
@ -77,7 +77,7 @@ export function EPLUGINCONF({type, required, pluginConf}) {
return { return {
message: `The \`${type}\` plugin configuration is invalid.`, message: `The \`${type}\` plugin configuration is invalid.`,
details: `The [${type} plugin configuration](${linkify(`docs/usage/plugins.md#${toLower(type)}-plugin`)}) ${ details: `The [${type} plugin configuration](${linkify(`docs/usage/plugins.md#${toLower(type)}-plugin`)}) ${
required ? 'is required and ' : '' required ? "is required and " : ""
} must be a single or an array of plugins definition. A plugin definition is an npm module name, optionally wrapped in an array with an object. } must be a single or an array of plugins definition. A plugin definition is an npm module name, optionally wrapped in an array with an object.
Your configuration for the \`${type}\` plugin is \`${stringify(pluginConf)}\`.`, Your configuration for the \`${type}\` plugin is \`${stringify(pluginConf)}\`.`,
@ -86,9 +86,9 @@ Your configuration for the \`${type}\` plugin is \`${stringify(pluginConf)}\`.`,
export function EPLUGINSCONF({ plugin }) { export function EPLUGINSCONF({ plugin }) {
return { return {
message: 'The `plugins` configuration is invalid.', message: "The `plugins` configuration is invalid.",
details: `The [plugins](${linkify( details: `The [plugins](${linkify(
'docs/usage/configuration.md#plugins' "docs/usage/configuration.md#plugins"
)}) option must be an array of plugin definitions. A plugin definition is an npm module name, optionally wrapped in an array with an object. )}) option must be an array of plugin definitions. A plugin definition is an npm module name, optionally wrapped in an array with an object.
The invalid configuration is \`${stringify(plugin)}\`.`, The invalid configuration is \`${stringify(plugin)}\`.`,
@ -103,17 +103,17 @@ export function EPLUGIN({pluginName, type}) {
The plugin \`${pluginName}\` doesn't have the property \`${type}\` and cannot be used for the \`${type}\` step. The plugin \`${pluginName}\` doesn't have the property \`${type}\` and cannot be used for the \`${type}\` step.
Please refer to the \`${pluginName}\` and [semantic-release plugins configuration](${linkify( Please refer to the \`${pluginName}\` and [semantic-release plugins configuration](${linkify(
'docs/usage/plugins.md' "docs/usage/plugins.md"
)}) documentation for more details.`, )}) documentation for more details.`,
}; };
} }
export function EANALYZECOMMITSOUTPUT({ result, pluginName }) { export function EANALYZECOMMITSOUTPUT({ result, pluginName }) {
return { return {
message: 'The `analyzeCommits` plugin returned an invalid value. It must return a valid semver release type.', message: "The `analyzeCommits` plugin returned an invalid value. It must return a valid semver release type.",
details: `The \`analyzeCommits\` plugin must return a valid [semver](https://semver.org) release type. The valid values are: ${RELEASE_TYPE.map( details: `The \`analyzeCommits\` plugin must return a valid [semver](https://semver.org) release type. The valid values are: ${RELEASE_TYPE.map(
(type) => `\`${type}\`` (type) => `\`${type}\``
).join(', ')}. ).join(", ")}.
The \`analyzeCommits\` function of the \`${pluginName}\` returned \`${stringify(result)}\` instead. The \`analyzeCommits\` function of the \`${pluginName}\` returned \`${stringify(result)}\` instead.
@ -121,15 +121,15 @@ We recommend to report the issue to the \`${pluginName}\` authors, providing the
- The **semantic-release** version: \`${pkg.version}\` - The **semantic-release** version: \`${pkg.version}\`
- The **semantic-release** logs from your CI job - The **semantic-release** logs from your CI job
- The value returned by the plugin: \`${stringify(result)}\` - The value returned by the plugin: \`${stringify(result)}\`
- A link to the **semantic-release** plugin developer guide: [${linkify('docs/developer-guide/plugin.md')}](${linkify( - A link to the **semantic-release** plugin developer guide: [${linkify("docs/developer-guide/plugin.md")}](${linkify(
'docs/developer-guide/plugin.md' "docs/developer-guide/plugin.md"
)})`, )})`,
}; };
} }
export function EGENERATENOTESOUTPUT({ result, pluginName }) { export function EGENERATENOTESOUTPUT({ result, pluginName }) {
return { return {
message: 'The `generateNotes` plugin returned an invalid value. It must return a `String`.', message: "The `generateNotes` plugin returned an invalid value. It must return a `String`.",
details: `The \`generateNotes\` plugin must return a \`String\`. details: `The \`generateNotes\` plugin must return a \`String\`.
The \`generateNotes\` function of the \`${pluginName}\` returned \`${stringify(result)}\` instead. The \`generateNotes\` function of the \`${pluginName}\` returned \`${stringify(result)}\` instead.
@ -138,15 +138,15 @@ We recommend to report the issue to the \`${pluginName}\` authors, providing the
- The **semantic-release** version: \`${pkg.version}\` - The **semantic-release** version: \`${pkg.version}\`
- The **semantic-release** logs from your CI job - The **semantic-release** logs from your CI job
- The value returned by the plugin: \`${stringify(result)}\` - The value returned by the plugin: \`${stringify(result)}\`
- A link to the **semantic-release** plugin developer guide: [${linkify('docs/developer-guide/plugin.md')}](${linkify( - A link to the **semantic-release** plugin developer guide: [${linkify("docs/developer-guide/plugin.md")}](${linkify(
'docs/developer-guide/plugin.md' "docs/developer-guide/plugin.md"
)})`, )})`,
}; };
} }
export function EPUBLISHOUTPUT({ result, pluginName }) { export function EPUBLISHOUTPUT({ result, pluginName }) {
return { return {
message: 'A `publish` plugin returned an invalid value. It must return an `Object`.', message: "A `publish` plugin returned an invalid value. It must return an `Object`.",
details: `The \`publish\` plugins must return an \`Object\`. details: `The \`publish\` plugins must return an \`Object\`.
The \`publish\` function of the \`${pluginName}\` returned \`${stringify(result)}\` instead. The \`publish\` function of the \`${pluginName}\` returned \`${stringify(result)}\` instead.
@ -155,15 +155,15 @@ We recommend to report the issue to the \`${pluginName}\` authors, providing the
- The **semantic-release** version: \`${pkg.version}\` - The **semantic-release** version: \`${pkg.version}\`
- The **semantic-release** logs from your CI job - The **semantic-release** logs from your CI job
- The value returned by the plugin: \`${stringify(result)}\` - The value returned by the plugin: \`${stringify(result)}\`
- A link to the **semantic-release** plugin developer guide: [${linkify('docs/developer-guide/plugin.md')}](${linkify( - A link to the **semantic-release** plugin developer guide: [${linkify("docs/developer-guide/plugin.md")}](${linkify(
'docs/developer-guide/plugin.md' "docs/developer-guide/plugin.md"
)})`, )})`,
}; };
} }
export function EADDCHANNELOUTPUT({ result, pluginName }) { export function EADDCHANNELOUTPUT({ result, pluginName }) {
return { return {
message: 'A `addChannel` plugin returned an invalid value. It must return an `Object`.', message: "A `addChannel` plugin returned an invalid value. It must return an `Object`.",
details: `The \`addChannel\` plugins must return an \`Object\`. details: `The \`addChannel\` plugins must return an \`Object\`.
The \`addChannel\` function of the \`${pluginName}\` returned \`${stringify(result)}\` instead. The \`addChannel\` function of the \`${pluginName}\` returned \`${stringify(result)}\` instead.
@ -172,17 +172,17 @@ We recommend to report the issue to the \`${pluginName}\` authors, providing the
- The **semantic-release** version: \`${pkg.version}\` - The **semantic-release** version: \`${pkg.version}\`
- The **semantic-release** logs from your CI job - The **semantic-release** logs from your CI job
- The value returned by the plugin: \`${stringify(result)}\` - The value returned by the plugin: \`${stringify(result)}\`
- A link to the **semantic-release** plugin developer guide: [${linkify('docs/developer-guide/plugin.md')}](${linkify( - A link to the **semantic-release** plugin developer guide: [${linkify("docs/developer-guide/plugin.md")}](${linkify(
'docs/developer-guide/plugin.md' "docs/developer-guide/plugin.md"
)})`, )})`,
}; };
} }
export function EINVALIDBRANCH({ branch }) { export function EINVALIDBRANCH({ branch }) {
return { return {
message: 'A branch is invalid in the `branches` configuration.', message: "A branch is invalid in the `branches` configuration.",
details: `Each branch in the [branches configuration](${linkify( details: `Each branch in the [branches configuration](${linkify(
'docs/usage/configuration.md#branches' "docs/usage/configuration.md#branches"
)}) must be either a string, a regexp or an object with a \`name\` property. )}) must be either a string, a regexp or an object with a \`name\` property.
Your configuration for the problematic branch is \`${stringify(branch)}\`.`, Your configuration for the problematic branch is \`${stringify(branch)}\`.`,
@ -191,9 +191,9 @@ Your configuration for the problematic branch is \`${stringify(branch)}\`.`,
export function EINVALIDBRANCHNAME({ branch }) { export function EINVALIDBRANCHNAME({ branch }) {
return { return {
message: 'A branch name is invalid in the `branches` configuration.', message: "A branch name is invalid in the `branches` configuration.",
details: `Each branch in the [branches configuration](${linkify( details: `Each branch in the [branches configuration](${linkify(
'docs/usage/configuration.md#branches' "docs/usage/configuration.md#branches"
)}) must be a [valid Git reference](https://git-scm.com/docs/git-check-ref-format#_description). )}) must be a [valid Git reference](https://git-scm.com/docs/git-check-ref-format#_description).
Your configuration for the problematic branch is \`${stringify(branch)}\`.`, Your configuration for the problematic branch is \`${stringify(branch)}\`.`,
@ -202,9 +202,9 @@ Your configuration for the problematic branch is \`${stringify(branch)}\`.`,
export function EDUPLICATEBRANCHES({ duplicates }) { export function EDUPLICATEBRANCHES({ duplicates }) {
return { return {
message: 'The `branches` configuration has duplicate branches.', message: "The `branches` configuration has duplicate branches.",
details: `Each branch in the [branches configuration](${linkify( details: `Each branch in the [branches configuration](${linkify(
'docs/usage/configuration.md#branches' "docs/usage/configuration.md#branches"
)}) must have a unique name. )}) must have a unique name.
Your configuration contains duplicates for the following branch names: \`${stringify(duplicates)}\`.`, Your configuration contains duplicates for the following branch names: \`${stringify(duplicates)}\`.`,
@ -213,9 +213,9 @@ Your configuration contains duplicates for the following branch names: \`${strin
export function EMAINTENANCEBRANCH({ branch }) { export function EMAINTENANCEBRANCH({ branch }) {
return { return {
message: 'A maintenance branch is invalid in the `branches` configuration.', message: "A maintenance branch is invalid in the `branches` configuration.",
details: `Each maintenance branch in the [branches configuration](${linkify( details: `Each maintenance branch in the [branches configuration](${linkify(
'docs/usage/configuration.md#branches' "docs/usage/configuration.md#branches"
)}) must have a \`range\` property formatted like \`N.x\`, \`N.x.x\` or \`N.N.x\` (\`N\` is a number). )}) must have a \`range\` property formatted like \`N.x\`, \`N.x.x\` or \`N.N.x\` (\`N\` is a number).
Your configuration for the problematic branch is \`${stringify(branch)}\`.`, Your configuration for the problematic branch is \`${stringify(branch)}\`.`,
@ -224,9 +224,9 @@ Your configuration for the problematic branch is \`${stringify(branch)}\`.`,
export function EMAINTENANCEBRANCHES({ branches }) { export function EMAINTENANCEBRANCHES({ branches }) {
return { return {
message: 'The maintenance branches are invalid in the `branches` configuration.', message: "The maintenance branches are invalid in the `branches` configuration.",
details: `Each maintenance branch in the [branches configuration](${linkify( details: `Each maintenance branch in the [branches configuration](${linkify(
'docs/usage/configuration.md#branches' "docs/usage/configuration.md#branches"
)}) must have a unique \`range\` property. )}) must have a unique \`range\` property.
Your configuration for the problematic branches is \`${stringify(branches)}\`.`, Your configuration for the problematic branches is \`${stringify(branches)}\`.`,
@ -235,9 +235,9 @@ Your configuration for the problematic branches is \`${stringify(branches)}\`.`,
export function ERELEASEBRANCHES({ branches }) { export function ERELEASEBRANCHES({ branches }) {
return { return {
message: 'The release branches are invalid in the `branches` configuration.', message: "The release branches are invalid in the `branches` configuration.",
details: `A minimum of 1 and a maximum of 3 release branches are required in the [branches configuration](${linkify( details: `A minimum of 1 and a maximum of 3 release branches are required in the [branches configuration](${linkify(
'docs/usage/configuration.md#branches' "docs/usage/configuration.md#branches"
)}). )}).
This may occur if your repository does not have a release branch, such as \`master\`. This may occur if your repository does not have a release branch, such as \`master\`.
@ -248,9 +248,9 @@ Your configuration for the problematic branches is \`${stringify(branches)}\`.`,
export function EPRERELEASEBRANCH({ branch }) { export function EPRERELEASEBRANCH({ branch }) {
return { return {
message: 'A pre-release branch configuration is invalid in the `branches` configuration.', message: "A pre-release branch configuration is invalid in the `branches` configuration.",
details: `Each pre-release branch in the [branches configuration](${linkify( details: `Each pre-release branch in the [branches configuration](${linkify(
'docs/usage/configuration.md#branches' "docs/usage/configuration.md#branches"
)}) must have a \`prerelease\` property valid per the [Semantic Versioning Specification](https://semver.org/#spec-item-9). If the \`prerelease\` property is set to \`true\`, then the \`name\` property is used instead. )}) must have a \`prerelease\` property valid per the [Semantic Versioning Specification](https://semver.org/#spec-item-9). If the \`prerelease\` property is set to \`true\`, then the \`name\` property is used instead.
Your configuration for the problematic branch is \`${stringify(branch)}\`.`, Your configuration for the problematic branch is \`${stringify(branch)}\`.`,
@ -259,9 +259,9 @@ Your configuration for the problematic branch is \`${stringify(branch)}\`.`,
export function EPRERELEASEBRANCHES({ branches }) { export function EPRERELEASEBRANCHES({ branches }) {
return { return {
message: 'The pre-release branches are invalid in the `branches` configuration.', message: "The pre-release branches are invalid in the `branches` configuration.",
details: `Each pre-release branch in the [branches configuration](${linkify( details: `Each pre-release branch in the [branches configuration](${linkify(
'docs/usage/configuration.md#branches' "docs/usage/configuration.md#branches"
)}) must have a unique \`prerelease\` property. If the \`prerelease\` property is set to \`true\`, then the \`name\` property is used instead. )}) must have a unique \`prerelease\` property. If the \`prerelease\` property is set to \`true\`, then the \`name\` property is used instead.
Your configuration for the problematic branches is \`${stringify(branches)}\`.`, Your configuration for the problematic branches is \`${stringify(branches)}\`.`,
@ -273,16 +273,16 @@ export function EINVALIDNEXTVERSION({nextRelease: {version}, branch: {name, rang
message: `The release \`${version}\` on branch \`${name}\` cannot be published as it is out of range.`, message: `The release \`${version}\` on branch \`${name}\` cannot be published as it is out of range.`,
details: `Based on the releases published on other branches, only versions within the range \`${range}\` can be published from branch \`${name}\`. details: `Based on the releases published on other branches, only versions within the range \`${range}\` can be published from branch \`${name}\`.
The following commit${commits.length > 1 ? 's are' : ' is'} responsible for the invalid release: The following commit${commits.length > 1 ? "s are" : " is"} responsible for the invalid release:
${commits.map(({commit: {short}, subject}) => `- ${subject} (${short})`).join('\n')} ${commits.map(({ commit: { short }, subject }) => `- ${subject} (${short})`).join("\n")}
${ ${
commits.length > 1 ? 'Those commits' : 'This commit' commits.length > 1 ? "Those commits" : "This commit"
} should be moved to a valid branch with [git merge](https://git-scm.com/docs/git-merge) or [git cherry-pick](https://git-scm.com/docs/git-cherry-pick) and removed from branch \`${name}\` with [git revert](https://git-scm.com/docs/git-revert) or [git reset](https://git-scm.com/docs/git-reset). } should be moved to a valid branch with [git merge](https://git-scm.com/docs/git-merge) or [git cherry-pick](https://git-scm.com/docs/git-cherry-pick) and removed from branch \`${name}\` with [git revert](https://git-scm.com/docs/git-revert) or [git reset](https://git-scm.com/docs/git-reset).
A valid branch could be ${wordsList(validBranches.map(({ name }) => `\`${name}\``))}. A valid branch could be ${wordsList(validBranches.map(({ name }) => `\`${name}\``))}.
See the [workflow configuration documentation](${linkify('docs/usage/workflow-configuration.md')}) for more details.`, See the [workflow configuration documentation](${linkify("docs/usage/workflow-configuration.md")}) for more details.`,
}; };
} }
@ -293,6 +293,6 @@ export function EINVALIDMAINTENANCEMERGE({nextRelease: {channel, gitTag, version
The branch \`${name}\` head should be [reset](https://git-scm.com/docs/git-reset) to a previous commit so the commit with tag \`${gitTag}\` is removed from the branch history. The branch \`${name}\` head should be [reset](https://git-scm.com/docs/git-reset) to a previous commit so the commit with tag \`${gitTag}\` is removed from the branch history.
See the [workflow configuration documentation](${linkify('docs/usage/workflow-configuration.md')}) for more details.`, See the [workflow configuration documentation](${linkify("docs/usage/workflow-configuration.md")}) for more details.`,
}; };
} }

View File

@ -1,10 +1,10 @@
/* eslint require-atomic-updates: off */ /* eslint require-atomic-updates: off */
import {isPlainObject, isString} from 'lodash-es'; import { isPlainObject, isString } from "lodash-es";
import {getGitHead} from '../git.js'; import { getGitHead } from "../git.js";
import hideSensitive from '../hide-sensitive.js'; import hideSensitive from "../hide-sensitive.js";
import {hideSensitiveValues} from '../utils.js'; import { hideSensitiveValues } from "../utils.js";
import {RELEASE_NOTES_SEPARATOR, RELEASE_TYPE} from './constants.js'; import { RELEASE_NOTES_SEPARATOR, RELEASE_TYPE } from "./constants.js";
export default { export default {
verifyConditions: { verifyConditions: {
@ -13,7 +13,7 @@ export default {
pipelineConfig: () => ({ settleAll: true }), pipelineConfig: () => ({ settleAll: true }),
}, },
analyzeCommits: { analyzeCommits: {
default: ['@semantic-release/commit-analyzer'], default: ["@semantic-release/commit-analyzer"],
required: true, required: true,
dryRun: true, dryRun: true,
outputValidator: (output) => !output || RELEASE_TYPE.includes(output), outputValidator: (output) => !output || RELEASE_TYPE.includes(output),
@ -43,7 +43,7 @@ export default {
...context, ...context,
nextRelease: { nextRelease: {
...nextRelease, ...nextRelease,
notes: `${nextRelease.notes ? `${nextRelease.notes}${RELEASE_NOTES_SEPARATOR}` : ''}${notes}`, notes: `${nextRelease.notes ? `${nextRelease.notes}${RELEASE_NOTES_SEPARATOR}` : ""}${notes}`,
}, },
}), }),
}), }),

View File

@ -1,10 +1,10 @@
import {castArray, identity, isNil, isPlainObject, isString, omit} from 'lodash-es'; import { castArray, identity, isNil, isPlainObject, isString, omit } from "lodash-es";
import AggregateError from 'aggregate-error'; import AggregateError from "aggregate-error";
import getError from '../get-error.js'; import getError from "../get-error.js";
import PLUGINS_DEFINITIONS from '../definitions/plugins.js'; import PLUGINS_DEFINITIONS from "../definitions/plugins.js";
import {loadPlugin, parseConfig, validatePlugin, validateStep} from './utils.js'; import { loadPlugin, parseConfig, validatePlugin, validateStep } from "./utils.js";
import pipeline from './pipeline.js'; import pipeline from "./pipeline.js";
import normalize from './normalize.js'; import normalize from "./normalize.js";
export default async (context, pluginsPath) => { export default async (context, pluginsPath) => {
let { options, logger } = context; let { options, logger } = context;
@ -20,8 +20,8 @@ export default async (context, pluginsPath) => {
if (isPlainObject(plugin)) { if (isPlainObject(plugin)) {
Object.entries(plugin).forEach(([type, func]) => { Object.entries(plugin).forEach(([type, func]) => {
if (PLUGINS_DEFINITIONS[type]) { if (PLUGINS_DEFINITIONS[type]) {
Reflect.defineProperty(func, 'pluginName', { Reflect.defineProperty(func, "pluginName", {
value: isPlainObject(name) ? 'Inline plugin' : name, value: isPlainObject(name) ? "Inline plugin" : name,
writable: false, writable: false,
enumerable: true, enumerable: true,
}); });
@ -29,10 +29,10 @@ export default async (context, pluginsPath) => {
} }
}); });
} else { } else {
errors.push(getError('EPLUGINSCONF', {plugin})); errors.push(getError("EPLUGINSCONF", { plugin }));
} }
} else { } else {
errors.push(getError('EPLUGINSCONF', {plugin})); errors.push(getError("EPLUGINSCONF", { plugin }));
} }
return pluginsList; return pluginsList;
@ -64,7 +64,7 @@ export default async (context, pluginsPath) => {
} }
if (!validateStep({ required }, options[type])) { if (!validateStep({ required }, options[type])) {
errors.push(getError('EPLUGINCONF', {type, required, pluginConf: options[type]})); errors.push(getError("EPLUGINCONF", { type, required, pluginConf: options[type] }));
return pluginsConfigAccumulator; return pluginsConfigAccumulator;
} }
@ -74,7 +74,7 @@ export default async (context, pluginsPath) => {
const steps = await Promise.all( const steps = await Promise.all(
castArray(pluginOptions).map(async (pluginOpt) => castArray(pluginOptions).map(async (pluginOpt) =>
normalize( normalize(
{...context, options: omit(options, Object.keys(PLUGINS_DEFINITIONS), 'plugins')}, { ...context, options: omit(options, Object.keys(PLUGINS_DEFINITIONS), "plugins") },
type, type,
pluginOpt, pluginOpt,
pluginsPath pluginsPath
@ -100,4 +100,4 @@ export default async (context, pluginsPath) => {
} }
return pluginsConfig; return pluginsConfig;
} };

View File

@ -1,11 +1,11 @@
import {cloneDeep, isFunction, isPlainObject, noop, omit} from 'lodash-es'; import { cloneDeep, isFunction, isPlainObject, noop, omit } from "lodash-es";
import debugPlugins from 'debug'; import debugPlugins from "debug";
import getError from '../get-error.js'; import getError from "../get-error.js";
import {extractErrors} from '../utils.js'; import { extractErrors } from "../utils.js";
import PLUGINS_DEFINITIONS from '../definitions/plugins.js'; import PLUGINS_DEFINITIONS from "../definitions/plugins.js";
import {loadPlugin, parseConfig} from './utils.js'; import { loadPlugin, parseConfig } from "./utils.js";
const debug = debugPlugins('semantic-release:plugins'); const debug = debugPlugins("semantic-release:plugins");
export default async (context, type, pluginOpt, pluginsPath) => { export default async (context, type, pluginOpt, pluginsPath) => {
const { stdout, stderr, options, logger } = context; const { stdout, stderr, options, logger } = context;
@ -25,7 +25,7 @@ export default async (context, type, pluginOpt, pluginsPath) => {
} else if (isPlainObject(plugin) && plugin[type] && isFunction(plugin[type])) { } else if (isPlainObject(plugin) && plugin[type] && isFunction(plugin[type])) {
func = plugin[type].bind(null, cloneDeep({ ...options, ...config })); func = plugin[type].bind(null, cloneDeep({ ...options, ...config }));
} else { } else {
throw getError('EPLUGIN', {type, pluginName}); throw getError("EPLUGIN", { type, pluginName });
} }
const validator = async (input) => { const validator = async (input) => {
@ -34,7 +34,7 @@ export default async (context, type, pluginOpt, pluginsPath) => {
if (!input.options.dryRun || dryRun) { if (!input.options.dryRun || dryRun) {
logger.log(`Start step "${type}" of plugin "${pluginName}"`); logger.log(`Start step "${type}" of plugin "${pluginName}"`);
const result = await func({ const result = await func({
...cloneDeep(omit(input, ['stdout', 'stderr', 'logger'])), ...cloneDeep(omit(input, ["stdout", "stderr", "logger"])),
stdout, stdout,
stderr, stderr,
logger: logger.scope(logger.scopeName, pluginName), logger: logger.scope(logger.scopeName, pluginName),
@ -55,7 +55,7 @@ export default async (context, type, pluginOpt, pluginsPath) => {
} }
}; };
Reflect.defineProperty(validator, 'pluginName', {value: pluginName, writable: false, enumerable: true}); Reflect.defineProperty(validator, "pluginName", { value: pluginName, writable: false, enumerable: true });
if (!isFunction(pluginOpt)) { if (!isFunction(pluginOpt)) {
if (pluginsPath[name]) { if (pluginsPath[name]) {
@ -66,4 +66,4 @@ export default async (context, type, pluginOpt, pluginsPath) => {
} }
return validator; return validator;
} };

View File

@ -1,7 +1,7 @@
import {identity} from 'lodash-es'; import { identity } from "lodash-es";
import pReduce from 'p-reduce'; import pReduce from "p-reduce";
import AggregateError from 'aggregate-error'; import AggregateError from "aggregate-error";
import {extractErrors} from '../utils.js'; import { extractErrors } from "../utils.js";
/** /**
* A Function that execute a list of function sequencially. If at least one Function ins the pipeline throws an Error or rejects, the pipeline function rejects as well. * A Function that execute a list of function sequencially. If at least one Function ins the pipeline throws an Error or rejects, the pipeline function rejects as well.
@ -25,7 +25,8 @@ import {extractErrors} from '../utils.js';
* *
* @return {Pipeline} A Function that execute the `steps` sequentially * @return {Pipeline} A Function that execute the `steps` sequentially
*/ */
export default (steps, {settleAll = false, getNextInput = identity, transform = identity} = {}) => async (input) => { export default (steps, { settleAll = false, getNextInput = identity, transform = identity } = {}) =>
async (input) => {
const results = []; const results = [];
const errors = []; const errors = [];
await pReduce( await pReduce(
@ -55,4 +56,4 @@ export default (steps, {settleAll = false, getNextInput = identity, transform =
} }
return results; return results;
} };

View File

@ -1,7 +1,7 @@
import {dirname} from 'node:path'; import { dirname } from "node:path";
import {fileURLToPath} from 'node:url'; import { fileURLToPath } from "node:url";
import {castArray, isArray, isFunction, isNil, isPlainObject, isString} from 'lodash-es'; import { castArray, isArray, isFunction, isNil, isPlainObject, isString } from "lodash-es";
import resolveFrom from 'resolve-from'; import resolveFrom from "resolve-from";
const __dirname = dirname(fileURLToPath(import.meta.url)); const __dirname = dirname(fileURLToPath(import.meta.url));
@ -53,8 +53,19 @@ export async function loadPlugin({cwd}, name, pluginsPath) {
: __dirname; : __dirname;
// See https://github.com/mysticatea/eslint-plugin-node/issues/250 // See https://github.com/mysticatea/eslint-plugin-node/issues/250
// eslint-disable-next-line node/no-unsupported-features/es-syntax if (isFunction(name)) {
return isFunction(name) ? name : (await import(resolveFrom.silent(basePath, name) || resolveFrom(cwd, name))).default; return name;
}
const { default: cjsExport, ...esmNamedExports } = await import(
resolveFrom.silent(basePath, name) || resolveFrom(cwd, name)
);
if (cjsExport) {
return cjsExport;
}
return esmNamedExports;
} }
export function parseConfig(plugin) { export function parseConfig(plugin) {

361
package-lock.json generated
View File

@ -12,7 +12,7 @@
"@semantic-release/commit-analyzer": "^9.0.2", "@semantic-release/commit-analyzer": "^9.0.2",
"@semantic-release/error": "^3.0.0", "@semantic-release/error": "^3.0.0",
"@semantic-release/github": "^8.0.0", "@semantic-release/github": "^8.0.0",
"@semantic-release/npm": "^9.0.0", "@semantic-release/npm": "^10.0.0-beta.1",
"@semantic-release/release-notes-generator": "^10.0.0", "@semantic-release/release-notes-generator": "^10.0.0",
"aggregate-error": "^4.0.1", "aggregate-error": "^4.0.1",
"cosmiconfig": "^8.0.0", "cosmiconfig": "^8.0.0",
@ -286,6 +286,29 @@
"@octokit/openapi-types": "^10.2.2" "@octokit/openapi-types": "^10.2.2"
} }
}, },
"node_modules/@pnpm/network.ca-file": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/@pnpm/network.ca-file/-/network.ca-file-1.0.2.tgz",
"integrity": "sha512-YcPQ8a0jwYU9bTdJDpXjMi7Brhkr1mXsXrUJvjqM2mQDgkRiz8jFaQGOdaLxgjtUfQgZhKy/O3cG/YwmgKaxLA==",
"dependencies": {
"graceful-fs": "4.2.10"
},
"engines": {
"node": ">=12.22.0"
}
},
"node_modules/@pnpm/npm-conf": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/@pnpm/npm-conf/-/npm-conf-1.0.5.tgz",
"integrity": "sha512-hD8ml183638O3R6/Txrh0L8VzGOrFXgRtRDG4qQC4tONdZ5Z1M+tlUUDUvrjYdmK6G+JTBTeaCLMna11cXzi8A==",
"dependencies": {
"@pnpm/network.ca-file": "^1.0.1",
"config-chain": "^1.1.11"
},
"engines": {
"node": ">=12"
}
},
"node_modules/@semantic-release/commit-analyzer": { "node_modules/@semantic-release/commit-analyzer": {
"version": "9.0.2", "version": "9.0.2",
"resolved": "https://registry.npmjs.org/@semantic-release/commit-analyzer/-/commit-analyzer-9.0.2.tgz", "resolved": "https://registry.npmjs.org/@semantic-release/commit-analyzer/-/commit-analyzer-9.0.2.tgz",
@ -410,26 +433,26 @@
} }
}, },
"node_modules/@semantic-release/npm": { "node_modules/@semantic-release/npm": {
"version": "9.0.0", "version": "10.0.0-beta.1",
"resolved": "https://registry.npmjs.org/@semantic-release/npm/-/npm-9.0.0.tgz", "resolved": "https://registry.npmjs.org/@semantic-release/npm/-/npm-10.0.0-beta.1.tgz",
"integrity": "sha512-hj2jqayS2SPUmFtCMCOQMX975uMDfRoymj1HvMSwYdaoI6hVZvhrTFPBgJeM85O0C+G3IFviAUar5gel/1VGDQ==", "integrity": "sha512-HbfiXZXz0D6yMCfiLXYyAStKie3hmHTH70pkth0s9HfLR8VXMVVCcODbbhzjVIZ5wO7ZR5SgOWzc6f9YpWUt1A==",
"dependencies": { "dependencies": {
"@semantic-release/error": "^3.0.0", "@semantic-release/error": "^3.0.0",
"aggregate-error": "^3.0.0", "aggregate-error": "^3.0.0",
"execa": "^5.0.0", "execa": "^5.0.0",
"fs-extra": "^10.0.0", "fs-extra": "^11.0.0",
"lodash": "^4.17.15", "lodash-es": "^4.17.21",
"nerf-dart": "^1.0.0", "nerf-dart": "^1.0.0",
"normalize-url": "^6.0.0", "normalize-url": "^6.0.0",
"npm": "^8.3.0", "npm": "^8.3.0",
"rc": "^1.2.8", "rc": "^1.2.8",
"read-pkg": "^5.0.0", "read-pkg": "^7.0.0",
"registry-auth-token": "^4.0.0", "registry-auth-token": "^5.0.0",
"semver": "^7.1.2", "semver": "^7.1.2",
"tempy": "^1.0.0" "tempy": "^3.0.0"
}, },
"engines": { "engines": {
"node": ">=16 || ^14.17" "node": ">=18"
}, },
"peerDependencies": { "peerDependencies": {
"semantic-release": ">=19.0.0" "semantic-release": ">=19.0.0"
@ -477,19 +500,6 @@
"url": "https://github.com/sindresorhus/execa?sponsor=1" "url": "https://github.com/sindresorhus/execa?sponsor=1"
} }
}, },
"node_modules/@semantic-release/npm/node_modules/fs-extra": {
"version": "10.1.0",
"resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz",
"integrity": "sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==",
"dependencies": {
"graceful-fs": "^4.2.0",
"jsonfile": "^6.0.1",
"universalify": "^2.0.0"
},
"engines": {
"node": ">=12"
}
},
"node_modules/@semantic-release/npm/node_modules/human-signals": { "node_modules/@semantic-release/npm/node_modules/human-signals": {
"version": "2.1.0", "version": "2.1.0",
"resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz",
@ -542,6 +552,23 @@
"url": "https://github.com/sponsors/sindresorhus" "url": "https://github.com/sponsors/sindresorhus"
} }
}, },
"node_modules/@semantic-release/npm/node_modules/read-pkg": {
"version": "7.1.0",
"resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-7.1.0.tgz",
"integrity": "sha512-5iOehe+WF75IccPc30bWTbpdDQLOCc3Uu8bi3Dte3Eueij81yx1Mrufk8qBx/YAbR4uL1FdUr+7BKXDwEtisXg==",
"dependencies": {
"@types/normalize-package-data": "^2.4.1",
"normalize-package-data": "^3.0.2",
"parse-json": "^5.2.0",
"type-fest": "^2.0.0"
},
"engines": {
"node": ">=12.20"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/@semantic-release/npm/node_modules/strip-final-newline": { "node_modules/@semantic-release/npm/node_modules/strip-final-newline": {
"version": "2.0.0", "version": "2.0.0",
"resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz",
@ -550,30 +577,12 @@
"node": ">=6" "node": ">=6"
} }
}, },
"node_modules/@semantic-release/npm/node_modules/tempy": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/tempy/-/tempy-1.0.1.tgz",
"integrity": "sha512-biM9brNqxSc04Ee71hzFbryD11nX7VPhQQY32AdDmjFvodsRFz/3ufeoTZ6uYkRFfGo188tENcASNs3vTdsM0w==",
"dependencies": {
"del": "^6.0.0",
"is-stream": "^2.0.0",
"temp-dir": "^2.0.0",
"type-fest": "^0.16.0",
"unique-string": "^2.0.0"
},
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/@semantic-release/npm/node_modules/type-fest": { "node_modules/@semantic-release/npm/node_modules/type-fest": {
"version": "0.16.0", "version": "2.19.0",
"resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.16.0.tgz", "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.19.0.tgz",
"integrity": "sha512-eaBzG6MxNzEn9kiwvtre90cXaNLkmadMWa1zQMs3XORCXNbsH/OewwbxC5ia9dCxIxnTAsSxXJaa/p5y8DlvJg==", "integrity": "sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==",
"engines": { "engines": {
"node": ">=10" "node": ">=12.20"
}, },
"funding": { "funding": {
"url": "https://github.com/sponsors/sindresorhus" "url": "https://github.com/sponsors/sindresorhus"
@ -1903,6 +1912,15 @@
"node": ">=10.18.0 <11 || >=12.14.0 <13 || >=14" "node": ">=10.18.0 <11 || >=12.14.0 <13 || >=14"
} }
}, },
"node_modules/config-chain": {
"version": "1.1.13",
"resolved": "https://registry.npmjs.org/config-chain/-/config-chain-1.1.13.tgz",
"integrity": "sha512-qj+f8APARXHrM0hraqXYb2/bOVSV4PvJQlNZ/DVj0QrmNM2q2euizkeuVckQ57J+W0mRH6Hvi+k50M4Jul2VRQ==",
"dependencies": {
"ini": "^1.3.4",
"proto-list": "~1.2.1"
}
},
"node_modules/conventional-changelog-angular": { "node_modules/conventional-changelog-angular": {
"version": "5.0.13", "version": "5.0.13",
"resolved": "https://registry.npmjs.org/conventional-changelog-angular/-/conventional-changelog-angular-5.0.13.tgz", "resolved": "https://registry.npmjs.org/conventional-changelog-angular/-/conventional-changelog-angular-5.0.13.tgz",
@ -2051,6 +2069,7 @@
"version": "2.0.0", "version": "2.0.0",
"resolved": "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-2.0.0.tgz", "resolved": "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-2.0.0.tgz",
"integrity": "sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA==", "integrity": "sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA==",
"peer": true,
"engines": { "engines": {
"node": ">=8" "node": ">=8"
} }
@ -2153,6 +2172,7 @@
"version": "6.1.1", "version": "6.1.1",
"resolved": "https://registry.npmjs.org/del/-/del-6.1.1.tgz", "resolved": "https://registry.npmjs.org/del/-/del-6.1.1.tgz",
"integrity": "sha512-ua8BhapfP0JUJKC/zV9yHHDW/rDoDxP4Zhn3AkA6/xT6gY7jYXJiaeyBZznYVujhZZET+UgcbZiQ7sN3WqcImg==", "integrity": "sha512-ua8BhapfP0JUJKC/zV9yHHDW/rDoDxP4Zhn3AkA6/xT6gY7jYXJiaeyBZznYVujhZZET+UgcbZiQ7sN3WqcImg==",
"peer": true,
"dependencies": { "dependencies": {
"globby": "^11.0.1", "globby": "^11.0.1",
"graceful-fs": "^4.2.4", "graceful-fs": "^4.2.4",
@ -2174,6 +2194,7 @@
"version": "3.1.0", "version": "3.1.0",
"resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz",
"integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==",
"peer": true,
"dependencies": { "dependencies": {
"clean-stack": "^2.0.0", "clean-stack": "^2.0.0",
"indent-string": "^4.0.0" "indent-string": "^4.0.0"
@ -2186,6 +2207,7 @@
"version": "2.2.0", "version": "2.2.0",
"resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz",
"integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==",
"peer": true,
"engines": { "engines": {
"node": ">=6" "node": ">=6"
} }
@ -2194,6 +2216,7 @@
"version": "4.0.0", "version": "4.0.0",
"resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz",
"integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==",
"peer": true,
"dependencies": { "dependencies": {
"aggregate-error": "^3.0.0" "aggregate-error": "^3.0.0"
}, },
@ -2681,7 +2704,6 @@
"version": "11.1.0", "version": "11.1.0",
"resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.1.0.tgz", "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.1.0.tgz",
"integrity": "sha512-0rcTq621PD5jM/e0a3EJoGC/1TC5ZBCERW82LQuwfGnCa1V8w7dpYH1yNu+SLb6E5dkeCBzKEyLGlFrnr+dUyw==", "integrity": "sha512-0rcTq621PD5jM/e0a3EJoGC/1TC5ZBCERW82LQuwfGnCa1V8w7dpYH1yNu+SLb6E5dkeCBzKEyLGlFrnr+dUyw==",
"dev": true,
"dependencies": { "dependencies": {
"graceful-fs": "^4.2.0", "graceful-fs": "^4.2.0",
"jsonfile": "^6.0.1", "jsonfile": "^6.0.1",
@ -3391,6 +3413,7 @@
"version": "2.2.0", "version": "2.2.0",
"resolved": "https://registry.npmjs.org/is-path-cwd/-/is-path-cwd-2.2.0.tgz", "resolved": "https://registry.npmjs.org/is-path-cwd/-/is-path-cwd-2.2.0.tgz",
"integrity": "sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ==", "integrity": "sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ==",
"peer": true,
"engines": { "engines": {
"node": ">=6" "node": ">=6"
} }
@ -3399,6 +3422,7 @@
"version": "3.0.3", "version": "3.0.3",
"resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz",
"integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==",
"peer": true,
"engines": { "engines": {
"node": ">=8" "node": ">=8"
} }
@ -6918,6 +6942,11 @@
"node": ">= 8" "node": ">= 8"
} }
}, },
"node_modules/proto-list": {
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/proto-list/-/proto-list-1.2.4.tgz",
"integrity": "sha512-vtK/94akxsTMhe0/cbfpR+syPuszcuwhqVjJq26CuNDgFGj682oRBXOP5MJpv2r7JtE8MsiepGIqvvOTBwn2vA=="
},
"node_modules/pump": { "node_modules/pump": {
"version": "3.0.0", "version": "3.0.0",
"resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz",
@ -7204,14 +7233,14 @@
} }
}, },
"node_modules/registry-auth-token": { "node_modules/registry-auth-token": {
"version": "4.2.1", "version": "5.0.1",
"resolved": "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-4.2.1.tgz", "resolved": "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-5.0.1.tgz",
"integrity": "sha512-6gkSb4U6aWJB4SF2ZvLb76yCBjcvufXBqvvEx1HbmKPkutswjW1xNVRY0+daljIYRbogN7O0etYSlbiaEQyMyw==", "integrity": "sha512-UfxVOj8seK1yaIOiieV4FIP01vfBDLsY0H9sQzi9EbbUdJiuuBjJgLa1DpImXMNPnVkBD4eVxTEXcrZA6kfpJA==",
"dependencies": { "dependencies": {
"rc": "^1.2.8" "@pnpm/npm-conf": "^1.0.4"
}, },
"engines": { "engines": {
"node": ">=6.0.0" "node": ">=14"
} }
}, },
"node_modules/require-directory": { "node_modules/require-directory": {
@ -7380,6 +7409,33 @@
"node": ">=16 || ^14.17" "node": ">=16 || ^14.17"
} }
}, },
"node_modules/semantic-release/node_modules/@semantic-release/npm": {
"version": "9.0.2",
"resolved": "https://registry.npmjs.org/@semantic-release/npm/-/npm-9.0.2.tgz",
"integrity": "sha512-zgsynF6McdzxPnFet+a4iO9HpAlARXOM5adz7VGVCvj0ne8wtL2ZOQoDV2wZPDmdEotDIbVeJjafhelZjs9j6g==",
"peer": true,
"dependencies": {
"@semantic-release/error": "^3.0.0",
"aggregate-error": "^3.0.0",
"execa": "^5.0.0",
"fs-extra": "^11.0.0",
"lodash": "^4.17.15",
"nerf-dart": "^1.0.0",
"normalize-url": "^6.0.0",
"npm": "^8.3.0",
"rc": "^1.2.8",
"read-pkg": "^5.0.0",
"registry-auth-token": "^5.0.0",
"semver": "^7.1.2",
"tempy": "^1.0.0"
},
"engines": {
"node": ">=16 || ^14.17"
},
"peerDependencies": {
"semantic-release": ">=19.0.0"
}
},
"node_modules/semantic-release/node_modules/aggregate-error": { "node_modules/semantic-release/node_modules/aggregate-error": {
"version": "3.1.0", "version": "3.1.0",
"resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz",
@ -7654,6 +7710,37 @@
"node": ">=6" "node": ">=6"
} }
}, },
"node_modules/semantic-release/node_modules/tempy": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/tempy/-/tempy-1.0.1.tgz",
"integrity": "sha512-biM9brNqxSc04Ee71hzFbryD11nX7VPhQQY32AdDmjFvodsRFz/3ufeoTZ6uYkRFfGo188tENcASNs3vTdsM0w==",
"peer": true,
"dependencies": {
"del": "^6.0.0",
"is-stream": "^2.0.0",
"temp-dir": "^2.0.0",
"type-fest": "^0.16.0",
"unique-string": "^2.0.0"
},
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/semantic-release/node_modules/tempy/node_modules/type-fest": {
"version": "0.16.0",
"resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.16.0.tgz",
"integrity": "sha512-eaBzG6MxNzEn9kiwvtre90cXaNLkmadMWa1zQMs3XORCXNbsH/OewwbxC5ia9dCxIxnTAsSxXJaa/p5y8DlvJg==",
"peer": true,
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/semantic-release/node_modules/type-fest": { "node_modules/semantic-release/node_modules/type-fest": {
"version": "0.8.1", "version": "0.8.1",
"resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz",
@ -8132,7 +8219,7 @@
"node_modules/strip-json-comments": { "node_modules/strip-json-comments": {
"version": "2.0.1", "version": "2.0.1",
"resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz",
"integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=", "integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==",
"engines": { "engines": {
"node": ">=0.10.0" "node": ">=0.10.0"
} }
@ -8317,7 +8404,6 @@
"version": "3.0.0", "version": "3.0.0",
"resolved": "https://registry.npmjs.org/tempy/-/tempy-3.0.0.tgz", "resolved": "https://registry.npmjs.org/tempy/-/tempy-3.0.0.tgz",
"integrity": "sha512-B2I9X7+o2wOaW4r/CWMkpOO9mdiTRCxXNgob6iGvPmfPWgH/KyUD6Uy5crtWBxIBe3YrNZKR2lSzv1JJKWD4vA==", "integrity": "sha512-B2I9X7+o2wOaW4r/CWMkpOO9mdiTRCxXNgob6iGvPmfPWgH/KyUD6Uy5crtWBxIBe3YrNZKR2lSzv1JJKWD4vA==",
"dev": true,
"dependencies": { "dependencies": {
"is-stream": "^3.0.0", "is-stream": "^3.0.0",
"temp-dir": "^2.0.0", "temp-dir": "^2.0.0",
@ -8335,7 +8421,6 @@
"version": "4.0.0", "version": "4.0.0",
"resolved": "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-4.0.0.tgz", "resolved": "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-4.0.0.tgz",
"integrity": "sha512-x8dy3RnvYdlUcPOjkEHqozhiwzKNSq7GcPuXFbnyMOCHxX8V3OgIg/pYuabl2sbUPfIJaeAQB7PMOK8DFIdoRA==", "integrity": "sha512-x8dy3RnvYdlUcPOjkEHqozhiwzKNSq7GcPuXFbnyMOCHxX8V3OgIg/pYuabl2sbUPfIJaeAQB7PMOK8DFIdoRA==",
"dev": true,
"dependencies": { "dependencies": {
"type-fest": "^1.0.1" "type-fest": "^1.0.1"
}, },
@ -8350,7 +8435,6 @@
"version": "1.4.0", "version": "1.4.0",
"resolved": "https://registry.npmjs.org/type-fest/-/type-fest-1.4.0.tgz", "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-1.4.0.tgz",
"integrity": "sha512-yGSza74xk0UG8k+pLh5oeoYirvIiWo5t0/o3zHHAO2tRDiZcxWP7fywNlXhqb6/r6sWvwi+RsyQMWhVLe4BVuA==", "integrity": "sha512-yGSza74xk0UG8k+pLh5oeoYirvIiWo5t0/o3zHHAO2tRDiZcxWP7fywNlXhqb6/r6sWvwi+RsyQMWhVLe4BVuA==",
"dev": true,
"engines": { "engines": {
"node": ">=10" "node": ">=10"
}, },
@ -8362,7 +8446,6 @@
"version": "2.19.0", "version": "2.19.0",
"resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.19.0.tgz", "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.19.0.tgz",
"integrity": "sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==", "integrity": "sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==",
"dev": true,
"engines": { "engines": {
"node": ">=12.20" "node": ">=12.20"
}, },
@ -8374,7 +8457,6 @@
"version": "3.0.0", "version": "3.0.0",
"resolved": "https://registry.npmjs.org/unique-string/-/unique-string-3.0.0.tgz", "resolved": "https://registry.npmjs.org/unique-string/-/unique-string-3.0.0.tgz",
"integrity": "sha512-VGXBUVwxKMBUznyffQweQABPRRW1vHZAbadFZud4pLFAqRGvv/96vafgjWFqzourzr8YonlQiPgH0YCJfawoGQ==", "integrity": "sha512-VGXBUVwxKMBUznyffQweQABPRRW1vHZAbadFZud4pLFAqRGvv/96vafgjWFqzourzr8YonlQiPgH0YCJfawoGQ==",
"dev": true,
"dependencies": { "dependencies": {
"crypto-random-string": "^4.0.0" "crypto-random-string": "^4.0.0"
}, },
@ -8536,6 +8618,7 @@
"version": "2.0.0", "version": "2.0.0",
"resolved": "https://registry.npmjs.org/unique-string/-/unique-string-2.0.0.tgz", "resolved": "https://registry.npmjs.org/unique-string/-/unique-string-2.0.0.tgz",
"integrity": "sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg==", "integrity": "sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg==",
"peer": true,
"dependencies": { "dependencies": {
"crypto-random-string": "^2.0.0" "crypto-random-string": "^2.0.0"
}, },
@ -9036,6 +9119,23 @@
"@octokit/openapi-types": "^10.2.2" "@octokit/openapi-types": "^10.2.2"
} }
}, },
"@pnpm/network.ca-file": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/@pnpm/network.ca-file/-/network.ca-file-1.0.2.tgz",
"integrity": "sha512-YcPQ8a0jwYU9bTdJDpXjMi7Brhkr1mXsXrUJvjqM2mQDgkRiz8jFaQGOdaLxgjtUfQgZhKy/O3cG/YwmgKaxLA==",
"requires": {
"graceful-fs": "4.2.10"
}
},
"@pnpm/npm-conf": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/@pnpm/npm-conf/-/npm-conf-1.0.5.tgz",
"integrity": "sha512-hD8ml183638O3R6/Txrh0L8VzGOrFXgRtRDG4qQC4tONdZ5Z1M+tlUUDUvrjYdmK6G+JTBTeaCLMna11cXzi8A==",
"requires": {
"@pnpm/network.ca-file": "^1.0.1",
"config-chain": "^1.1.11"
}
},
"@semantic-release/commit-analyzer": { "@semantic-release/commit-analyzer": {
"version": "9.0.2", "version": "9.0.2",
"resolved": "https://registry.npmjs.org/@semantic-release/commit-analyzer/-/commit-analyzer-9.0.2.tgz", "resolved": "https://registry.npmjs.org/@semantic-release/commit-analyzer/-/commit-analyzer-9.0.2.tgz",
@ -9131,23 +9231,23 @@
} }
}, },
"@semantic-release/npm": { "@semantic-release/npm": {
"version": "9.0.0", "version": "10.0.0-beta.1",
"resolved": "https://registry.npmjs.org/@semantic-release/npm/-/npm-9.0.0.tgz", "resolved": "https://registry.npmjs.org/@semantic-release/npm/-/npm-10.0.0-beta.1.tgz",
"integrity": "sha512-hj2jqayS2SPUmFtCMCOQMX975uMDfRoymj1HvMSwYdaoI6hVZvhrTFPBgJeM85O0C+G3IFviAUar5gel/1VGDQ==", "integrity": "sha512-HbfiXZXz0D6yMCfiLXYyAStKie3hmHTH70pkth0s9HfLR8VXMVVCcODbbhzjVIZ5wO7ZR5SgOWzc6f9YpWUt1A==",
"requires": { "requires": {
"@semantic-release/error": "^3.0.0", "@semantic-release/error": "^3.0.0",
"aggregate-error": "^3.0.0", "aggregate-error": "^3.0.0",
"execa": "^5.0.0", "execa": "^5.0.0",
"fs-extra": "^10.0.0", "fs-extra": "^11.0.0",
"lodash": "^4.17.15", "lodash-es": "^4.17.21",
"nerf-dart": "^1.0.0", "nerf-dart": "^1.0.0",
"normalize-url": "^6.0.0", "normalize-url": "^6.0.0",
"npm": "^8.3.0", "npm": "^8.3.0",
"rc": "^1.2.8", "rc": "^1.2.8",
"read-pkg": "^5.0.0", "read-pkg": "^7.0.0",
"registry-auth-token": "^4.0.0", "registry-auth-token": "^5.0.0",
"semver": "^7.1.2", "semver": "^7.1.2",
"tempy": "^1.0.0" "tempy": "^3.0.0"
}, },
"dependencies": { "dependencies": {
"aggregate-error": { "aggregate-error": {
@ -9180,16 +9280,6 @@
"strip-final-newline": "^2.0.0" "strip-final-newline": "^2.0.0"
} }
}, },
"fs-extra": {
"version": "10.1.0",
"resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz",
"integrity": "sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==",
"requires": {
"graceful-fs": "^4.2.0",
"jsonfile": "^6.0.1",
"universalify": "^2.0.0"
}
},
"human-signals": { "human-signals": {
"version": "2.1.0", "version": "2.1.0",
"resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz",
@ -9221,27 +9311,26 @@
"mimic-fn": "^2.1.0" "mimic-fn": "^2.1.0"
} }
}, },
"read-pkg": {
"version": "7.1.0",
"resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-7.1.0.tgz",
"integrity": "sha512-5iOehe+WF75IccPc30bWTbpdDQLOCc3Uu8bi3Dte3Eueij81yx1Mrufk8qBx/YAbR4uL1FdUr+7BKXDwEtisXg==",
"requires": {
"@types/normalize-package-data": "^2.4.1",
"normalize-package-data": "^3.0.2",
"parse-json": "^5.2.0",
"type-fest": "^2.0.0"
}
},
"strip-final-newline": { "strip-final-newline": {
"version": "2.0.0", "version": "2.0.0",
"resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz",
"integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==" "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA=="
}, },
"tempy": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/tempy/-/tempy-1.0.1.tgz",
"integrity": "sha512-biM9brNqxSc04Ee71hzFbryD11nX7VPhQQY32AdDmjFvodsRFz/3ufeoTZ6uYkRFfGo188tENcASNs3vTdsM0w==",
"requires": {
"del": "^6.0.0",
"is-stream": "^2.0.0",
"temp-dir": "^2.0.0",
"type-fest": "^0.16.0",
"unique-string": "^2.0.0"
}
},
"type-fest": { "type-fest": {
"version": "0.16.0", "version": "2.19.0",
"resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.16.0.tgz", "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.19.0.tgz",
"integrity": "sha512-eaBzG6MxNzEn9kiwvtre90cXaNLkmadMWa1zQMs3XORCXNbsH/OewwbxC5ia9dCxIxnTAsSxXJaa/p5y8DlvJg==" "integrity": "sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA=="
} }
} }
}, },
@ -10190,6 +10279,15 @@
"well-known-symbols": "^2.0.0" "well-known-symbols": "^2.0.0"
} }
}, },
"config-chain": {
"version": "1.1.13",
"resolved": "https://registry.npmjs.org/config-chain/-/config-chain-1.1.13.tgz",
"integrity": "sha512-qj+f8APARXHrM0hraqXYb2/bOVSV4PvJQlNZ/DVj0QrmNM2q2euizkeuVckQ57J+W0mRH6Hvi+k50M4Jul2VRQ==",
"requires": {
"ini": "^1.3.4",
"proto-list": "~1.2.1"
}
},
"conventional-changelog-angular": { "conventional-changelog-angular": {
"version": "5.0.13", "version": "5.0.13",
"resolved": "https://registry.npmjs.org/conventional-changelog-angular/-/conventional-changelog-angular-5.0.13.tgz", "resolved": "https://registry.npmjs.org/conventional-changelog-angular/-/conventional-changelog-angular-5.0.13.tgz",
@ -10307,7 +10405,8 @@
"crypto-random-string": { "crypto-random-string": {
"version": "2.0.0", "version": "2.0.0",
"resolved": "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-2.0.0.tgz", "resolved": "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-2.0.0.tgz",
"integrity": "sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA==" "integrity": "sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA==",
"peer": true
}, },
"currently-unhandled": { "currently-unhandled": {
"version": "0.4.1", "version": "0.4.1",
@ -10380,6 +10479,7 @@
"version": "6.1.1", "version": "6.1.1",
"resolved": "https://registry.npmjs.org/del/-/del-6.1.1.tgz", "resolved": "https://registry.npmjs.org/del/-/del-6.1.1.tgz",
"integrity": "sha512-ua8BhapfP0JUJKC/zV9yHHDW/rDoDxP4Zhn3AkA6/xT6gY7jYXJiaeyBZznYVujhZZET+UgcbZiQ7sN3WqcImg==", "integrity": "sha512-ua8BhapfP0JUJKC/zV9yHHDW/rDoDxP4Zhn3AkA6/xT6gY7jYXJiaeyBZznYVujhZZET+UgcbZiQ7sN3WqcImg==",
"peer": true,
"requires": { "requires": {
"globby": "^11.0.1", "globby": "^11.0.1",
"graceful-fs": "^4.2.4", "graceful-fs": "^4.2.4",
@ -10395,6 +10495,7 @@
"version": "3.1.0", "version": "3.1.0",
"resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz",
"integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==",
"peer": true,
"requires": { "requires": {
"clean-stack": "^2.0.0", "clean-stack": "^2.0.0",
"indent-string": "^4.0.0" "indent-string": "^4.0.0"
@ -10403,12 +10504,14 @@
"clean-stack": { "clean-stack": {
"version": "2.2.0", "version": "2.2.0",
"resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz",
"integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==" "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==",
"peer": true
}, },
"p-map": { "p-map": {
"version": "4.0.0", "version": "4.0.0",
"resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz",
"integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==",
"peer": true,
"requires": { "requires": {
"aggregate-error": "^3.0.0" "aggregate-error": "^3.0.0"
} }
@ -10797,7 +10900,6 @@
"version": "11.1.0", "version": "11.1.0",
"resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.1.0.tgz", "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.1.0.tgz",
"integrity": "sha512-0rcTq621PD5jM/e0a3EJoGC/1TC5ZBCERW82LQuwfGnCa1V8w7dpYH1yNu+SLb6E5dkeCBzKEyLGlFrnr+dUyw==", "integrity": "sha512-0rcTq621PD5jM/e0a3EJoGC/1TC5ZBCERW82LQuwfGnCa1V8w7dpYH1yNu+SLb6E5dkeCBzKEyLGlFrnr+dUyw==",
"dev": true,
"requires": { "requires": {
"graceful-fs": "^4.2.0", "graceful-fs": "^4.2.0",
"jsonfile": "^6.0.1", "jsonfile": "^6.0.1",
@ -11309,12 +11411,14 @@
"is-path-cwd": { "is-path-cwd": {
"version": "2.2.0", "version": "2.2.0",
"resolved": "https://registry.npmjs.org/is-path-cwd/-/is-path-cwd-2.2.0.tgz", "resolved": "https://registry.npmjs.org/is-path-cwd/-/is-path-cwd-2.2.0.tgz",
"integrity": "sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ==" "integrity": "sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ==",
"peer": true
}, },
"is-path-inside": { "is-path-inside": {
"version": "3.0.3", "version": "3.0.3",
"resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz",
"integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==" "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==",
"peer": true
}, },
"is-plain-obj": { "is-plain-obj": {
"version": "1.1.0", "version": "1.1.0",
@ -13736,6 +13840,11 @@
"integrity": "sha512-vGrhOavPSTz4QVNuBNdcNXePNdNMaO1xj9yBeH1ScQPjk/rhg9sSlCXPhMkFuaNNW/syTvYqsnbIJxMBfRbbag==", "integrity": "sha512-vGrhOavPSTz4QVNuBNdcNXePNdNMaO1xj9yBeH1ScQPjk/rhg9sSlCXPhMkFuaNNW/syTvYqsnbIJxMBfRbbag==",
"dev": true "dev": true
}, },
"proto-list": {
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/proto-list/-/proto-list-1.2.4.tgz",
"integrity": "sha512-vtK/94akxsTMhe0/cbfpR+syPuszcuwhqVjJq26CuNDgFGj682oRBXOP5MJpv2r7JtE8MsiepGIqvvOTBwn2vA=="
},
"pump": { "pump": {
"version": "3.0.0", "version": "3.0.0",
"resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz",
@ -13929,11 +14038,11 @@
} }
}, },
"registry-auth-token": { "registry-auth-token": {
"version": "4.2.1", "version": "5.0.1",
"resolved": "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-4.2.1.tgz", "resolved": "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-5.0.1.tgz",
"integrity": "sha512-6gkSb4U6aWJB4SF2ZvLb76yCBjcvufXBqvvEx1HbmKPkutswjW1xNVRY0+daljIYRbogN7O0etYSlbiaEQyMyw==", "integrity": "sha512-UfxVOj8seK1yaIOiieV4FIP01vfBDLsY0H9sQzi9EbbUdJiuuBjJgLa1DpImXMNPnVkBD4eVxTEXcrZA6kfpJA==",
"requires": { "requires": {
"rc": "^1.2.8" "@pnpm/npm-conf": "^1.0.4"
} }
}, },
"require-directory": { "require-directory": {
@ -14043,6 +14152,27 @@
"yargs": "^16.2.0" "yargs": "^16.2.0"
}, },
"dependencies": { "dependencies": {
"@semantic-release/npm": {
"version": "9.0.2",
"resolved": "https://registry.npmjs.org/@semantic-release/npm/-/npm-9.0.2.tgz",
"integrity": "sha512-zgsynF6McdzxPnFet+a4iO9HpAlARXOM5adz7VGVCvj0ne8wtL2ZOQoDV2wZPDmdEotDIbVeJjafhelZjs9j6g==",
"peer": true,
"requires": {
"@semantic-release/error": "^3.0.0",
"aggregate-error": "^3.0.0",
"execa": "^5.0.0",
"fs-extra": "^11.0.0",
"lodash": "^4.17.15",
"nerf-dart": "^1.0.0",
"normalize-url": "^6.0.0",
"npm": "^8.3.0",
"rc": "^1.2.8",
"read-pkg": "^5.0.0",
"registry-auth-token": "^5.0.0",
"semver": "^7.1.2",
"tempy": "^1.0.0"
}
},
"aggregate-error": { "aggregate-error": {
"version": "3.1.0", "version": "3.1.0",
"resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz",
@ -14232,6 +14362,27 @@
"integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==",
"peer": true "peer": true
}, },
"tempy": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/tempy/-/tempy-1.0.1.tgz",
"integrity": "sha512-biM9brNqxSc04Ee71hzFbryD11nX7VPhQQY32AdDmjFvodsRFz/3ufeoTZ6uYkRFfGo188tENcASNs3vTdsM0w==",
"peer": true,
"requires": {
"del": "^6.0.0",
"is-stream": "^2.0.0",
"temp-dir": "^2.0.0",
"type-fest": "^0.16.0",
"unique-string": "^2.0.0"
},
"dependencies": {
"type-fest": {
"version": "0.16.0",
"resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.16.0.tgz",
"integrity": "sha512-eaBzG6MxNzEn9kiwvtre90cXaNLkmadMWa1zQMs3XORCXNbsH/OewwbxC5ia9dCxIxnTAsSxXJaa/p5y8DlvJg==",
"peer": true
}
}
},
"type-fest": { "type-fest": {
"version": "0.8.1", "version": "0.8.1",
"resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz",
@ -14605,7 +14756,7 @@
"strip-json-comments": { "strip-json-comments": {
"version": "2.0.1", "version": "2.0.1",
"resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz",
"integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=" "integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ=="
}, },
"stubs": { "stubs": {
"version": "3.0.0", "version": "3.0.0",
@ -14745,7 +14896,6 @@
"version": "3.0.0", "version": "3.0.0",
"resolved": "https://registry.npmjs.org/tempy/-/tempy-3.0.0.tgz", "resolved": "https://registry.npmjs.org/tempy/-/tempy-3.0.0.tgz",
"integrity": "sha512-B2I9X7+o2wOaW4r/CWMkpOO9mdiTRCxXNgob6iGvPmfPWgH/KyUD6Uy5crtWBxIBe3YrNZKR2lSzv1JJKWD4vA==", "integrity": "sha512-B2I9X7+o2wOaW4r/CWMkpOO9mdiTRCxXNgob6iGvPmfPWgH/KyUD6Uy5crtWBxIBe3YrNZKR2lSzv1JJKWD4vA==",
"dev": true,
"requires": { "requires": {
"is-stream": "^3.0.0", "is-stream": "^3.0.0",
"temp-dir": "^2.0.0", "temp-dir": "^2.0.0",
@ -14757,7 +14907,6 @@
"version": "4.0.0", "version": "4.0.0",
"resolved": "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-4.0.0.tgz", "resolved": "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-4.0.0.tgz",
"integrity": "sha512-x8dy3RnvYdlUcPOjkEHqozhiwzKNSq7GcPuXFbnyMOCHxX8V3OgIg/pYuabl2sbUPfIJaeAQB7PMOK8DFIdoRA==", "integrity": "sha512-x8dy3RnvYdlUcPOjkEHqozhiwzKNSq7GcPuXFbnyMOCHxX8V3OgIg/pYuabl2sbUPfIJaeAQB7PMOK8DFIdoRA==",
"dev": true,
"requires": { "requires": {
"type-fest": "^1.0.1" "type-fest": "^1.0.1"
}, },
@ -14765,22 +14914,19 @@
"type-fest": { "type-fest": {
"version": "1.4.0", "version": "1.4.0",
"resolved": "https://registry.npmjs.org/type-fest/-/type-fest-1.4.0.tgz", "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-1.4.0.tgz",
"integrity": "sha512-yGSza74xk0UG8k+pLh5oeoYirvIiWo5t0/o3zHHAO2tRDiZcxWP7fywNlXhqb6/r6sWvwi+RsyQMWhVLe4BVuA==", "integrity": "sha512-yGSza74xk0UG8k+pLh5oeoYirvIiWo5t0/o3zHHAO2tRDiZcxWP7fywNlXhqb6/r6sWvwi+RsyQMWhVLe4BVuA=="
"dev": true
} }
} }
}, },
"type-fest": { "type-fest": {
"version": "2.19.0", "version": "2.19.0",
"resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.19.0.tgz", "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.19.0.tgz",
"integrity": "sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==", "integrity": "sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA=="
"dev": true
}, },
"unique-string": { "unique-string": {
"version": "3.0.0", "version": "3.0.0",
"resolved": "https://registry.npmjs.org/unique-string/-/unique-string-3.0.0.tgz", "resolved": "https://registry.npmjs.org/unique-string/-/unique-string-3.0.0.tgz",
"integrity": "sha512-VGXBUVwxKMBUznyffQweQABPRRW1vHZAbadFZud4pLFAqRGvv/96vafgjWFqzourzr8YonlQiPgH0YCJfawoGQ==", "integrity": "sha512-VGXBUVwxKMBUznyffQweQABPRRW1vHZAbadFZud4pLFAqRGvv/96vafgjWFqzourzr8YonlQiPgH0YCJfawoGQ==",
"dev": true,
"requires": { "requires": {
"crypto-random-string": "^4.0.0" "crypto-random-string": "^4.0.0"
} }
@ -14905,6 +15051,7 @@
"version": "2.0.0", "version": "2.0.0",
"resolved": "https://registry.npmjs.org/unique-string/-/unique-string-2.0.0.tgz", "resolved": "https://registry.npmjs.org/unique-string/-/unique-string-2.0.0.tgz",
"integrity": "sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg==", "integrity": "sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg==",
"peer": true,
"requires": { "requires": {
"crypto-random-string": "^2.0.0" "crypto-random-string": "^2.0.0"
} }

View File

@ -29,7 +29,7 @@
"@semantic-release/commit-analyzer": "^9.0.2", "@semantic-release/commit-analyzer": "^9.0.2",
"@semantic-release/error": "^3.0.0", "@semantic-release/error": "^3.0.0",
"@semantic-release/github": "^8.0.0", "@semantic-release/github": "^8.0.0",
"@semantic-release/npm": "^9.0.0", "@semantic-release/npm": "^10.0.0-beta.1",
"@semantic-release/release-notes-generator": "^10.0.0", "@semantic-release/release-notes-generator": "^10.0.0",
"aggregate-error": "^4.0.1", "aggregate-error": "^4.0.1",
"cosmiconfig": "^8.0.0", "cosmiconfig": "^8.0.0",
@ -125,8 +125,8 @@
}, },
"scripts": { "scripts": {
"codecov": "codecov -f coverage/coverage-final.json", "codecov": "codecov -f coverage/coverage-final.json",
"lint": "prettier --check \"*.{js,json,md}\" \".github/**/*.{md,yml}\" \"docs/**/*.md\" \"{bin,lib,test}/*.js\"", "lint": "prettier --check \"*.{js,json,md}\" \".github/**/*.{md,yml}\" \"docs/**/*.md\" \"{bin,lib,test}/**/*.js\"",
"lint:fix": "prettier --write \"*.{js,json,md}\" \".github/**/*.{md,yml}\" \"docs/**/*.md\" \"{bin,lib,test}/*.js\"", "lint:fix": "prettier --write \"*.{js,json,md}\" \".github/**/*.{md,yml}\" \"docs/**/*.md\" \"{bin,lib,test}/**/*.js\"",
"pretest": "npm run lint", "pretest": "npm run lint",
"semantic-release": "./bin/semantic-release.js", "semantic-release": "./bin/semantic-release.js",
"test": "c8 ava --verbose", "test": "c8 ava --verbose",

View File

@ -1,277 +1,277 @@
import test from 'ava'; import test from "ava";
import {union} from 'lodash-es'; import { union } from "lodash-es";
import semver from 'semver'; import semver from "semver";
import * as td from 'testdouble'; import * as td from "testdouble";
const getBranch = (branches, branch) => branches.find(({ name }) => name === branch); const getBranch = (branches, branch) => branches.find(({ name }) => name === branch);
const release = (branches, name, version) => getBranch(branches, name).tags.push({ version }); const release = (branches, name, version) => getBranch(branches, name).tags.push({ version });
const merge = (branches, source, target, tag) => { const merge = (branches, source, target, tag) => {
getBranch(branches, target).tags = union( getBranch(branches, target).tags = union(
getBranch(branches, source).tags.filter(({version}) => !tag || semver.cmp(version, '<=', tag)), getBranch(branches, source).tags.filter(({ version }) => !tag || semver.cmp(version, "<=", tag)),
getBranch(branches, target).tags getBranch(branches, target).tags
); );
}; };
const remoteBranches = []; const remoteBranches = [];
const repositoryUrl = 'repositoryUrl'; const repositoryUrl = "repositoryUrl";
let expand, getTags, getBranches; let expand, getTags, getBranches;
test.beforeEach(async (t) => { test.beforeEach(async (t) => {
getTags = (await td.replaceEsm('../../lib/branches/get-tags.js')).default; getTags = (await td.replaceEsm("../../lib/branches/get-tags.js")).default;
expand = (await td.replaceEsm('../../lib/branches/expand.js')).default; expand = (await td.replaceEsm("../../lib/branches/expand.js")).default;
getBranches = (await import('../../lib/branches/index.js')).default; getBranches = (await import("../../lib/branches/index.js")).default;
}) });
test.afterEach.always((t) => { test.afterEach.always((t) => {
td.reset(); td.reset();
}); });
test.serial('Enforce ranges with branching release workflow', async (t) => { test.serial("Enforce ranges with branching release workflow", async (t) => {
const branches = [ const branches = [
{name: '1.x', tags: []}, { name: "1.x", tags: [] },
{name: '1.0.x', tags: []}, { name: "1.0.x", tags: [] },
{name: 'master', tags: []}, { name: "master", tags: [] },
{name: 'next', tags: []}, { name: "next", tags: [] },
{name: 'next-major', tags: []}, { name: "next-major", tags: [] },
{name: 'beta', prerelease: true, tags: []}, { name: "beta", prerelease: true, tags: [] },
{name: 'alpha', prerelease: true, tags: []}, { name: "alpha", prerelease: true, tags: [] },
]; ];
const context = { options: { branches } }; const context = { options: { branches } };
td.when(expand(repositoryUrl, context, branches)).thenResolve(remoteBranches); td.when(expand(repositoryUrl, context, branches)).thenResolve(remoteBranches);
td.when(getTags(context, remoteBranches)).thenResolve(branches); td.when(getTags(context, remoteBranches)).thenResolve(branches);
let result = (await getBranches(repositoryUrl, 'master', context)).map(({name, range}) => ({name, range,})); let result = (await getBranches(repositoryUrl, "master", context)).map(({ name, range }) => ({ name, range }));
t.is(getBranch(result, '1.0.x').range, '>=1.0.0 <1.0.0', 'Cannot release on 1.0.x before a releasing on master'); t.is(getBranch(result, "1.0.x").range, ">=1.0.0 <1.0.0", "Cannot release on 1.0.x before a releasing on master");
t.is(getBranch(result, '1.x').range, '>=1.1.0 <1.0.0', 'Cannot release on 1.x before a releasing on master'); t.is(getBranch(result, "1.x").range, ">=1.1.0 <1.0.0", "Cannot release on 1.x before a releasing on master");
t.is(getBranch(result, 'master').range, '>=1.0.0'); t.is(getBranch(result, "master").range, ">=1.0.0");
t.is(getBranch(result, 'next').range, '>=1.0.0'); t.is(getBranch(result, "next").range, ">=1.0.0");
t.is(getBranch(result, 'next-major').range, '>=1.0.0'); t.is(getBranch(result, "next-major").range, ">=1.0.0");
release(branches, 'master', '1.0.0'); release(branches, "master", "1.0.0");
result = (await getBranches('repositoryUrl', 'master', context)).map(({name, range}) => ({name, range})); result = (await getBranches("repositoryUrl", "master", context)).map(({ name, range }) => ({ name, range }));
t.is(getBranch(result, '1.0.x').range, '>=1.0.0 <1.0.0', 'Cannot release on 1.0.x before a releasing on master'); t.is(getBranch(result, "1.0.x").range, ">=1.0.0 <1.0.0", "Cannot release on 1.0.x before a releasing on master");
t.is(getBranch(result, '1.x').range, '>=1.1.0 <1.0.0', 'Cannot release on 1.x before a releasing on master'); t.is(getBranch(result, "1.x").range, ">=1.1.0 <1.0.0", "Cannot release on 1.x before a releasing on master");
t.is(getBranch(result, 'master').range, '>=1.0.0'); t.is(getBranch(result, "master").range, ">=1.0.0");
t.is(getBranch(result, 'next').range, '>=1.0.0'); t.is(getBranch(result, "next").range, ">=1.0.0");
t.is(getBranch(result, 'next-major').range, '>=1.0.0'); t.is(getBranch(result, "next-major").range, ">=1.0.0");
release(branches, 'master', '1.0.1'); release(branches, "master", "1.0.1");
result = (await getBranches('repositoryUrl', 'master', {options: {branches}})).map(({name, range}) => ({ result = (await getBranches("repositoryUrl", "master", { options: { branches } })).map(({ name, range }) => ({
name, name,
range, range,
})); }));
t.is(getBranch(result, 'master').range, '>=1.0.1', 'Can release only > than 1.0.1 on master'); t.is(getBranch(result, "master").range, ">=1.0.1", "Can release only > than 1.0.1 on master");
t.is(getBranch(result, 'next').range, '>=1.0.1', 'Can release only > than 1.0.1 on next'); t.is(getBranch(result, "next").range, ">=1.0.1", "Can release only > than 1.0.1 on next");
t.is(getBranch(result, 'next-major').range, '>=1.0.1', 'Can release only > than 1.0.1 on next-major'); t.is(getBranch(result, "next-major").range, ">=1.0.1", "Can release only > than 1.0.1 on next-major");
merge(branches, 'master', 'next'); merge(branches, "master", "next");
merge(branches, 'master', 'next-major'); merge(branches, "master", "next-major");
result = (await getBranches('repositoryUrl', 'master', {options: {branches}})).map(({name, range}) => ({ result = (await getBranches("repositoryUrl", "master", { options: { branches } })).map(({ name, range }) => ({
name, name,
range, range,
})); }));
t.is(getBranch(result, 'master').range, '>=1.0.1', 'Can release only > than 1.0.1 on master'); t.is(getBranch(result, "master").range, ">=1.0.1", "Can release only > than 1.0.1 on master");
t.is(getBranch(result, 'next').range, '>=1.0.1', 'Can release only > than 1.0.1 on next'); t.is(getBranch(result, "next").range, ">=1.0.1", "Can release only > than 1.0.1 on next");
t.is(getBranch(result, 'next-major').range, '>=1.0.1', 'Can release only > than 1.0.1 on next-major'); t.is(getBranch(result, "next-major").range, ">=1.0.1", "Can release only > than 1.0.1 on next-major");
release(branches, 'next', '1.1.0'); release(branches, "next", "1.1.0");
release(branches, 'next', '1.1.1'); release(branches, "next", "1.1.1");
result = (await getBranches('repositoryUrl', 'master', {options: {branches}})).map(({name, range}) => ({ result = (await getBranches("repositoryUrl", "master", { options: { branches } })).map(({ name, range }) => ({
name, name,
range, range,
})); }));
t.is(getBranch(result, 'master').range, '>=1.0.1 <1.1.0', 'Can release only patch, > than 1.0.1 on master'); t.is(getBranch(result, "master").range, ">=1.0.1 <1.1.0", "Can release only patch, > than 1.0.1 on master");
t.is(getBranch(result, 'next').range, '>=1.1.1', 'Can release only > than 1.1.1 on next'); t.is(getBranch(result, "next").range, ">=1.1.1", "Can release only > than 1.1.1 on next");
t.is(getBranch(result, 'next-major').range, '>=1.1.1', 'Can release > than 1.1.1 on next-major'); t.is(getBranch(result, "next-major").range, ">=1.1.1", "Can release > than 1.1.1 on next-major");
release(branches, 'next-major', '2.0.0'); release(branches, "next-major", "2.0.0");
release(branches, 'next-major', '2.0.1'); release(branches, "next-major", "2.0.1");
result = (await getBranches('repositoryUrl', 'master', {options: {branches}})).map(({name, range}) => ({ result = (await getBranches("repositoryUrl", "master", { options: { branches } })).map(({ name, range }) => ({
name, name,
range, range,
})); }));
t.is(getBranch(result, 'master').range, '>=1.0.1 <1.1.0', 'Can release only patch, > than 1.0.1 on master'); t.is(getBranch(result, "master").range, ">=1.0.1 <1.1.0", "Can release only patch, > than 1.0.1 on master");
t.is(getBranch(result, 'next').range, '>=1.1.1 <2.0.0', 'Can release only patch or minor, > than 1.1.0 on next'); t.is(getBranch(result, "next").range, ">=1.1.1 <2.0.0", "Can release only patch or minor, > than 1.1.0 on next");
t.is(getBranch(result, 'next-major').range, '>=2.0.1', 'Can release any version, > than 2.0.1 on next-major'); t.is(getBranch(result, "next-major").range, ">=2.0.1", "Can release any version, > than 2.0.1 on next-major");
merge(branches, 'next-major', 'beta'); merge(branches, "next-major", "beta");
release(branches, 'beta', '3.0.0-beta.1'); release(branches, "beta", "3.0.0-beta.1");
merge(branches, 'beta', 'alpha'); merge(branches, "beta", "alpha");
release(branches, 'alpha', '4.0.0-alpha.1'); release(branches, "alpha", "4.0.0-alpha.1");
result = (await getBranches('repositoryUrl', 'master', {options: {branches}})).map(({name, range}) => ({ result = (await getBranches("repositoryUrl", "master", { options: { branches } })).map(({ name, range }) => ({
name, name,
range, range,
})); }));
t.is(getBranch(result, 'next-major').range, '>=2.0.1', 'Can release any version, > than 2.0.1 on next-major'); t.is(getBranch(result, "next-major").range, ">=2.0.1", "Can release any version, > than 2.0.1 on next-major");
merge(branches, 'master', '1.0.x'); merge(branches, "master", "1.0.x");
merge(branches, 'master', '1.x'); merge(branches, "master", "1.x");
result = (await getBranches('repositoryUrl', 'master', {options: {branches}})).map(({name, range}) => ({ result = (await getBranches("repositoryUrl", "master", { options: { branches } })).map(({ name, range }) => ({
name, name,
range, range,
})); }));
t.is(getBranch(result, 'master').range, '>=1.0.1 <1.1.0', 'Can release only patch, > than 1.0.1 on master'); t.is(getBranch(result, "master").range, ">=1.0.1 <1.1.0", "Can release only patch, > than 1.0.1 on master");
t.is( t.is(
getBranch(result, '1.0.x').range, getBranch(result, "1.0.x").range,
'>=1.0.1 <1.0.1', ">=1.0.1 <1.0.1",
'Cannot release on 1.0.x before >= 1.1.0 is released on master' "Cannot release on 1.0.x before >= 1.1.0 is released on master"
); );
t.is(getBranch(result, '1.x').range, '>=1.1.0 <1.0.1', 'Cannot release on 1.x before >= 1.2.0 is released on master'); t.is(getBranch(result, "1.x").range, ">=1.1.0 <1.0.1", "Cannot release on 1.x before >= 1.2.0 is released on master");
release(branches, 'master', '1.0.2'); release(branches, "master", "1.0.2");
release(branches, 'master', '1.0.3'); release(branches, "master", "1.0.3");
release(branches, 'master', '1.0.4'); release(branches, "master", "1.0.4");
result = (await getBranches('repositoryUrl', 'master', {options: {branches}})).map(({name, range}) => ({ result = (await getBranches("repositoryUrl", "master", { options: { branches } })).map(({ name, range }) => ({
name, name,
range, range,
})); }));
t.is(getBranch(result, 'master').range, '>=1.0.4 <1.1.0', 'Can release only patch, > than 1.0.4 on master'); t.is(getBranch(result, "master").range, ">=1.0.4 <1.1.0", "Can release only patch, > than 1.0.4 on master");
t.is( t.is(
getBranch(result, '1.0.x').range, getBranch(result, "1.0.x").range,
'>=1.0.1 <1.0.2', ">=1.0.1 <1.0.2",
'Cannot release on 1.0.x before >= 1.1.0 is released on master' "Cannot release on 1.0.x before >= 1.1.0 is released on master"
); );
t.is(getBranch(result, '1.x').range, '>=1.1.0 <1.0.2', 'Cannot release on 1.x before >= 1.2.0 is released on master'); t.is(getBranch(result, "1.x").range, ">=1.1.0 <1.0.2", "Cannot release on 1.x before >= 1.2.0 is released on master");
merge(branches, 'next', 'master'); merge(branches, "next", "master");
result = (await getBranches('repositoryUrl', 'master', {options: {branches}})).map(({name, range}) => ({ result = (await getBranches("repositoryUrl", "master", { options: { branches } })).map(({ name, range }) => ({
name, name,
range, range,
})); }));
t.is(getBranch(result, 'master').range, '>=1.1.1', 'Can release only > than 1.1.1 on master'); t.is(getBranch(result, "master").range, ">=1.1.1", "Can release only > than 1.1.1 on master");
t.is(getBranch(result, 'next').range, '>=1.1.1 <2.0.0', 'Can release only patch or minor, > than 1.1.1 on next'); t.is(getBranch(result, "next").range, ">=1.1.1 <2.0.0", "Can release only patch or minor, > than 1.1.1 on next");
t.is(getBranch(result, 'next-major').range, '>=2.0.1', 'Can release any version, > than 2.0.1 on next-major'); t.is(getBranch(result, "next-major").range, ">=2.0.1", "Can release any version, > than 2.0.1 on next-major");
t.is( t.is(
getBranch(result, '1.0.x').range, getBranch(result, "1.0.x").range,
'>=1.0.1 <1.0.2', ">=1.0.1 <1.0.2",
'Cannot release on 1.0.x before 1.0.x version from master are merged' "Cannot release on 1.0.x before 1.0.x version from master are merged"
); );
t.is(getBranch(result, '1.x').range, '>=1.1.0 <1.0.2', 'Cannot release on 1.x before >= 2.0.0 is released on master'); t.is(getBranch(result, "1.x").range, ">=1.1.0 <1.0.2", "Cannot release on 1.x before >= 2.0.0 is released on master");
merge(branches, 'master', '1.0.x', '1.0.4'); merge(branches, "master", "1.0.x", "1.0.4");
result = (await getBranches('repositoryUrl', 'master', {options: {branches}})).map(({name, range}) => ({ result = (await getBranches("repositoryUrl", "master", { options: { branches } })).map(({ name, range }) => ({
name, name,
range, range,
})); }));
t.is(getBranch(result, 'master').range, '>=1.1.1', 'Can release only > than 1.1.1 on master'); t.is(getBranch(result, "master").range, ">=1.1.1", "Can release only > than 1.1.1 on master");
t.is(getBranch(result, '1.0.x').range, '>=1.0.4 <1.1.0', 'Can release on 1.0.x only within range'); t.is(getBranch(result, "1.0.x").range, ">=1.0.4 <1.1.0", "Can release on 1.0.x only within range");
t.is(getBranch(result, '1.x').range, '>=1.1.0 <1.1.0', 'Cannot release on 1.x before >= 2.0.0 is released on master'); t.is(getBranch(result, "1.x").range, ">=1.1.0 <1.1.0", "Cannot release on 1.x before >= 2.0.0 is released on master");
merge(branches, 'master', '1.x'); merge(branches, "master", "1.x");
result = (await getBranches('repositoryUrl', 'master', {options: {branches}})).map(({name, range}) => ({ result = (await getBranches("repositoryUrl", "master", { options: { branches } })).map(({ name, range }) => ({
name, name,
range, range,
})); }));
t.is(getBranch(result, 'master').range, '>=1.1.1', 'Can release only > than 1.1.1 on master'); t.is(getBranch(result, "master").range, ">=1.1.1", "Can release only > than 1.1.1 on master");
t.is(getBranch(result, '1.0.x').range, '>=1.0.4 <1.1.0', 'Can release on 1.0.x only within range'); t.is(getBranch(result, "1.0.x").range, ">=1.0.4 <1.1.0", "Can release on 1.0.x only within range");
t.is(getBranch(result, '1.x').range, '>=1.1.1 <1.1.1', 'Cannot release on 1.x before >= 2.0.0 is released on master'); t.is(getBranch(result, "1.x").range, ">=1.1.1 <1.1.1", "Cannot release on 1.x before >= 2.0.0 is released on master");
merge(branches, 'next-major', 'next'); merge(branches, "next-major", "next");
merge(branches, 'next', 'master'); merge(branches, "next", "master");
result = (await getBranches('repositoryUrl', 'master', {options: {branches}})).map(({name, range}) => ({ result = (await getBranches("repositoryUrl", "master", { options: { branches } })).map(({ name, range }) => ({
name, name,
range, range,
})); }));
t.is(getBranch(result, 'master').range, '>=2.0.1', 'Can release only > than 2.0.1 on master'); t.is(getBranch(result, "master").range, ">=2.0.1", "Can release only > than 2.0.1 on master");
t.is(getBranch(result, 'next').range, '>=2.0.1', 'Can release only > than 2.0.1 on next'); t.is(getBranch(result, "next").range, ">=2.0.1", "Can release only > than 2.0.1 on next");
t.is(getBranch(result, 'next-major').range, '>=2.0.1', 'Can release only > than 2.0.1 on next-major'); t.is(getBranch(result, "next-major").range, ">=2.0.1", "Can release only > than 2.0.1 on next-major");
t.is(getBranch(result, '1.x').range, '>=1.1.1 <2.0.0', 'Can release on 1.x only within range'); t.is(getBranch(result, "1.x").range, ">=1.1.1 <2.0.0", "Can release on 1.x only within range");
merge(branches, 'beta', 'master'); merge(branches, "beta", "master");
release(branches, 'master', '3.0.0'); release(branches, "master", "3.0.0");
result = (await getBranches('repositoryUrl', 'master', {options: {branches}})).map(({name, range}) => ({ result = (await getBranches("repositoryUrl", "master", { options: { branches } })).map(({ name, range }) => ({
name, name,
range, range,
})); }));
t.is(getBranch(result, 'master').range, '>=3.0.0', 'Can release only > than 3.0.0 on master'); t.is(getBranch(result, "master").range, ">=3.0.0", "Can release only > than 3.0.0 on master");
t.is(getBranch(result, 'next').range, '>=3.0.0', 'Can release only > than 3.0.0 on next'); t.is(getBranch(result, "next").range, ">=3.0.0", "Can release only > than 3.0.0 on next");
t.is(getBranch(result, 'next-major').range, '>=3.0.0', 'Can release only > than 3.0.0 on next-major'); t.is(getBranch(result, "next-major").range, ">=3.0.0", "Can release only > than 3.0.0 on next-major");
branches.push({name: '1.1.x', tags: []}); branches.push({ name: "1.1.x", tags: [] });
merge(branches, '1.x', '1.1.x'); merge(branches, "1.x", "1.1.x");
result = (await getBranches('repositoryUrl', 'master', {options: {branches}})).map(({name, range}) => ({ result = (await getBranches("repositoryUrl", "master", { options: { branches } })).map(({ name, range }) => ({
name, name,
range, range,
})); }));
t.is(getBranch(result, '1.0.x').range, '>=1.0.4 <1.1.0', 'Can release on 1.0.x only within range'); t.is(getBranch(result, "1.0.x").range, ">=1.0.4 <1.1.0", "Can release on 1.0.x only within range");
t.is(getBranch(result, '1.1.x').range, '>=1.1.1 <1.2.0', 'Can release on 1.1.x only within range'); t.is(getBranch(result, "1.1.x").range, ">=1.1.1 <1.2.0", "Can release on 1.1.x only within range");
t.is(getBranch(result, '1.x').range, '>=1.2.0 <2.0.0', 'Can release on 1.x only within range'); t.is(getBranch(result, "1.x").range, ">=1.2.0 <2.0.0", "Can release on 1.x only within range");
}); });
test.serial('Throw SemanticReleaseError for invalid configurations', async (t) => { test.serial("Throw SemanticReleaseError for invalid configurations", async (t) => {
const branches = [ const branches = [
{name: '123', range: '123', tags: []}, { name: "123", range: "123", tags: [] },
{name: '1.x', tags: []}, { name: "1.x", tags: [] },
{name: 'maintenance-1', range: '1.x', tags: []}, { name: "maintenance-1", range: "1.x", tags: [] },
{name: '1.x.x', tags: []}, { name: "1.x.x", tags: [] },
{name: 'beta', prerelease: '', tags: []}, { name: "beta", prerelease: "", tags: [] },
{name: 'alpha', prerelease: 'alpha', tags: []}, { name: "alpha", prerelease: "alpha", tags: [] },
{name: 'preview', prerelease: 'alpha', tags: []}, { name: "preview", prerelease: "alpha", tags: [] },
]; ];
const context = { options: { branches } }; const context = { options: { branches } };
td.when(expand(repositoryUrl, context, branches)).thenResolve(remoteBranches); td.when(expand(repositoryUrl, context, branches)).thenResolve(remoteBranches);
td.when(getTags(context, remoteBranches)).thenResolve(branches); td.when(getTags(context, remoteBranches)).thenResolve(branches);
const error = await t.throwsAsync(getBranches(repositoryUrl, 'master', context)); const error = await t.throwsAsync(getBranches(repositoryUrl, "master", context));
const errors = [...error.errors]; const errors = [...error.errors];
t.is(errors[0].name, 'SemanticReleaseError'); t.is(errors[0].name, "SemanticReleaseError");
t.is(errors[0].code, 'EMAINTENANCEBRANCH'); t.is(errors[0].code, "EMAINTENANCEBRANCH");
t.truthy(errors[0].message); t.truthy(errors[0].message);
t.truthy(errors[0].details); t.truthy(errors[0].details);
t.is(errors[1].name, 'SemanticReleaseError'); t.is(errors[1].name, "SemanticReleaseError");
t.is(errors[1].code, 'EMAINTENANCEBRANCHES'); t.is(errors[1].code, "EMAINTENANCEBRANCHES");
t.truthy(errors[1].message); t.truthy(errors[1].message);
t.truthy(errors[1].details); t.truthy(errors[1].details);
t.is(errors[2].name, 'SemanticReleaseError'); t.is(errors[2].name, "SemanticReleaseError");
t.is(errors[2].code, 'EPRERELEASEBRANCH'); t.is(errors[2].code, "EPRERELEASEBRANCH");
t.truthy(errors[2].message); t.truthy(errors[2].message);
t.truthy(errors[2].details); t.truthy(errors[2].details);
t.is(errors[3].name, 'SemanticReleaseError'); t.is(errors[3].name, "SemanticReleaseError");
t.is(errors[3].code, 'EPRERELEASEBRANCHES'); t.is(errors[3].code, "EPRERELEASEBRANCHES");
t.truthy(errors[3].message); t.truthy(errors[3].message);
t.truthy(errors[3].details); t.truthy(errors[3].details);
t.is(errors[4].name, 'SemanticReleaseError'); t.is(errors[4].name, "SemanticReleaseError");
t.is(errors[4].code, 'ERELEASEBRANCHES'); t.is(errors[4].code, "ERELEASEBRANCHES");
t.truthy(errors[4].message); t.truthy(errors[4].message);
t.truthy(errors[4].details); t.truthy(errors[4].details);
}); });
test.serial('Throw a SemanticReleaseError if there is duplicate branches', async (t) => { test.serial("Throw a SemanticReleaseError if there is duplicate branches", async (t) => {
const branches = [ const branches = [
{name: 'master', tags: []}, { name: "master", tags: [] },
{name: 'master', tags: []}, { name: "master", tags: [] },
]; ];
const context = { options: { branches } }; const context = { options: { branches } };
td.when(expand(repositoryUrl, context, branches)).thenResolve(remoteBranches); td.when(expand(repositoryUrl, context, branches)).thenResolve(remoteBranches);
td.when(getTags(context, remoteBranches)).thenResolve(branches); td.when(getTags(context, remoteBranches)).thenResolve(branches);
const errors = [...(await t.throwsAsync(getBranches(repositoryUrl, 'master', context))).errors]; const errors = [...(await t.throwsAsync(getBranches(repositoryUrl, "master", context))).errors];
t.is(errors[0].name, 'SemanticReleaseError'); t.is(errors[0].name, "SemanticReleaseError");
t.is(errors[0].code, 'EDUPLICATEBRANCHES'); t.is(errors[0].code, "EDUPLICATEBRANCHES");
t.truthy(errors[0].message); t.truthy(errors[0].message);
t.truthy(errors[0].details); t.truthy(errors[0].details);
}); });
test.serial('Throw a SemanticReleaseError for each invalid branch name', async (t) => { test.serial("Throw a SemanticReleaseError for each invalid branch name", async (t) => {
const branches = [ const branches = [
{name: '~master', tags: []}, { name: "~master", tags: [] },
{name: '^master', tags: []}, { name: "^master", tags: [] },
]; ];
const context = { options: { branches } }; const context = { options: { branches } };
const remoteBranches = []; const remoteBranches = [];
td.when(expand(repositoryUrl, context, branches)).thenResolve(remoteBranches); td.when(expand(repositoryUrl, context, branches)).thenResolve(remoteBranches);
td.when(getTags(context, remoteBranches)).thenResolve(branches); td.when(getTags(context, remoteBranches)).thenResolve(branches);
const errors = [...(await t.throwsAsync(getBranches(repositoryUrl, 'master', context))).errors]; const errors = [...(await t.throwsAsync(getBranches(repositoryUrl, "master", context))).errors];
t.is(errors[0].name, 'SemanticReleaseError'); t.is(errors[0].name, "SemanticReleaseError");
t.is(errors[0].code, 'EINVALIDBRANCHNAME'); t.is(errors[0].code, "EINVALIDBRANCHNAME");
t.truthy(errors[0].message); t.truthy(errors[0].message);
t.truthy(errors[0].details); t.truthy(errors[0].details);
t.is(errors[1].name, 'SemanticReleaseError'); t.is(errors[1].name, "SemanticReleaseError");
t.is(errors[1].code, 'EINVALIDBRANCHNAME'); t.is(errors[1].code, "EINVALIDBRANCHNAME");
t.truthy(errors[1].message); t.truthy(errors[1].message);
t.truthy(errors[1].details); t.truthy(errors[1].details);
}); });

View File

@ -1,54 +1,54 @@
import test from 'ava'; import test from "ava";
import expand from '../../lib/branches/expand.js'; import expand from "../../lib/branches/expand.js";
import {gitCheckout, gitCommits, gitPush, gitRepo} from '../helpers/git-utils.js'; import { gitCheckout, gitCommits, gitPush, gitRepo } from "../helpers/git-utils.js";
test('Expand branches defined with globs', async (t) => { test("Expand branches defined with globs", async (t) => {
const { cwd, repositoryUrl } = await gitRepo(true); const { cwd, repositoryUrl } = await gitRepo(true);
await gitCommits(['First'], {cwd}); await gitCommits(["First"], { cwd });
await gitPush(repositoryUrl, 'master', {cwd}); await gitPush(repositoryUrl, "master", { cwd });
await gitCheckout('1.0.x', true, {cwd}); await gitCheckout("1.0.x", true, { cwd });
await gitCommits(['Second'], {cwd}); await gitCommits(["Second"], { cwd });
await gitPush(repositoryUrl, '1.0.x', {cwd}); await gitPush(repositoryUrl, "1.0.x", { cwd });
await gitCheckout('1.x.x', true, {cwd}); await gitCheckout("1.x.x", true, { cwd });
await gitCommits(['Third'], {cwd}); await gitCommits(["Third"], { cwd });
await gitPush(repositoryUrl, '1.x.x', {cwd}); await gitPush(repositoryUrl, "1.x.x", { cwd });
await gitCheckout('2.x', true, {cwd}); await gitCheckout("2.x", true, { cwd });
await gitCommits(['Fourth'], {cwd}); await gitCommits(["Fourth"], { cwd });
await gitPush(repositoryUrl, '2.x', {cwd}); await gitPush(repositoryUrl, "2.x", { cwd });
await gitCheckout('next', true, {cwd}); await gitCheckout("next", true, { cwd });
await gitCommits(['Fifth'], {cwd}); await gitCommits(["Fifth"], { cwd });
await gitPush(repositoryUrl, 'next', {cwd}); await gitPush(repositoryUrl, "next", { cwd });
await gitCheckout('pre/foo', true, {cwd}); await gitCheckout("pre/foo", true, { cwd });
await gitCommits(['Sixth'], {cwd}); await gitCommits(["Sixth"], { cwd });
await gitPush(repositoryUrl, 'pre/foo', {cwd}); await gitPush(repositoryUrl, "pre/foo", { cwd });
await gitCheckout('pre/bar', true, {cwd}); await gitCheckout("pre/bar", true, { cwd });
await gitCommits(['Seventh'], {cwd}); await gitCommits(["Seventh"], { cwd });
await gitPush(repositoryUrl, 'pre/bar', {cwd}); await gitPush(repositoryUrl, "pre/bar", { cwd });
await gitCheckout('beta', true, {cwd}); await gitCheckout("beta", true, { cwd });
await gitCommits(['Eighth'], {cwd}); await gitCommits(["Eighth"], { cwd });
await gitPush(repositoryUrl, 'beta', {cwd}); await gitPush(repositoryUrl, "beta", { cwd });
const branches = [ const branches = [
// Should match all maintenance type branches // Should match all maintenance type branches
{name: '+([0-9])?(.{+([0-9]),x}).x'}, { name: "+([0-9])?(.{+([0-9]),x}).x" },
{name: 'master', channel: 'latest'}, { name: "master", channel: "latest" },
{name: 'next'}, { name: "next" },
{name: 'pre/{foo,bar}', channel: `\${name.replace(/^pre\\//g, '')}`, prerelease: true}, { name: "pre/{foo,bar}", channel: `\${name.replace(/^pre\\//g, '')}`, prerelease: true },
// Should be ignored as there is no matching branches in the repo // Should be ignored as there is no matching branches in the repo
{name: 'missing'}, { name: "missing" },
// Should be ignored as the matching branch in the repo is already matched by `/^pre\\/(\\w+)$/gi` // Should be ignored as the matching branch in the repo is already matched by `/^pre\\/(\\w+)$/gi`
{name: '*/foo', channel: 'foo', prerelease: 'foo'}, { name: "*/foo", channel: "foo", prerelease: "foo" },
{name: 'beta', channel: `channel-\${name}`, prerelease: true}, { name: "beta", channel: `channel-\${name}`, prerelease: true },
]; ];
t.deepEqual(await expand(repositoryUrl, { cwd }, branches), [ t.deepEqual(await expand(repositoryUrl, { cwd }, branches), [
{name: '1.0.x'}, { name: "1.0.x" },
{name: '1.x.x'}, { name: "1.x.x" },
{name: '2.x'}, { name: "2.x" },
{name: 'master', channel: 'latest'}, { name: "master", channel: "latest" },
{name: 'next'}, { name: "next" },
{name: 'pre/bar', channel: 'bar', prerelease: true}, { name: "pre/bar", channel: "bar", prerelease: true },
{name: 'pre/foo', channel: 'foo', prerelease: true}, { name: "pre/foo", channel: "foo", prerelease: true },
{name: 'beta', channel: 'channel-beta', prerelease: true}, { name: "beta", channel: "channel-beta", prerelease: true },
]); ]);
}); });

View File

@ -1,153 +1,156 @@
import test from 'ava'; import test from "ava";
import getTags from '../../lib/branches/get-tags.js'; import getTags from "../../lib/branches/get-tags.js";
import {gitAddNote, gitCheckout, gitCommits, gitRepo, gitTagVersion} from '../helpers/git-utils.js'; import { gitAddNote, gitCheckout, gitCommits, gitRepo, gitTagVersion } from "../helpers/git-utils.js";
test('Get the valid tags', async (t) => { test("Get the valid tags", async (t) => {
const { cwd } = await gitRepo(); const { cwd } = await gitRepo();
const commits = await gitCommits(['First'], {cwd}); const commits = await gitCommits(["First"], { cwd });
await gitTagVersion('foo', undefined, {cwd}); await gitTagVersion("foo", undefined, { cwd });
await gitTagVersion('v2.0.0', undefined, {cwd}); await gitTagVersion("v2.0.0", undefined, { cwd });
commits.push(...(await gitCommits(['Second'], {cwd}))); commits.push(...(await gitCommits(["Second"], { cwd })));
await gitTagVersion('v1.0.0', undefined, {cwd}); await gitTagVersion("v1.0.0", undefined, { cwd });
commits.push(...(await gitCommits(['Third'], {cwd}))); commits.push(...(await gitCommits(["Third"], { cwd })));
await gitTagVersion('v3.0', undefined, {cwd}); await gitTagVersion("v3.0", undefined, { cwd });
commits.push(...(await gitCommits(['Fourth'], {cwd}))); commits.push(...(await gitCommits(["Fourth"], { cwd })));
await gitTagVersion('v3.0.0-beta.1', undefined, {cwd}); await gitTagVersion("v3.0.0-beta.1", undefined, { cwd });
const result = await getTags({cwd, options: {tagFormat: `v\${version}`}}, [{name: 'master'}]); const result = await getTags({ cwd, options: { tagFormat: `v\${version}` } }, [{ name: "master" }]);
t.deepEqual(result, [ t.deepEqual(result, [
{ {
name: 'master', name: "master",
tags: [ tags: [
{gitTag: 'v1.0.0', version: '1.0.0', channels: [null]}, { gitTag: "v1.0.0", version: "1.0.0", channels: [null] },
{gitTag: 'v2.0.0', version: '2.0.0', channels: [null]}, { gitTag: "v2.0.0", version: "2.0.0", channels: [null] },
{gitTag: 'v3.0.0-beta.1', version: '3.0.0-beta.1', channels: [null]}, { gitTag: "v3.0.0-beta.1", version: "3.0.0-beta.1", channels: [null] },
], ],
}, },
]); ]);
}); });
test('Get the valid tags from multiple branches', async (t) => { test("Get the valid tags from multiple branches", async (t) => {
const { cwd } = await gitRepo(); const { cwd } = await gitRepo();
await gitCommits(['First'], {cwd}); await gitCommits(["First"], { cwd });
await gitTagVersion('v1.0.0', undefined, {cwd}); await gitTagVersion("v1.0.0", undefined, { cwd });
await gitAddNote(JSON.stringify({channels: [null, '1.x']}), 'v1.0.0', {cwd}); await gitAddNote(JSON.stringify({ channels: [null, "1.x"] }), "v1.0.0", { cwd });
await gitCommits(['Second'], {cwd}); await gitCommits(["Second"], { cwd });
await gitTagVersion('v1.1.0', undefined, {cwd}); await gitTagVersion("v1.1.0", undefined, { cwd });
await gitAddNote(JSON.stringify({channels: [null, '1.x']}), 'v1.1.0', {cwd}); await gitAddNote(JSON.stringify({ channels: [null, "1.x"] }), "v1.1.0", { cwd });
await gitCheckout('1.x', true, {cwd}); await gitCheckout("1.x", true, { cwd });
await gitCheckout('master', false, {cwd}); await gitCheckout("master", false, { cwd });
await gitCommits(['Third'], {cwd}); await gitCommits(["Third"], { cwd });
await gitTagVersion('v2.0.0', undefined, {cwd}); await gitTagVersion("v2.0.0", undefined, { cwd });
await gitAddNote(JSON.stringify({channels: [null, 'next']}), 'v2.0.0', {cwd}); await gitAddNote(JSON.stringify({ channels: [null, "next"] }), "v2.0.0", { cwd });
await gitCheckout('next', true, {cwd}); await gitCheckout("next", true, { cwd });
await gitCommits(['Fourth'], {cwd}); await gitCommits(["Fourth"], { cwd });
await gitTagVersion('v3.0.0', undefined, {cwd}); await gitTagVersion("v3.0.0", undefined, { cwd });
await gitAddNote(JSON.stringify({channels: ['next']}), 'v3.0.0', {cwd}); await gitAddNote(JSON.stringify({ channels: ["next"] }), "v3.0.0", { cwd });
const result = await getTags({ cwd, options: { tagFormat: `v\${version}` } }, [ const result = await getTags({ cwd, options: { tagFormat: `v\${version}` } }, [
{name: '1.x'}, { name: "1.x" },
{name: 'master'}, { name: "master" },
{name: 'next'}, { name: "next" },
]); ]);
t.deepEqual(result, [ t.deepEqual(result, [
{ {
name: '1.x', name: "1.x",
tags: [ tags: [
{gitTag: 'v1.0.0', version: '1.0.0', channels: [null, '1.x']}, { gitTag: "v1.0.0", version: "1.0.0", channels: [null, "1.x"] },
{gitTag: 'v1.1.0', version: '1.1.0', channels: [null, '1.x']}, { gitTag: "v1.1.0", version: "1.1.0", channels: [null, "1.x"] },
], ],
}, },
{ {
name: 'master', name: "master",
tags: [...result[0].tags, {gitTag: 'v2.0.0', version: '2.0.0', channels: [null, 'next']}], tags: [...result[0].tags, { gitTag: "v2.0.0", version: "2.0.0", channels: [null, "next"] }],
}, },
{ {
name: 'next', name: "next",
tags: [...result[1].tags, {gitTag: 'v3.0.0', version: '3.0.0', channels: ['next']}], tags: [...result[1].tags, { gitTag: "v3.0.0", version: "3.0.0", channels: ["next"] }],
}, },
]); ]);
}); });
test('Return branches with and empty tags array if no valid tag is found', async (t) => { test("Return branches with and empty tags array if no valid tag is found", async (t) => {
const { cwd } = await gitRepo(); const { cwd } = await gitRepo();
await gitCommits(['First'], {cwd}); await gitCommits(["First"], { cwd });
await gitTagVersion('foo', undefined, {cwd}); await gitTagVersion("foo", undefined, { cwd });
await gitCommits(['Second'], {cwd}); await gitCommits(["Second"], { cwd });
await gitTagVersion('v2.0.x', undefined, {cwd}); await gitTagVersion("v2.0.x", undefined, { cwd });
await gitCommits(['Third'], {cwd}); await gitCommits(["Third"], { cwd });
await gitTagVersion('v3.0', undefined, {cwd}); await gitTagVersion("v3.0", undefined, { cwd });
const result = await getTags({cwd, options: {tagFormat: `prefix@v\${version}`}}, [{name: 'master'}]); const result = await getTags({ cwd, options: { tagFormat: `prefix@v\${version}` } }, [{ name: "master" }]);
t.deepEqual(result, [{name: 'master', tags: []}]); t.deepEqual(result, [{ name: "master", tags: [] }]);
}); });
test('Return branches with and empty tags array if no valid tag is found in history of configured branches', async (t) => { test("Return branches with and empty tags array if no valid tag is found in history of configured branches", async (t) => {
const { cwd } = await gitRepo(); const { cwd } = await gitRepo();
await gitCommits(['First'], {cwd}); await gitCommits(["First"], { cwd });
await gitCheckout('next', true, {cwd}); await gitCheckout("next", true, { cwd });
await gitCommits(['Second'], {cwd}); await gitCommits(["Second"], { cwd });
await gitTagVersion('v1.0.0', undefined, {cwd}); await gitTagVersion("v1.0.0", undefined, { cwd });
await gitAddNote(JSON.stringify({channels: [null, 'next']}), 'v1.0.0', {cwd}); await gitAddNote(JSON.stringify({ channels: [null, "next"] }), "v1.0.0", { cwd });
await gitCommits(['Third'], {cwd}); await gitCommits(["Third"], { cwd });
await gitTagVersion('v2.0.0', undefined, {cwd}); await gitTagVersion("v2.0.0", undefined, { cwd });
await gitAddNote(JSON.stringify({channels: [null, 'next']}), 'v2.0.0', {cwd}); await gitAddNote(JSON.stringify({ channels: [null, "next"] }), "v2.0.0", { cwd });
await gitCommits(['Fourth'], {cwd}); await gitCommits(["Fourth"], { cwd });
await gitTagVersion('v3.0.0', undefined, {cwd}); await gitTagVersion("v3.0.0", undefined, { cwd });
await gitAddNote(JSON.stringify({channels: [null, 'next']}), 'v3.0.0', {cwd}); await gitAddNote(JSON.stringify({ channels: [null, "next"] }), "v3.0.0", { cwd });
await gitCheckout('master', false, {cwd}); await gitCheckout("master", false, { cwd });
const result = await getTags({cwd, options: {tagFormat: `prefix@v\${version}`}}, [{name: 'master'}, {name: 'next'}]); const result = await getTags({ cwd, options: { tagFormat: `prefix@v\${version}` } }, [
{ name: "master" },
{ name: "next" },
]);
t.deepEqual(result, [ t.deepEqual(result, [
{name: 'master', tags: []}, { name: "master", tags: [] },
{name: 'next', tags: []}, { name: "next", tags: [] },
]); ]);
}); });
test('Get the highest valid tag corresponding to the "tagFormat"', async (t) => { test('Get the highest valid tag corresponding to the "tagFormat"', async (t) => {
const { cwd } = await gitRepo(); const { cwd } = await gitRepo();
await gitCommits(['First'], {cwd}); await gitCommits(["First"], { cwd });
await gitTagVersion('1.0.0', undefined, {cwd}); await gitTagVersion("1.0.0", undefined, { cwd });
t.deepEqual(await getTags({cwd, options: {tagFormat: `\${version}`}}, [{name: 'master'}]), [ t.deepEqual(await getTags({ cwd, options: { tagFormat: `\${version}` } }, [{ name: "master" }]), [
{name: 'master', tags: [{gitTag: '1.0.0', version: '1.0.0', channels: [null]}]}, { name: "master", tags: [{ gitTag: "1.0.0", version: "1.0.0", channels: [null] }] },
]); ]);
await gitTagVersion('foo-1.0.0-bar', undefined, {cwd}); await gitTagVersion("foo-1.0.0-bar", undefined, { cwd });
t.deepEqual(await getTags({cwd, options: {tagFormat: `foo-\${version}-bar`}}, [{name: 'master'}]), [ t.deepEqual(await getTags({ cwd, options: { tagFormat: `foo-\${version}-bar` } }, [{ name: "master" }]), [
{name: 'master', tags: [{gitTag: 'foo-1.0.0-bar', version: '1.0.0', channels: [null]}]}, { name: "master", tags: [{ gitTag: "foo-1.0.0-bar", version: "1.0.0", channels: [null] }] },
]); ]);
await gitTagVersion('foo-v1.0.0-bar', undefined, {cwd}); await gitTagVersion("foo-v1.0.0-bar", undefined, { cwd });
t.deepEqual(await getTags({cwd, options: {tagFormat: `foo-v\${version}-bar`}}, [{name: 'master'}]), [ t.deepEqual(await getTags({ cwd, options: { tagFormat: `foo-v\${version}-bar` } }, [{ name: "master" }]), [
{ {
name: 'master', name: "master",
tags: [{gitTag: 'foo-v1.0.0-bar', version: '1.0.0', channels: [null]}], tags: [{ gitTag: "foo-v1.0.0-bar", version: "1.0.0", channels: [null] }],
}, },
]); ]);
await gitTagVersion('(.+)/1.0.0/(a-z)', undefined, {cwd}); await gitTagVersion("(.+)/1.0.0/(a-z)", undefined, { cwd });
t.deepEqual(await getTags({cwd, options: {tagFormat: `(.+)/\${version}/(a-z)`}}, [{name: 'master'}]), [ t.deepEqual(await getTags({ cwd, options: { tagFormat: `(.+)/\${version}/(a-z)` } }, [{ name: "master" }]), [
{ {
name: 'master', name: "master",
tags: [{gitTag: '(.+)/1.0.0/(a-z)', version: '1.0.0', channels: [null]}], tags: [{ gitTag: "(.+)/1.0.0/(a-z)", version: "1.0.0", channels: [null] }],
}, },
]); ]);
await gitTagVersion('2.0.0-1.0.0-bar.1', undefined, {cwd}); await gitTagVersion("2.0.0-1.0.0-bar.1", undefined, { cwd });
t.deepEqual(await getTags({cwd, options: {tagFormat: `2.0.0-\${version}-bar.1`}}, [{name: 'master'}]), [ t.deepEqual(await getTags({ cwd, options: { tagFormat: `2.0.0-\${version}-bar.1` } }, [{ name: "master" }]), [
{ {
name: 'master', name: "master",
tags: [{gitTag: '2.0.0-1.0.0-bar.1', version: '1.0.0', channels: [null]}], tags: [{ gitTag: "2.0.0-1.0.0-bar.1", version: "1.0.0", channels: [null] }],
}, },
]); ]);
await gitTagVersion('3.0.0-bar.2', undefined, {cwd}); await gitTagVersion("3.0.0-bar.2", undefined, { cwd });
t.deepEqual(await getTags({cwd, options: {tagFormat: `\${version}-bar.2`}}, [{name: 'master'}]), [ t.deepEqual(await getTags({ cwd, options: { tagFormat: `\${version}-bar.2` } }, [{ name: "master" }]), [
{name: 'master', tags: [{gitTag: '3.0.0-bar.2', version: '3.0.0', channels: [null]}]}, { name: "master", tags: [{ gitTag: "3.0.0-bar.2", version: "3.0.0", channels: [null] }] },
]); ]);
}); });

View File

@ -1,15 +1,15 @@
import test from 'ava'; import test from "ava";
import * as normalize from '../../lib/branches/normalize.js'; import * as normalize from "../../lib/branches/normalize.js";
const toTags = (versions) => versions.map((version) => ({ version })); const toTags = (versions) => versions.map((version) => ({ version }));
test('Maintenance branches - initial state', (t) => { test("Maintenance branches - initial state", (t) => {
const maintenance = [ const maintenance = [
{name: '1.x', channel: '1.x', tags: []}, { name: "1.x", channel: "1.x", tags: [] },
{name: '1.1.x', tags: []}, { name: "1.1.x", tags: [] },
{name: '1.2.x', tags: []}, { name: "1.2.x", tags: [] },
]; ];
const release = [{name: 'master', tags: []}]; const release = [{ name: "master", tags: [] }];
t.deepEqual( t.deepEqual(
normalize.maintenance({ maintenance, release }).map(({ type, name, range, accept, channel, mergeRange }) => ({ normalize.maintenance({ maintenance, release }).map(({ type, name, range, accept, channel, mergeRange }) => ({
type, type,
@ -21,44 +21,44 @@ test('Maintenance branches - initial state', (t) => {
})), })),
[ [
{ {
type: 'maintenance', type: "maintenance",
name: '1.1.x', name: "1.1.x",
range: '>=1.1.0 <1.0.0', range: ">=1.1.0 <1.0.0",
accept: [], accept: [],
channel: '1.1.x', channel: "1.1.x",
mergeRange: '>=1.1.0 <1.2.0', mergeRange: ">=1.1.0 <1.2.0",
}, },
{ {
type: 'maintenance', type: "maintenance",
name: '1.2.x', name: "1.2.x",
range: '>=1.2.0 <1.0.0', range: ">=1.2.0 <1.0.0",
accept: [], accept: [],
channel: '1.2.x', channel: "1.2.x",
mergeRange: '>=1.2.0 <1.3.0', mergeRange: ">=1.2.0 <1.3.0",
}, },
{ {
type: 'maintenance', type: "maintenance",
name: '1.x', name: "1.x",
range: '>=1.3.0 <1.0.0', range: ">=1.3.0 <1.0.0",
accept: [], accept: [],
channel: '1.x', channel: "1.x",
mergeRange: '>=1.3.0 <2.0.0', mergeRange: ">=1.3.0 <2.0.0",
}, },
] ]
); );
}); });
test('Maintenance branches - cap range to first release present on default branch and not in any Maintenance one', (t) => { test("Maintenance branches - cap range to first release present on default branch and not in any Maintenance one", (t) => {
const maintenance = [ const maintenance = [
{name: '1.x', tags: toTags(['1.0.0', '1.1.0', '1.1.1', '1.2.0', '1.2.1', '1.3.0', '1.4.0', '1.5.0'])}, { name: "1.x", tags: toTags(["1.0.0", "1.1.0", "1.1.1", "1.2.0", "1.2.1", "1.3.0", "1.4.0", "1.5.0"]) },
{name: 'name', range: '1.1.x', tags: toTags(['1.0.0', '1.0.1', '1.1.0', '1.1.1'])}, { name: "name", range: "1.1.x", tags: toTags(["1.0.0", "1.0.1", "1.1.0", "1.1.1"]) },
{name: '1.2.x', tags: toTags(['1.0.0', '1.1.0', '1.1.1', '1.2.0', '1.2.1'])}, { name: "1.2.x", tags: toTags(["1.0.0", "1.1.0", "1.1.1", "1.2.0", "1.2.1"]) },
{name: '2.x.x', tags: toTags(['1.0.0', '1.1.0', '1.1.1', '1.2.0', '1.2.1', '1.5.0'])}, { name: "2.x.x", tags: toTags(["1.0.0", "1.1.0", "1.1.1", "1.2.0", "1.2.1", "1.5.0"]) },
]; ];
const release = [ const release = [
{ {
name: 'master', name: "master",
tags: toTags(['1.0.0', '1.1.0', '1.1.1', '1.2.0', '1.2.1', '1.3.0', '1.4.0', '1.5.0', '1.6.0', '2.0.0']), tags: toTags(["1.0.0", "1.1.0", "1.1.1", "1.2.0", "1.2.1", "1.3.0", "1.4.0", "1.5.0", "1.6.0", "2.0.0"]),
}, },
]; ];
@ -75,47 +75,47 @@ test('Maintenance branches - cap range to first release present on default branc
})), })),
[ [
{ {
type: 'maintenance', type: "maintenance",
name: 'name', name: "name",
range: '>=1.1.1 <1.2.0', range: ">=1.1.1 <1.2.0",
accept: ['patch'], accept: ["patch"],
channel: 'name', channel: "name",
mergeRange: '>=1.1.0 <1.2.0', mergeRange: ">=1.1.0 <1.2.0",
}, },
{ {
type: 'maintenance', type: "maintenance",
name: '1.2.x', name: "1.2.x",
range: '>=1.2.1 <1.3.0', range: ">=1.2.1 <1.3.0",
accept: ['patch'], accept: ["patch"],
channel: '1.2.x', channel: "1.2.x",
mergeRange: '>=1.2.0 <1.3.0', mergeRange: ">=1.2.0 <1.3.0",
}, },
{ {
type: 'maintenance', type: "maintenance",
name: '1.x', name: "1.x",
range: '>=1.5.0 <1.6.0', range: ">=1.5.0 <1.6.0",
accept: ['patch'], accept: ["patch"],
channel: '1.x', channel: "1.x",
mergeRange: '>=1.3.0 <2.0.0', mergeRange: ">=1.3.0 <2.0.0",
}, },
{ {
type: 'maintenance', type: "maintenance",
name: '2.x.x', name: "2.x.x",
range: '>=2.0.0 <1.6.0', range: ">=2.0.0 <1.6.0",
accept: [], accept: [],
channel: '2.x.x', channel: "2.x.x",
mergeRange: '>=2.0.0 <3.0.0', mergeRange: ">=2.0.0 <3.0.0",
}, },
] ]
); );
}); });
test('Maintenance branches - cap range to default branch last release if all release are also present on maintenance branch', (t) => { test("Maintenance branches - cap range to default branch last release if all release are also present on maintenance branch", (t) => {
const maintenance = [ const maintenance = [
{name: '1.x', tags: toTags(['1.0.0', '1.2.0', '1.3.0'])}, { name: "1.x", tags: toTags(["1.0.0", "1.2.0", "1.3.0"]) },
{name: '2.x.x', tags: toTags(['1.0.0', '1.2.0', '1.3.0', '2.0.0'])}, { name: "2.x.x", tags: toTags(["1.0.0", "1.2.0", "1.3.0", "2.0.0"]) },
]; ];
const release = [{name: 'master', tags: toTags(['1.0.0', '1.2.0', '1.3.0', '2.0.0'])}]; const release = [{ name: "master", tags: toTags(["1.0.0", "1.2.0", "1.3.0", "2.0.0"]) }];
t.deepEqual( t.deepEqual(
normalize.maintenance({ maintenance, release }).map(({ type, name, range, accept, channel, mergeRange }) => ({ normalize.maintenance({ maintenance, release }).map(({ type, name, range, accept, channel, mergeRange }) => ({
@ -128,30 +128,30 @@ test('Maintenance branches - cap range to default branch last release if all rel
})), })),
[ [
{ {
type: 'maintenance', type: "maintenance",
name: '1.x', name: "1.x",
range: '>=1.3.0 <2.0.0', range: ">=1.3.0 <2.0.0",
accept: ['patch', 'minor'], accept: ["patch", "minor"],
channel: '1.x', channel: "1.x",
mergeRange: '>=1.0.0 <2.0.0', mergeRange: ">=1.0.0 <2.0.0",
}, },
{ {
type: 'maintenance', type: "maintenance",
name: '2.x.x', name: "2.x.x",
range: '>=2.0.0 <2.0.0', range: ">=2.0.0 <2.0.0",
accept: [], accept: [],
channel: '2.x.x', channel: "2.x.x",
mergeRange: '>=2.0.0 <3.0.0', mergeRange: ">=2.0.0 <3.0.0",
}, },
] ]
); );
}); });
test('Release branches - initial state', (t) => { test("Release branches - initial state", (t) => {
const release = [ const release = [
{name: 'master', tags: []}, { name: "master", tags: [] },
{name: 'next', channel: 'next', tags: []}, { name: "next", channel: "next", tags: [] },
{name: 'next-major', tags: []}, { name: "next-major", tags: [] },
]; ];
t.deepEqual( t.deepEqual(
@ -160,38 +160,38 @@ test('Release branches - initial state', (t) => {
.map(({ type, name, range, accept, channel, main }) => ({ type, name, range, accept, channel, main })), .map(({ type, name, range, accept, channel, main }) => ({ type, name, range, accept, channel, main })),
[ [
{ {
type: 'release', type: "release",
name: 'master', name: "master",
range: '>=1.0.0', range: ">=1.0.0",
accept: ['patch', 'minor', 'major'], accept: ["patch", "minor", "major"],
channel: undefined, channel: undefined,
main: true, main: true,
}, },
{ {
type: 'release', type: "release",
name: 'next', name: "next",
range: '>=1.0.0', range: ">=1.0.0",
accept: ['patch', 'minor', 'major'], accept: ["patch", "minor", "major"],
channel: 'next', channel: "next",
main: false, main: false,
}, },
{ {
type: 'release', type: "release",
name: 'next-major', name: "next-major",
range: '>=1.0.0', range: ">=1.0.0",
accept: ['patch', 'minor', 'major'], accept: ["patch", "minor", "major"],
channel: 'next-major', channel: "next-major",
main: false, main: false,
}, },
] ]
); );
}); });
test('Release branches - 3 release branches', (t) => { test("Release branches - 3 release branches", (t) => {
const release = [ const release = [
{name: 'master', tags: toTags(['1.0.0', '1.0.1', '1.0.2'])}, { name: "master", tags: toTags(["1.0.0", "1.0.1", "1.0.2"]) },
{name: 'next', tags: toTags(['1.0.0', '1.0.1', '1.0.2', '1.1.0', '1.2.0'])}, { name: "next", tags: toTags(["1.0.0", "1.0.1", "1.0.2", "1.1.0", "1.2.0"]) },
{name: 'next-major', tags: toTags(['1.0.0', '1.0.1', '1.0.2', '1.1.0', '1.2.0', '2.0.0', '2.0.1', '2.1.0'])}, { name: "next-major", tags: toTags(["1.0.0", "1.0.1", "1.0.2", "1.1.0", "1.2.0", "2.0.0", "2.0.1", "2.1.0"]) },
]; ];
t.deepEqual( t.deepEqual(
@ -199,31 +199,31 @@ test('Release branches - 3 release branches', (t) => {
.release({ release }) .release({ release })
.map(({ type, name, range, accept, channel, main }) => ({ type, name, range, accept, channel, main })), .map(({ type, name, range, accept, channel, main }) => ({ type, name, range, accept, channel, main })),
[ [
{type: 'release', name: 'master', range: '>=1.0.2 <1.1.0', accept: ['patch'], channel: undefined, main: true}, { type: "release", name: "master", range: ">=1.0.2 <1.1.0", accept: ["patch"], channel: undefined, main: true },
{ {
type: 'release', type: "release",
name: 'next', name: "next",
range: '>=1.2.0 <2.0.0', range: ">=1.2.0 <2.0.0",
accept: ['patch', 'minor'], accept: ["patch", "minor"],
channel: 'next', channel: "next",
main: false, main: false,
}, },
{ {
type: 'release', type: "release",
name: 'next-major', name: "next-major",
range: '>=2.1.0', range: ">=2.1.0",
accept: ['patch', 'minor', 'major'], accept: ["patch", "minor", "major"],
channel: 'next-major', channel: "next-major",
main: false, main: false,
}, },
] ]
); );
}); });
test('Release branches - 2 release branches', (t) => { test("Release branches - 2 release branches", (t) => {
const release = [ const release = [
{name: 'master', tags: toTags(['1.0.0', '1.0.1', '1.1.0', '1.1.1', '1.2.0'])}, { name: "master", tags: toTags(["1.0.0", "1.0.1", "1.1.0", "1.1.1", "1.2.0"]) },
{name: 'next', tags: toTags(['1.0.0', '1.0.1', '1.1.0', '1.1.1', '1.2.0', '2.0.0', '2.0.1', '2.1.0'])}, { name: "next", tags: toTags(["1.0.0", "1.0.1", "1.1.0", "1.1.1", "1.2.0", "2.0.0", "2.0.1", "2.1.0"]) },
]; ];
t.deepEqual( t.deepEqual(
@ -232,39 +232,41 @@ test('Release branches - 2 release branches', (t) => {
.map(({ type, name, range, accept, channel, main }) => ({ type, name, range, accept, channel, main })), .map(({ type, name, range, accept, channel, main }) => ({ type, name, range, accept, channel, main })),
[ [
{ {
type: 'release', type: "release",
name: 'master', name: "master",
range: '>=1.2.0 <2.0.0', range: ">=1.2.0 <2.0.0",
accept: ['patch', 'minor'], accept: ["patch", "minor"],
channel: undefined, channel: undefined,
main: true, main: true,
}, },
{ {
type: 'release', type: "release",
name: 'next', name: "next",
range: '>=2.1.0', range: ">=2.1.0",
accept: ['patch', 'minor', 'major'], accept: ["patch", "minor", "major"],
channel: 'next', channel: "next",
main: false, main: false,
}, },
] ]
); );
}); });
test('Release branches - 1 release branches', (t) => { test("Release branches - 1 release branches", (t) => {
const release = [{name: 'master', tags: toTags(['1.0.0', '1.1.0', '1.1.1', '1.2.0'])}]; const release = [{ name: "master", tags: toTags(["1.0.0", "1.1.0", "1.1.1", "1.2.0"]) }];
t.deepEqual( t.deepEqual(
normalize.release({release}).map(({type, name, range, accept, channel}) => ({type, name, range, accept, channel})), normalize
[{type: 'release', name: 'master', range: '>=1.2.0', accept: ['patch', 'minor', 'major'], channel: undefined}] .release({ release })
.map(({ type, name, range, accept, channel }) => ({ type, name, range, accept, channel })),
[{ type: "release", name: "master", range: ">=1.2.0", accept: ["patch", "minor", "major"], channel: undefined }]
); );
}); });
test('Release branches - cap ranges to first release only present on following branch', (t) => { test("Release branches - cap ranges to first release only present on following branch", (t) => {
const release = [ const release = [
{name: 'master', tags: toTags(['1.0.0', '1.1.0', '1.2.0', '2.0.0'])}, { name: "master", tags: toTags(["1.0.0", "1.1.0", "1.2.0", "2.0.0"]) },
{name: 'next', tags: toTags(['1.0.0', '1.1.0', '1.2.0', '2.0.0', '2.1.0'])}, { name: "next", tags: toTags(["1.0.0", "1.1.0", "1.2.0", "2.0.0", "2.1.0"]) },
{name: 'next-major', tags: toTags(['1.0.0', '1.1.0', '1.2.0', '2.0.0', '2.1.0', '2.2.0'])}, { name: "next-major", tags: toTags(["1.0.0", "1.1.0", "1.2.0", "2.0.0", "2.1.0", "2.2.0"]) },
]; ];
t.deepEqual( t.deepEqual(
@ -272,24 +274,24 @@ test('Release branches - cap ranges to first release only present on following b
.release({ release }) .release({ release })
.map(({ type, name, range, accept, channel, main }) => ({ type, name, range, accept, channel, main })), .map(({ type, name, range, accept, channel, main }) => ({ type, name, range, accept, channel, main })),
[ [
{type: 'release', name: 'master', range: '>=2.0.0 <2.1.0', accept: ['patch'], channel: undefined, main: true}, { type: "release", name: "master", range: ">=2.0.0 <2.1.0", accept: ["patch"], channel: undefined, main: true },
{type: 'release', name: 'next', range: '>=2.1.0 <2.2.0', accept: ['patch'], channel: 'next', main: false}, { type: "release", name: "next", range: ">=2.1.0 <2.2.0", accept: ["patch"], channel: "next", main: false },
{ {
type: 'release', type: "release",
name: 'next-major', name: "next-major",
range: '>=2.2.0', range: ">=2.2.0",
accept: ['patch', 'minor', 'major'], accept: ["patch", "minor", "major"],
channel: 'next-major', channel: "next-major",
main: false, main: false,
}, },
] ]
); );
}); });
test('Release branches - Handle missing previous tags in branch history', (t) => { test("Release branches - Handle missing previous tags in branch history", (t) => {
const release = [ const release = [
{name: 'master', tags: toTags(['1.0.0', '2.0.0'])}, { name: "master", tags: toTags(["1.0.0", "2.0.0"]) },
{name: 'next', tags: toTags(['1.0.0', '1.1.0', '1.1.1', '1.2.0', '2.0.0'])}, { name: "next", tags: toTags(["1.0.0", "1.1.0", "1.1.1", "1.2.0", "2.0.0"]) },
]; ];
t.deepEqual( t.deepEqual(
@ -298,30 +300,30 @@ test('Release branches - Handle missing previous tags in branch history', (t) =>
.map(({ type, name, range, accept, channel, main }) => ({ type, name, range, accept, channel, main })), .map(({ type, name, range, accept, channel, main }) => ({ type, name, range, accept, channel, main })),
[ [
{ {
type: 'release', type: "release",
name: 'master', name: "master",
range: '>=2.0.0', range: ">=2.0.0",
accept: ['patch', 'minor', 'major'], accept: ["patch", "minor", "major"],
channel: undefined, channel: undefined,
main: true, main: true,
}, },
{ {
type: 'release', type: "release",
name: 'next', name: "next",
range: '>=2.0.0', range: ">=2.0.0",
accept: ['patch', 'minor', 'major'], accept: ["patch", "minor", "major"],
channel: 'next', channel: "next",
main: false, main: false,
}, },
] ]
); );
}); });
test('Release branches - limit releases on 2nd and 3rd branch based on 1st branch last release', (t) => { test("Release branches - limit releases on 2nd and 3rd branch based on 1st branch last release", (t) => {
const release = [ const release = [
{name: 'master', tags: toTags(['1.0.0', '1.1.0', '2.0.0', '3.0.0'])}, { name: "master", tags: toTags(["1.0.0", "1.1.0", "2.0.0", "3.0.0"]) },
{name: 'next', tags: toTags(['1.0.0', '1.1.0'])}, { name: "next", tags: toTags(["1.0.0", "1.1.0"]) },
{name: 'next-major', tags: toTags(['1.0.0', '1.1.0', '2.0.0'])}, { name: "next-major", tags: toTags(["1.0.0", "1.1.0", "2.0.0"]) },
]; ];
t.deepEqual( t.deepEqual(
@ -330,68 +332,68 @@ test('Release branches - limit releases on 2nd and 3rd branch based on 1st branc
.map(({ type, name, range, accept, channel, main }) => ({ type, name, range, accept, channel, main })), .map(({ type, name, range, accept, channel, main }) => ({ type, name, range, accept, channel, main })),
[ [
{ {
type: 'release', type: "release",
name: 'master', name: "master",
range: '>=3.0.0', range: ">=3.0.0",
accept: ['patch', 'minor', 'major'], accept: ["patch", "minor", "major"],
channel: undefined, channel: undefined,
main: true, main: true,
}, },
{ {
type: 'release', type: "release",
name: 'next', name: "next",
range: '>=3.0.0', range: ">=3.0.0",
accept: ['patch', 'minor', 'major'], accept: ["patch", "minor", "major"],
channel: 'next', channel: "next",
main: false, main: false,
}, },
{ {
type: 'release', type: "release",
name: 'next-major', name: "next-major",
range: '>=3.0.0', range: ">=3.0.0",
accept: ['patch', 'minor', 'major'], accept: ["patch", "minor", "major"],
channel: 'next-major', channel: "next-major",
main: false, main: false,
}, },
] ]
); );
}); });
test('Prerelease branches', (t) => { test("Prerelease branches", (t) => {
const prerelease = [ const prerelease = [
{name: 'beta', channel: 'beta', prerelease: true, tags: []}, { name: "beta", channel: "beta", prerelease: true, tags: [] },
{name: 'alpha', prerelease: 'preview', tags: []}, { name: "alpha", prerelease: "preview", tags: [] },
]; ];
t.deepEqual( t.deepEqual(
normalize.prerelease({ prerelease }).map(({ type, name, channel }) => ({ type, name, channel })), normalize.prerelease({ prerelease }).map(({ type, name, channel }) => ({ type, name, channel })),
[ [
{type: 'prerelease', name: 'beta', channel: 'beta'}, { type: "prerelease", name: "beta", channel: "beta" },
{type: 'prerelease', name: 'alpha', channel: 'alpha'}, { type: "prerelease", name: "alpha", channel: "alpha" },
] ]
); );
}); });
test('Allow to set channel to "false" to prevent default', (t) => { test('Allow to set channel to "false" to prevent default', (t) => {
const maintenance = [{name: '1.x', channel: false, tags: []}]; const maintenance = [{ name: "1.x", channel: false, tags: [] }];
const release = [ const release = [
{name: 'master', channel: false, tags: []}, { name: "master", channel: false, tags: [] },
{name: 'next', channel: false, tags: []}, { name: "next", channel: false, tags: [] },
]; ];
const prerelease = [{name: 'beta', channel: false, prerelease: true, tags: []}]; const prerelease = [{ name: "beta", channel: false, prerelease: true, tags: [] }];
t.deepEqual( t.deepEqual(
normalize.maintenance({ maintenance, release }).map(({ name, channel }) => ({ name, channel })), normalize.maintenance({ maintenance, release }).map(({ name, channel }) => ({ name, channel })),
[{name: '1.x', channel: false}] [{ name: "1.x", channel: false }]
); );
t.deepEqual( t.deepEqual(
normalize.release({ release }).map(({ name, channel }) => ({ name, channel })), normalize.release({ release }).map(({ name, channel }) => ({ name, channel })),
[ [
{name: 'master', channel: false}, { name: "master", channel: false },
{name: 'next', channel: false}, { name: "next", channel: false },
] ]
); );
t.deepEqual( t.deepEqual(
normalize.prerelease({ prerelease }).map(({ name, channel }) => ({ name, channel })), normalize.prerelease({ prerelease }).map(({ name, channel }) => ({ name, channel })),
[{name: 'beta', channel: false}] [{ name: "beta", channel: false }]
); );
}); });

View File

@ -1,92 +1,94 @@
import test from 'ava'; import test from "ava";
import {maintenance, prerelease, release} from '../../lib/definitions/branches.js'; import { maintenance, prerelease, release } from "../../lib/definitions/branches.js";
test('A "maintenance" branch is identified by having a "range" property or a "name" formatted like "N.x", "N.x.x" or "N.N.x"', (t) => { test('A "maintenance" branch is identified by having a "range" property or a "name" formatted like "N.x", "N.x.x" or "N.N.x"', (t) => {
/* eslint-disable unicorn/no-fn-reference-in-iterator */ /* eslint-disable unicorn/no-fn-reference-in-iterator */
t.true(maintenance.filter({name: '1.x.x'})); t.true(maintenance.filter({ name: "1.x.x" }));
t.true(maintenance.filter({name: '1.0.x'})); t.true(maintenance.filter({ name: "1.0.x" }));
t.true(maintenance.filter({name: '1.x'})); t.true(maintenance.filter({ name: "1.x" }));
t.true(maintenance.filter({name: 'some-name', range: '1.x.x'})); t.true(maintenance.filter({ name: "some-name", range: "1.x.x" }));
t.true(maintenance.filter({name: 'some-name', range: '1.1.x'})); t.true(maintenance.filter({ name: "some-name", range: "1.1.x" }));
t.true(maintenance.filter({name: 'some-name', range: ''})); t.true(maintenance.filter({ name: "some-name", range: "" }));
t.true(maintenance.filter({name: 'some-name', range: true})); t.true(maintenance.filter({ name: "some-name", range: true }));
t.false(maintenance.filter({name: 'some-name', range: null})); t.false(maintenance.filter({ name: "some-name", range: null }));
t.false(maintenance.filter({name: 'some-name', range: false})); t.false(maintenance.filter({ name: "some-name", range: false }));
t.false(maintenance.filter({name: 'some-name'})); t.false(maintenance.filter({ name: "some-name" }));
t.false(maintenance.filter({name: '1.0.0'})); t.false(maintenance.filter({ name: "1.0.0" }));
t.false(maintenance.filter({name: 'x.x.x'})); t.false(maintenance.filter({ name: "x.x.x" }));
/* eslint-enable unicorn/no-fn-reference-in-iterator */ /* eslint-enable unicorn/no-fn-reference-in-iterator */
}); });
test('A "maintenance" branches must have a "range" property formatted like "N.x", "N.x.x" or "N.N.x"', (t) => { test('A "maintenance" branches must have a "range" property formatted like "N.x", "N.x.x" or "N.N.x"', (t) => {
t.true(maintenance.branchValidator({name: 'some-name', range: '1.x.x'})); t.true(maintenance.branchValidator({ name: "some-name", range: "1.x.x" }));
t.true(maintenance.branchValidator({name: 'some-name', range: '1.1.x'})); t.true(maintenance.branchValidator({ name: "some-name", range: "1.1.x" }));
t.false(maintenance.branchValidator({name: 'some-name', range: '^1.0.0'})); t.false(maintenance.branchValidator({ name: "some-name", range: "^1.0.0" }));
t.false(maintenance.branchValidator({name: 'some-name', range: '>=1.0.0 <2.0.0'})); t.false(maintenance.branchValidator({ name: "some-name", range: ">=1.0.0 <2.0.0" }));
t.false(maintenance.branchValidator({name: 'some-name', range: '1.0.0'})); t.false(maintenance.branchValidator({ name: "some-name", range: "1.0.0" }));
t.false(maintenance.branchValidator({name: 'some-name', range: 'wrong-range'})); t.false(maintenance.branchValidator({ name: "some-name", range: "wrong-range" }));
t.false(maintenance.branchValidator({name: 'some-name', range: true})); t.false(maintenance.branchValidator({ name: "some-name", range: true }));
t.false(maintenance.branchValidator({name: 'some-name', range: ''})); t.false(maintenance.branchValidator({ name: "some-name", range: "" }));
}); });
test('The "maintenance" branches must have unique ranges', (t) => { test('The "maintenance" branches must have unique ranges', (t) => {
t.true(maintenance.branchesValidator([{range: '1.x.x'}, {range: '1.0.x'}])); t.true(maintenance.branchesValidator([{ range: "1.x.x" }, { range: "1.0.x" }]));
t.false(maintenance.branchesValidator([{range: '1.x.x'}, {range: '1.x.x'}])); t.false(maintenance.branchesValidator([{ range: "1.x.x" }, { range: "1.x.x" }]));
t.false(maintenance.branchesValidator([{range: '1.x.x'}, {range: '1.x'}])); t.false(maintenance.branchesValidator([{ range: "1.x.x" }, { range: "1.x" }]));
}); });
test('A "prerelease" branch is identified by having a thruthy "prerelease" property', (t) => { test('A "prerelease" branch is identified by having a thruthy "prerelease" property', (t) => {
/* eslint-disable unicorn/no-fn-reference-in-iterator */ /* eslint-disable unicorn/no-fn-reference-in-iterator */
t.true(prerelease.filter({name: 'some-name', prerelease: true})); t.true(prerelease.filter({ name: "some-name", prerelease: true }));
t.true(prerelease.filter({name: 'some-name', prerelease: 'beta'})); t.true(prerelease.filter({ name: "some-name", prerelease: "beta" }));
t.true(prerelease.filter({name: 'some-name', prerelease: ''})); t.true(prerelease.filter({ name: "some-name", prerelease: "" }));
t.false(prerelease.filter({name: 'some-name', prerelease: null})); t.false(prerelease.filter({ name: "some-name", prerelease: null }));
t.false(prerelease.filter({name: 'some-name', prerelease: false})); t.false(prerelease.filter({ name: "some-name", prerelease: false }));
t.false(prerelease.filter({name: 'some-name'})); t.false(prerelease.filter({ name: "some-name" }));
/* eslint-enable unicorn/no-fn-reference-in-iterator */ /* eslint-enable unicorn/no-fn-reference-in-iterator */
}); });
test('A "prerelease" branch must have a valid prerelease detonation in "prerelease" property or in "name" if "prerelease" is "true"', (t) => { test('A "prerelease" branch must have a valid prerelease detonation in "prerelease" property or in "name" if "prerelease" is "true"', (t) => {
t.true(prerelease.branchValidator({name: 'beta', prerelease: true})); t.true(prerelease.branchValidator({ name: "beta", prerelease: true }));
t.true(prerelease.branchValidator({name: 'some-name', prerelease: 'beta'})); t.true(prerelease.branchValidator({ name: "some-name", prerelease: "beta" }));
t.false(prerelease.branchValidator({name: 'some-name', prerelease: ''})); t.false(prerelease.branchValidator({ name: "some-name", prerelease: "" }));
t.false(prerelease.branchValidator({name: 'some-name', prerelease: null})); t.false(prerelease.branchValidator({ name: "some-name", prerelease: null }));
t.false(prerelease.branchValidator({name: 'some-name', prerelease: false})); t.false(prerelease.branchValidator({ name: "some-name", prerelease: false }));
t.false(prerelease.branchValidator({name: 'some-name', prerelease: '000'})); t.false(prerelease.branchValidator({ name: "some-name", prerelease: "000" }));
t.false(prerelease.branchValidator({name: 'some-name', prerelease: '#beta'})); t.false(prerelease.branchValidator({ name: "some-name", prerelease: "#beta" }));
t.false(prerelease.branchValidator({name: '000', prerelease: true})); t.false(prerelease.branchValidator({ name: "000", prerelease: true }));
t.false(prerelease.branchValidator({name: '#beta', prerelease: true})); t.false(prerelease.branchValidator({ name: "#beta", prerelease: true }));
}); });
test('The "prerelease" branches must have unique "prerelease" property', (t) => { test('The "prerelease" branches must have unique "prerelease" property', (t) => {
t.true(prerelease.branchesValidator([{prerelease: 'beta'}, {prerelease: 'alpha'}])); t.true(prerelease.branchesValidator([{ prerelease: "beta" }, { prerelease: "alpha" }]));
t.false(prerelease.branchesValidator([{range: 'beta'}, {range: 'beta'}, {range: 'alpha'}])); t.false(prerelease.branchesValidator([{ range: "beta" }, { range: "beta" }, { range: "alpha" }]));
}); });
test('A "release" branch is identified by not havin a "range" or "prerelease" property or a "name" formatted like "N.x", "N.x.x" or "N.N.x"', (t) => { test('A "release" branch is identified by not havin a "range" or "prerelease" property or a "name" formatted like "N.x", "N.x.x" or "N.N.x"', (t) => {
/* eslint-disable unicorn/no-fn-reference-in-iterator */ /* eslint-disable unicorn/no-fn-reference-in-iterator */
t.true(release.filter({name: 'some-name'})); t.true(release.filter({ name: "some-name" }));
t.false(release.filter({name: '1.x.x'})); t.false(release.filter({ name: "1.x.x" }));
t.false(release.filter({name: '1.0.x'})); t.false(release.filter({ name: "1.0.x" }));
t.false(release.filter({name: 'some-name', range: '1.x.x'})); t.false(release.filter({ name: "some-name", range: "1.x.x" }));
t.false(release.filter({name: 'some-name', range: '1.1.x'})); t.false(release.filter({ name: "some-name", range: "1.1.x" }));
t.false(release.filter({name: 'some-name', prerelease: true})); t.false(release.filter({ name: "some-name", prerelease: true }));
t.false(release.filter({name: 'some-name', prerelease: 'beta'})); t.false(release.filter({ name: "some-name", prerelease: "beta" }));
/* eslint-enable unicorn/no-fn-reference-in-iterator */ /* eslint-enable unicorn/no-fn-reference-in-iterator */
}); });
test('There must be between 1 and 3 release branches', (t) => { test("There must be between 1 and 3 release branches", (t) => {
t.true(release.branchesValidator([{name: 'branch1'}])); t.true(release.branchesValidator([{ name: "branch1" }]));
t.true(release.branchesValidator([{name: 'branch1'}, {name: 'branch2'}])); t.true(release.branchesValidator([{ name: "branch1" }, { name: "branch2" }]));
t.true(release.branchesValidator([{name: 'branch1'}, {name: 'branch2'}, {name: 'branch3'}])); t.true(release.branchesValidator([{ name: "branch1" }, { name: "branch2" }, { name: "branch3" }]));
t.false(release.branchesValidator([])); t.false(release.branchesValidator([]));
t.false(release.branchesValidator([{name: 'branch1'}, {name: 'branch2'}, {name: 'branch3'}, {name: 'branch4'}])); t.false(
release.branchesValidator([{ name: "branch1" }, { name: "branch2" }, { name: "branch3" }, { name: "branch4" }])
);
}); });

View File

@ -1,15 +1,15 @@
import test from 'ava'; import test from "ava";
import plugins from '../../lib/definitions/plugins.js'; import plugins from "../../lib/definitions/plugins.js";
import {RELEASE_NOTES_SEPARATOR, SECRET_REPLACEMENT} from '../../lib/definitions/constants.js'; import { RELEASE_NOTES_SEPARATOR, SECRET_REPLACEMENT } from "../../lib/definitions/constants.js";
test('The "analyzeCommits" plugin output must be either undefined or a valid semver release type', (t) => { test('The "analyzeCommits" plugin output must be either undefined or a valid semver release type', (t) => {
t.false(plugins.analyzeCommits.outputValidator('invalid')); t.false(plugins.analyzeCommits.outputValidator("invalid"));
t.false(plugins.analyzeCommits.outputValidator(1)); t.false(plugins.analyzeCommits.outputValidator(1));
t.false(plugins.analyzeCommits.outputValidator({})); t.false(plugins.analyzeCommits.outputValidator({}));
t.true(plugins.analyzeCommits.outputValidator()); t.true(plugins.analyzeCommits.outputValidator());
t.true(plugins.analyzeCommits.outputValidator(null)); t.true(plugins.analyzeCommits.outputValidator(null));
t.true(plugins.analyzeCommits.outputValidator('major')); t.true(plugins.analyzeCommits.outputValidator("major"));
}); });
test('The "generateNotes" plugin output, if defined, must be a string', (t) => { test('The "generateNotes" plugin output, if defined, must be a string', (t) => {
@ -18,39 +18,39 @@ test('The "generateNotes" plugin output, if defined, must be a string', (t) => {
t.true(plugins.generateNotes.outputValidator()); t.true(plugins.generateNotes.outputValidator());
t.true(plugins.generateNotes.outputValidator(null)); t.true(plugins.generateNotes.outputValidator(null));
t.true(plugins.generateNotes.outputValidator('')); t.true(plugins.generateNotes.outputValidator(""));
t.true(plugins.generateNotes.outputValidator('string')); t.true(plugins.generateNotes.outputValidator("string"));
}); });
test('The "publish" plugin output, if defined, must be an object or "false"', (t) => { test('The "publish" plugin output, if defined, must be an object or "false"', (t) => {
t.false(plugins.publish.outputValidator(1)); t.false(plugins.publish.outputValidator(1));
t.false(plugins.publish.outputValidator('string')); t.false(plugins.publish.outputValidator("string"));
t.true(plugins.publish.outputValidator({})); t.true(plugins.publish.outputValidator({}));
t.true(plugins.publish.outputValidator()); t.true(plugins.publish.outputValidator());
t.true(plugins.publish.outputValidator(null)); t.true(plugins.publish.outputValidator(null));
t.true(plugins.publish.outputValidator('')); t.true(plugins.publish.outputValidator(""));
t.true(plugins.publish.outputValidator(false)); t.true(plugins.publish.outputValidator(false));
}); });
test('The "addChannel" plugin output, if defined, must be an object', (t) => { test('The "addChannel" plugin output, if defined, must be an object', (t) => {
t.false(plugins.addChannel.outputValidator(1)); t.false(plugins.addChannel.outputValidator(1));
t.false(plugins.addChannel.outputValidator('string')); t.false(plugins.addChannel.outputValidator("string"));
t.true(plugins.addChannel.outputValidator({})); t.true(plugins.addChannel.outputValidator({}));
t.true(plugins.addChannel.outputValidator()); t.true(plugins.addChannel.outputValidator());
t.true(plugins.addChannel.outputValidator(null)); t.true(plugins.addChannel.outputValidator(null));
t.true(plugins.addChannel.outputValidator('')); t.true(plugins.addChannel.outputValidator(""));
}); });
test('The "generateNotes" plugins output are concatenated with separator and sensitive data is hidden', (t) => { test('The "generateNotes" plugins output are concatenated with separator and sensitive data is hidden', (t) => {
const env = {MY_TOKEN: 'secret token'}; const env = { MY_TOKEN: "secret token" };
t.is(plugins.generateNotes.postprocess(['note 1', 'note 2'], {env}), `note 1${RELEASE_NOTES_SEPARATOR}note 2`); t.is(plugins.generateNotes.postprocess(["note 1", "note 2"], { env }), `note 1${RELEASE_NOTES_SEPARATOR}note 2`);
t.is(plugins.generateNotes.postprocess(['', 'note'], {env}), 'note'); t.is(plugins.generateNotes.postprocess(["", "note"], { env }), "note");
t.is(plugins.generateNotes.postprocess([undefined, 'note'], {env}), 'note'); t.is(plugins.generateNotes.postprocess([undefined, "note"], { env }), "note");
t.is(plugins.generateNotes.postprocess(['note 1', '', 'note 2'], {env}), `note 1${RELEASE_NOTES_SEPARATOR}note 2`); t.is(plugins.generateNotes.postprocess(["note 1", "", "note 2"], { env }), `note 1${RELEASE_NOTES_SEPARATOR}note 2`);
t.is( t.is(
plugins.generateNotes.postprocess(['note 1', undefined, 'note 2'], {env}), plugins.generateNotes.postprocess(["note 1", undefined, "note 2"], { env }),
`note 1${RELEASE_NOTES_SEPARATOR}note 2` `note 1${RELEASE_NOTES_SEPARATOR}note 2`
); );
@ -64,11 +64,11 @@ test('The "generateNotes" plugins output are concatenated with separator and sen
}); });
test('The "analyzeCommits" plugins output are reduced to the highest release type', (t) => { test('The "analyzeCommits" plugins output are reduced to the highest release type', (t) => {
t.is(plugins.analyzeCommits.postprocess(['major', 'minor']), 'major'); t.is(plugins.analyzeCommits.postprocess(["major", "minor"]), "major");
t.is(plugins.analyzeCommits.postprocess(['', 'minor']), 'minor'); t.is(plugins.analyzeCommits.postprocess(["", "minor"]), "minor");
t.is(plugins.analyzeCommits.postprocess([undefined, 'patch']), 'patch'); t.is(plugins.analyzeCommits.postprocess([undefined, "patch"]), "patch");
t.is(plugins.analyzeCommits.postprocess([null, 'patch']), 'patch'); t.is(plugins.analyzeCommits.postprocess([null, "patch"]), "patch");
t.is(plugins.analyzeCommits.postprocess(['wrong_type', 'minor']), 'minor'); t.is(plugins.analyzeCommits.postprocess(["wrong_type", "minor"]), "minor");
t.is(plugins.analyzeCommits.postprocess([]), undefined); t.is(plugins.analyzeCommits.postprocess([]), undefined);
t.is(plugins.analyzeCommits.postprocess(['wrong_type']), undefined); t.is(plugins.analyzeCommits.postprocess(["wrong_type"]), undefined);
}); });

View File

@ -1 +1 @@
export default () => {} export default () => {};

View File

@ -1,4 +1,4 @@
import SemanticReleaseError from '@semantic-release/error'; import SemanticReleaseError from "@semantic-release/error";
class InheritedError extends SemanticReleaseError { class InheritedError extends SemanticReleaseError {
constructor(message, code) { constructor(message, code) {
@ -10,5 +10,5 @@ class InheritedError extends SemanticReleaseError {
} }
export default () => { export default () => {
throw new InheritedError('Inherited error', 'EINHERITED'); throw new InheritedError("Inherited error", "EINHERITED");
} };

View File

@ -1,5 +1,5 @@
export default () => { export default () => {
const error = new Error('a'); const error = new Error("a");
error.errorProperty = 'errorProperty'; error.errorProperty = "errorProperty";
throw error; throw error;
} };

View File

@ -1,5 +1,5 @@
import AggregateError from 'aggregate-error'; import AggregateError from "aggregate-error";
export default () => { export default () => {
throw new AggregateError([new Error('a'), new Error('b')]); throw new AggregateError([new Error("a"), new Error("b")]);
} };

View File

@ -0,0 +1,3 @@
export async function verifyConditions(pluginConfig, context) {
context.logger.log("verifyConditions called");
}

View File

@ -1 +1 @@
export default (pluginConfig, context) => context export default (pluginConfig, context) => context;

View File

@ -3,4 +3,4 @@ export default (pluginConfig, {env, logger}) => {
logger.log(`Log: Exposing token ${env.MY_TOKEN}`); logger.log(`Log: Exposing token ${env.MY_TOKEN}`);
logger.error(`Error: Console token ${env.MY_TOKEN}`); logger.error(`Error: Console token ${env.MY_TOKEN}`);
throw new Error(`Throw error: Exposing ${env.MY_TOKEN}`); throw new Error(`Throw error: Exposing ${env.MY_TOKEN}`);
} };

View File

@ -1 +1 @@
export default (pluginConfig, context) => ({pluginConfig, context}) export default (pluginConfig, context) => ({ pluginConfig, context });

View File

@ -1,10 +1,10 @@
import {temporaryDirectory} from 'tempy'; import { temporaryDirectory } from "tempy";
import {execa} from 'execa'; import { execa } from "execa";
import fileUrl from 'file-url'; import fileUrl from "file-url";
import pEachSeries from 'p-each-series'; import pEachSeries from "p-each-series";
import gitLogParser from 'git-log-parser'; import gitLogParser from "git-log-parser";
import getStream from 'get-stream'; import getStream from "get-stream";
import {GIT_NOTE_REF} from '../../lib/definitions/constants.js'; import { GIT_NOTE_REF } from "../../lib/definitions/constants.js";
/** /**
* Commit message information. * Commit message information.
@ -25,11 +25,11 @@ import {GIT_NOTE_REF} from '../../lib/definitions/constants.js';
*/ */
export async function initGit(withRemote) { export async function initGit(withRemote) {
const cwd = temporaryDirectory(); const cwd = temporaryDirectory();
const args = withRemote ? ['--bare', '--initial-branch=master'] : ['--initial-branch=master']; const args = withRemote ? ["--bare", "--initial-branch=master"] : ["--initial-branch=master"];
await execa('git', ['init', ...args], {cwd}).catch(() => { await execa("git", ["init", ...args], { cwd }).catch(() => {
const args = withRemote ? ['--bare'] : []; const args = withRemote ? ["--bare"] : [];
return execa('git', ['init', ...args], {cwd}); return execa("git", ["init", ...args], { cwd });
}); });
const repositoryUrl = fileUrl(cwd); const repositoryUrl = fileUrl(cwd);
return { cwd, repositoryUrl }; return { cwd, repositoryUrl };
@ -45,7 +45,7 @@ export async function initGit(withRemote) {
* @param {String} [branch='master'] The branch to initialize. * @param {String} [branch='master'] The branch to initialize.
* @return {String} The path of the clone if `withRemote` is `true`, the path of the repository otherwise. * @return {String} The path of the clone if `withRemote` is `true`, the path of the repository otherwise.
*/ */
export async function gitRepo(withRemote, branch = 'master') { export async function gitRepo(withRemote, branch = "master") {
let { cwd, repositoryUrl } = await initGit(withRemote); let { cwd, repositoryUrl } = await initGit(withRemote);
if (withRemote) { if (withRemote) {
await initBareRepo(repositoryUrl, branch); await initBareRepo(repositoryUrl, branch);
@ -54,7 +54,7 @@ export async function gitRepo(withRemote, branch = 'master') {
await gitCheckout(branch, true, { cwd }); await gitCheckout(branch, true, { cwd });
} }
await execa('git', ['config', 'commit.gpgsign', false], {cwd}); await execa("git", ["config", "commit.gpgsign", false], { cwd });
return { cwd, repositoryUrl }; return { cwd, repositoryUrl };
} }
@ -70,12 +70,12 @@ export async function gitRepo(withRemote, branch = 'master') {
* @param {String} repositoryUrl The URL of the bare repository. * @param {String} repositoryUrl The URL of the bare repository.
* @param {String} [branch='master'] the branch to initialize. * @param {String} [branch='master'] the branch to initialize.
*/ */
export async function initBareRepo(repositoryUrl, branch = 'master') { export async function initBareRepo(repositoryUrl, branch = "master") {
const cwd = temporaryDirectory(); const cwd = temporaryDirectory();
await execa('git', ['clone', '--no-hardlinks', repositoryUrl, cwd], {cwd}); await execa("git", ["clone", "--no-hardlinks", repositoryUrl, cwd], { cwd });
await gitCheckout(branch, true, { cwd }); await gitCheckout(branch, true, { cwd });
await gitCommits(['Initial commit'], {cwd}); await gitCommits(["Initial commit"], { cwd });
await execa('git', ['push', repositoryUrl, branch], {cwd}); await execa("git", ["push", repositoryUrl, branch], { cwd });
} }
/** /**
@ -90,7 +90,9 @@ export async function gitCommits(messages, execaOptions) {
await pEachSeries( await pEachSeries(
messages, messages,
async (message) => async (message) =>
(await execa('git', ['commit', '-m', message, '--allow-empty', '--no-gpg-sign'], execaOptions)).stdout (
await execa("git", ["commit", "-m", message, "--allow-empty", "--no-gpg-sign"], execaOptions)
).stdout
); );
return (await gitGetCommits(undefined, execaOptions)).slice(0, messages.length); return (await gitGetCommits(undefined, execaOptions)).slice(0, messages.length);
} }
@ -104,11 +106,16 @@ export async function gitCommits(messages, execaOptions) {
* @return {Array<Object>} The list of parsed commits. * @return {Array<Object>} The list of parsed commits.
*/ */
export async function gitGetCommits(from, execaOptions) { export async function gitGetCommits(from, execaOptions) {
Object.assign(gitLogParser.fields, {hash: 'H', message: 'B', gitTags: 'd', committerDate: {key: 'ci', type: Date}}); Object.assign(gitLogParser.fields, {
hash: "H",
message: "B",
gitTags: "d",
committerDate: { key: "ci", type: Date },
});
return ( return (
await getStream.array( await getStream.array(
gitLogParser.parse( gitLogParser.parse(
{_: `${from ? from + '..' : ''}HEAD`}, { _: `${from ? from + ".." : ""}HEAD` },
{ ...execaOptions, env: { ...process.env, ...execaOptions.env } } { ...execaOptions, env: { ...process.env, ...execaOptions.env } }
) )
) )
@ -127,7 +134,7 @@ export async function gitGetCommits(from, execaOptions) {
* @param {Object} [execaOpts] Options to pass to `execa`. * @param {Object} [execaOpts] Options to pass to `execa`.
*/ */
export async function gitCheckout(branch, create, execaOptions) { export async function gitCheckout(branch, create, execaOptions) {
await execa('git', create ? ['checkout', '-b', branch] : ['checkout', branch], execaOptions); await execa("git", create ? ["checkout", "-b", branch] : ["checkout", branch], execaOptions);
} }
/** /**
@ -137,7 +144,7 @@ export async function gitCheckout(branch, create, execaOptions) {
* @param {Object} [execaOpts] Options to pass to `execa`. * @param {Object} [execaOpts] Options to pass to `execa`.
*/ */
export async function gitFetch(repositoryUrl, execaOptions) { export async function gitFetch(repositoryUrl, execaOptions) {
await execa('git', ['fetch', repositoryUrl], execaOptions); await execa("git", ["fetch", repositoryUrl], execaOptions);
} }
/** /**
@ -148,7 +155,7 @@ export async function gitFetch(repositoryUrl, execaOptions) {
* @return {String} The sha of the head commit in the current git repository. * @return {String} The sha of the head commit in the current git repository.
*/ */
export async function gitHead(execaOptions) { export async function gitHead(execaOptions) {
return (await execa('git', ['rev-parse', 'HEAD'], execaOptions)).stdout; return (await execa("git", ["rev-parse", "HEAD"], execaOptions)).stdout;
} }
/** /**
@ -159,7 +166,7 @@ export async function gitHead(execaOptions) {
* @param {Object} [execaOpts] Options to pass to `execa`. * @param {Object} [execaOpts] Options to pass to `execa`.
*/ */
export async function gitTagVersion(tagName, sha, execaOptions) { export async function gitTagVersion(tagName, sha, execaOptions) {
await execa('git', sha ? ['tag', '-f', tagName, sha] : ['tag', tagName], execaOptions); await execa("git", sha ? ["tag", "-f", tagName, sha] : ["tag", tagName], execaOptions);
} }
/** /**
@ -171,10 +178,10 @@ export async function gitTagVersion(tagName, sha, execaOptions) {
* @param {Number} [depth=1] The number of commit to clone. * @param {Number} [depth=1] The number of commit to clone.
* @return {String} The path of the cloned repository. * @return {String} The path of the cloned repository.
*/ */
export async function gitShallowClone(repositoryUrl, branch = 'master', depth = 1) { export async function gitShallowClone(repositoryUrl, branch = "master", depth = 1) {
const cwd = temporaryDirectory(); const cwd = temporaryDirectory();
await execa('git', ['clone', '--no-hardlinks', '--no-tags', '-b', branch, '--depth', depth, repositoryUrl, cwd], { await execa("git", ["clone", "--no-hardlinks", "--no-tags", "-b", branch, "--depth", depth, repositoryUrl, cwd], {
cwd, cwd,
}); });
return cwd; return cwd;
@ -190,21 +197,21 @@ export async function gitShallowClone(repositoryUrl, branch = 'master', depth =
export async function gitDetachedHead(repositoryUrl, head) { export async function gitDetachedHead(repositoryUrl, head) {
const cwd = temporaryDirectory(); const cwd = temporaryDirectory();
await execa('git', ['init'], {cwd}); await execa("git", ["init"], { cwd });
await execa('git', ['remote', 'add', 'origin', repositoryUrl], {cwd}); await execa("git", ["remote", "add", "origin", repositoryUrl], { cwd });
await execa('git', ['fetch', repositoryUrl], {cwd}); await execa("git", ["fetch", repositoryUrl], { cwd });
await execa('git', ['checkout', head], {cwd}); await execa("git", ["checkout", head], { cwd });
return cwd; return cwd;
} }
export async function gitDetachedHeadFromBranch(repositoryUrl, branch, head) { export async function gitDetachedHeadFromBranch(repositoryUrl, branch, head) {
const cwd = temporaryDirectory(); const cwd = temporaryDirectory();
await execa('git', ['init'], {cwd}); await execa("git", ["init"], { cwd });
await execa('git', ['remote', 'add', 'origin', repositoryUrl], {cwd}); await execa("git", ["remote", "add", "origin", repositoryUrl], { cwd });
await execa('git', ['fetch', '--force', repositoryUrl, `${branch}:remotes/origin/${branch}`], {cwd}); await execa("git", ["fetch", "--force", repositoryUrl, `${branch}:remotes/origin/${branch}`], { cwd });
await execa('git', ['reset', '--hard', head], {cwd}); await execa("git", ["reset", "--hard", head], { cwd });
await execa('git', ['checkout', '-q', '-B', branch], {cwd}); await execa("git", ["checkout", "-q", "-B", branch], { cwd });
return cwd; return cwd;
} }
@ -216,7 +223,7 @@ export async function gitDetachedHeadFromBranch(repositoryUrl, branch, head) {
* @param {Object} [execaOpts] Options to pass to `execa`. * @param {Object} [execaOpts] Options to pass to `execa`.
*/ */
export async function gitAddConfig(name, value, execaOptions) { export async function gitAddConfig(name, value, execaOptions) {
await execa('git', ['config', '--add', name, value], execaOptions); await execa("git", ["config", "--add", name, value], execaOptions);
} }
/** /**
@ -228,7 +235,7 @@ export async function gitAddConfig(name, value, execaOptions) {
* @return {String} The sha of the commit associated with `tagName` on the local repository. * @return {String} The sha of the commit associated with `tagName` on the local repository.
*/ */
export async function gitTagHead(tagName, execaOptions) { export async function gitTagHead(tagName, execaOptions) {
return (await execa('git', ['rev-list', '-1', tagName], execaOptions)).stdout; return (await execa("git", ["rev-list", "-1", tagName], execaOptions)).stdout;
} }
/** /**
@ -241,8 +248,8 @@ export async function gitTagHead(tagName, execaOptions) {
* @return {String} The sha of the commit associated with `tagName` on the remote repository. * @return {String} The sha of the commit associated with `tagName` on the remote repository.
*/ */
export async function gitRemoteTagHead(repositoryUrl, tagName, execaOptions) { export async function gitRemoteTagHead(repositoryUrl, tagName, execaOptions) {
return (await execa('git', ['ls-remote', '--tags', repositoryUrl, tagName], execaOptions)).stdout return (await execa("git", ["ls-remote", "--tags", repositoryUrl, tagName], execaOptions)).stdout
.split('\n') .split("\n")
.filter((tag) => Boolean(tag)) .filter((tag) => Boolean(tag))
.map((tag) => tag.match(/^(?<tag>\S+)/)[1])[0]; .map((tag) => tag.match(/^(?<tag>\S+)/)[1])[0];
} }
@ -256,7 +263,7 @@ export async function gitRemoteTagHead(repositoryUrl, tagName, execaOptions) {
* @return {String} The tag associatedwith the sha in parameter or `null`. * @return {String} The tag associatedwith the sha in parameter or `null`.
*/ */
export async function gitCommitTag(gitHead, execaOptions) { export async function gitCommitTag(gitHead, execaOptions) {
return (await execa('git', ['describe', '--tags', '--exact-match', gitHead], execaOptions)).stdout; return (await execa("git", ["describe", "--tags", "--exact-match", gitHead], execaOptions)).stdout;
} }
/** /**
@ -269,7 +276,7 @@ export async function gitCommitTag(gitHead, execaOptions) {
* @throws {Error} if the push failed. * @throws {Error} if the push failed.
*/ */
export async function gitPush(repositoryUrl, branch, execaOptions) { export async function gitPush(repositoryUrl, branch, execaOptions) {
await execa('git', ['push', '--tags', repositoryUrl, `HEAD:${branch}`], execaOptions); await execa("git", ["push", "--tags", repositoryUrl, `HEAD:${branch}`], execaOptions);
} }
/** /**
@ -279,7 +286,7 @@ export async function gitPush(repositoryUrl, branch, execaOptions) {
* @param {Object} [execaOpts] Options to pass to `execa`. * @param {Object} [execaOpts] Options to pass to `execa`.
*/ */
export async function merge(ref, execaOptions) { export async function merge(ref, execaOptions) {
await execa('git', ['merge', '--no-ff', ref], execaOptions); await execa("git", ["merge", "--no-ff", ref], execaOptions);
} }
/** /**
@ -289,7 +296,7 @@ export async function merge(ref, execaOptions) {
* @param {Object} [execaOpts] Options to pass to `execa`. * @param {Object} [execaOpts] Options to pass to `execa`.
*/ */
export async function mergeFf(ref, execaOptions) { export async function mergeFf(ref, execaOptions) {
await execa('git', ['merge', '--ff', ref], execaOptions); await execa("git", ["merge", "--ff", ref], execaOptions);
} }
/** /**
@ -299,7 +306,7 @@ export async function mergeFf(ref, execaOptions) {
* @param {Object} [execaOpts] Options to pass to `execa`. * @param {Object} [execaOpts] Options to pass to `execa`.
*/ */
export async function rebase(ref, execaOptions) { export async function rebase(ref, execaOptions) {
await execa('git', ['rebase', ref], execaOptions); await execa("git", ["rebase", ref], execaOptions);
} }
/** /**
@ -310,7 +317,7 @@ export async function rebase(ref, execaOptions) {
* @param {Object} [execaOpts] Options to pass to `execa`. * @param {Object} [execaOpts] Options to pass to `execa`.
*/ */
export async function gitAddNote(note, ref, execaOptions) { export async function gitAddNote(note, ref, execaOptions) {
await execa('git', ['notes', '--ref', GIT_NOTE_REF, 'add', '-m', note, ref], execaOptions); await execa("git", ["notes", "--ref", GIT_NOTE_REF, "add", "-m", note, ref], execaOptions);
} }
/** /**
@ -320,5 +327,5 @@ export async function gitAddNote(note, ref, execaOptions) {
* @param {Object} [execaOpts] Options to pass to `execa`. * @param {Object} [execaOpts] Options to pass to `execa`.
*/ */
export async function gitGetNote(ref, execaOptions) { export async function gitGetNote(ref, execaOptions) {
return (await execa('git', ['notes', '--ref', GIT_NOTE_REF, 'show', ref], execaOptions)).stdout; return (await execa("git", ["notes", "--ref", GIT_NOTE_REF, "show", ref], execaOptions)).stdout;
} }

View File

@ -1,14 +1,14 @@
import Docker from 'dockerode'; import Docker from "dockerode";
import getStream from 'get-stream'; import getStream from "get-stream";
import pRetry from 'p-retry'; import pRetry from "p-retry";
import {gitShallowClone, initBareRepo} from './git-utils.js'; import { gitShallowClone, initBareRepo } from "./git-utils.js";
const IMAGE = 'semanticrelease/docker-gitbox:latest'; const IMAGE = "semanticrelease/docker-gitbox:latest";
const SERVER_PORT = 80; const SERVER_PORT = 80;
const HOST_PORT = 2080; const HOST_PORT = 2080;
const SERVER_HOST = 'localhost'; const SERVER_HOST = "localhost";
const GIT_USERNAME = 'integration'; const GIT_USERNAME = "integration";
const GIT_PASSWORD = 'suchsecure'; const GIT_PASSWORD = "suchsecure";
const docker = new Docker(); const docker = new Docker();
let container; let container;
@ -28,7 +28,7 @@ export async function start() {
await container.start(); await container.start();
const exec = await container.exec({ const exec = await container.exec({
Cmd: ['ng-auth', '-u', GIT_USERNAME, '-p', GIT_PASSWORD], Cmd: ["ng-auth", "-u", GIT_USERNAME, "-p", GIT_PASSWORD],
AttachStdout: true, AttachStdout: true,
AttachStderr: true, AttachStderr: true,
}); });
@ -51,9 +51,9 @@ export async function stop() {
* @param {String} [description=`Repository ${name}`] The repository description. * @param {String} [description=`Repository ${name}`] The repository description.
* @return {Object} The `repositoryUrl` (URL without auth) and `authUrl` (URL with auth). * @return {Object} The `repositoryUrl` (URL without auth) and `authUrl` (URL with auth).
*/ */
export async function createRepo(name, branch = 'master', description = `Repository ${name}`) { export async function createRepo(name, branch = "master", description = `Repository ${name}`) {
const exec = await container.exec({ const exec = await container.exec({
Cmd: ['repo-admin', '-n', name, '-d', description], Cmd: ["repo-admin", "-n", name, "-d", description],
AttachStdout: true, AttachStdout: true,
AttachStderr: true, AttachStderr: true,
}); });

View File

@ -1,12 +1,12 @@
import Docker from 'dockerode'; import Docker from "dockerode";
import getStream from 'get-stream'; import getStream from "get-stream";
import got from 'got'; import got from "got";
import pRetry from 'p-retry'; import pRetry from "p-retry";
import {mockServerClient} from 'mockserver-client'; import { mockServerClient } from "mockserver-client";
const IMAGE = 'mockserver/mockserver:latest'; const IMAGE = "mockserver/mockserver:latest";
const MOCK_SERVER_PORT = 1080; const MOCK_SERVER_PORT = 1080;
const MOCK_SERVER_HOST = 'localhost'; const MOCK_SERVER_HOST = "localhost";
const docker = new Docker(); const docker = new Docker();
let container; let container;
@ -68,13 +68,13 @@ export const url = `http://${MOCK_SERVER_HOST}:${MOCK_SERVER_PORT}`;
export async function mock( export async function mock(
path, path,
{ body: requestBody, headers: requestHeaders }, { body: requestBody, headers: requestHeaders },
{method = 'POST', statusCode = 200, body: responseBody} { method = "POST", statusCode = 200, body: responseBody }
) { ) {
await client.mockAnyResponse({ await client.mockAnyResponse({
httpRequest: { path, method }, httpRequest: { path, method },
httpResponse: { httpResponse: {
statusCode, statusCode,
headers: [{name: 'Content-Type', values: ['application/json; charset=utf-8']}], headers: [{ name: "Content-Type", values: ["application/json; charset=utf-8"] }],
body: JSON.stringify(responseBody), body: JSON.stringify(responseBody),
}, },
times: { remainingTimes: 1, unlimited: false }, times: { remainingTimes: 1, unlimited: false },
@ -85,7 +85,7 @@ export async function mock(
path, path,
headers: requestHeaders, headers: requestHeaders,
body: requestBody body: requestBody
? {type: 'JSON', json: JSON.stringify(requestBody), matchType: 'ONLY_MATCHING_FIELDS'} ? { type: "JSON", json: JSON.stringify(requestBody), matchType: "ONLY_MATCHING_FIELDS" }
: undefined, : undefined,
}; };
} }

View File

@ -1,17 +1,17 @@
import path, {dirname} from 'node:path'; import path, { dirname } from "node:path";
import {fileURLToPath} from 'node:url'; import { fileURLToPath } from "node:url";
import Docker from 'dockerode'; import Docker from "dockerode";
import getStream from 'get-stream'; import getStream from "get-stream";
import got from 'got'; import got from "got";
import delay from 'delay'; import delay from "delay";
import pRetry from 'p-retry'; import pRetry from "p-retry";
const IMAGE = 'verdaccio/verdaccio:5'; const IMAGE = "verdaccio/verdaccio:5";
const REGISTRY_PORT = 4873; const REGISTRY_PORT = 4873;
const REGISTRY_HOST = 'localhost'; const REGISTRY_HOST = "localhost";
const NPM_USERNAME = 'integration'; const NPM_USERNAME = "integration";
const NPM_PASSWORD = 'suchsecure'; const NPM_PASSWORD = "suchsecure";
const NPM_EMAIL = 'integration@test.com'; const NPM_EMAIL = "integration@test.com";
const docker = new Docker(); const docker = new Docker();
const __dirname = dirname(fileURLToPath(import.meta.url)); const __dirname = dirname(fileURLToPath(import.meta.url));
let container, npmToken; let container, npmToken;
@ -26,7 +26,7 @@ export async function start() {
Tty: true, Tty: true,
Image: IMAGE, Image: IMAGE,
PortBindings: { [`${REGISTRY_PORT}/tcp`]: [{ HostPort: `${REGISTRY_PORT}` }] }, PortBindings: { [`${REGISTRY_PORT}/tcp`]: [{ HostPort: `${REGISTRY_PORT}` }] },
Binds: [`${path.join(__dirname, 'config.yaml')}:/verdaccio/conf/config.yaml`], Binds: [`${path.join(__dirname, "config.yaml")}:/verdaccio/conf/config.yaml`],
}); });
await container.start(); await container.start();
@ -45,12 +45,12 @@ export async function start() {
// Create user // Create user
await got(`http://${REGISTRY_HOST}:${REGISTRY_PORT}/-/user/org.couchdb.user:${NPM_USERNAME}`, { await got(`http://${REGISTRY_HOST}:${REGISTRY_PORT}/-/user/org.couchdb.user:${NPM_USERNAME}`, {
method: 'PUT', method: "PUT",
json: { json: {
_id: `org.couchdb.user:${NPM_USERNAME}`, _id: `org.couchdb.user:${NPM_USERNAME}`,
name: NPM_USERNAME, name: NPM_USERNAME,
roles: [], roles: [],
type: 'user', type: "user",
password: NPM_PASSWORD, password: NPM_PASSWORD,
email: NPM_EMAIL, email: NPM_EMAIL,
}, },
@ -60,9 +60,9 @@ export async function start() {
({ token: npmToken } = await got(`http://${REGISTRY_HOST}:${REGISTRY_PORT}/-/npm/v1/tokens`, { ({ token: npmToken } = await got(`http://${REGISTRY_HOST}:${REGISTRY_PORT}/-/npm/v1/tokens`, {
username: NPM_USERNAME, username: NPM_USERNAME,
password: NPM_PASSWORD, password: NPM_PASSWORD,
method: 'POST', method: "POST",
headers: {'content-type': 'application/json'}, headers: { "content-type": "application/json" },
json: {password: NPM_PASSWORD, readonly: false, cidr_whitelist: []} json: { password: NPM_PASSWORD, readonly: false, cidr_whitelist: [] },
}).json()); }).json());
} }

View File

@ -1,5 +1,5 @@
import {execa} from 'execa'; import { execa } from "execa";
export async function npmView(packageName, env) { export async function npmView(packageName, env) {
return JSON.parse((await execa('npm', ['view', packageName, '--json'], {env})).stdout); return JSON.parse((await execa("npm", ["view", packageName, "--json"], { env })).stdout);
} }

View File

@ -44,6 +44,7 @@ const cli = path.resolve("./bin/semantic-release.js");
const pluginError = path.resolve("./test/fixtures/plugin-error"); const pluginError = path.resolve("./test/fixtures/plugin-error");
const pluginInheritedError = path.resolve("./test/fixtures/plugin-error-inherited"); const pluginInheritedError = path.resolve("./test/fixtures/plugin-error-inherited");
const pluginLogEnv = path.resolve("./test/fixtures/plugin-log-env"); const pluginLogEnv = path.resolve("./test/fixtures/plugin-log-env");
const pluginEsmNamedExports = path.resolve("./test/fixtures/plugin-esm-named-exports");
test.before(async () => { test.before(async () => {
await Promise.all([gitbox.start(), npmRegistry.start(), mockServer.start()]); await Promise.all([gitbox.start(), npmRegistry.start(), mockServer.start()]);
@ -713,3 +714,26 @@ test("Use the repository URL as is if none of the given git credentials are vali
dummyUrl dummyUrl
); );
}); });
test("ESM Plugin with named exports", async (t) => {
const packageName = "log-secret";
// Create a git repository, set the current working directory at the root of the repo
t.log("Create git repository");
const { cwd, repositoryUrl } = await gitbox.createRepo(packageName);
await writeJson(path.resolve(cwd, "package.json"), {
name: packageName,
version: "0.0.0-dev",
repository: { url: repositoryUrl },
release: { plugins: [pluginEsmNamedExports] },
});
t.log("$ semantic-release");
const { stdout, stderr } = await execa(cli, [], {
env: { ...env, MY_TOKEN: "secret token" },
cwd,
reject: false,
extendEnv: false,
});
t.regex(stdout, new RegExp(`verifyConditions called`));
});

View File

@ -1,7 +1,7 @@
import test from 'ava'; import test from "ava";
import {noop} from 'lodash-es'; import { noop } from "lodash-es";
import {stub} from 'sinon'; import { stub } from "sinon";
import normalize from '../../lib/plugins/normalize.js'; import normalize from "../../lib/plugins/normalize.js";
const cwd = process.cwd(); const cwd = process.cwd();
@ -19,82 +19,92 @@ test.beforeEach((t) => {
}; };
}); });
test('Normalize and load plugin from string', async (t) => { test("Normalize and load plugin from string", async (t) => {
const plugin = await normalize( const plugin = await normalize(
{ cwd, options: {}, logger: t.context.logger }, { cwd, options: {}, logger: t.context.logger },
'verifyConditions', "verifyConditions",
'./test/fixtures/plugin-noop.cjs', "./test/fixtures/plugin-noop.cjs",
{} {}
); );
t.is(plugin.pluginName, './test/fixtures/plugin-noop.cjs'); t.is(plugin.pluginName, "./test/fixtures/plugin-noop.cjs");
t.is(typeof plugin, 'function'); t.is(typeof plugin, "function");
t.deepEqual(t.context.success.args[0], ['Loaded plugin "verifyConditions" from "./test/fixtures/plugin-noop.cjs"']); t.deepEqual(t.context.success.args[0], ['Loaded plugin "verifyConditions" from "./test/fixtures/plugin-noop.cjs"']);
}); });
test('Normalize and load plugin from object', async (t) => { test("Normalize and load plugin from object", async (t) => {
const plugin = await normalize( const plugin = await normalize(
{ cwd, options: {}, logger: t.context.logger }, { cwd, options: {}, logger: t.context.logger },
'publish', "publish",
{path: './test/fixtures/plugin-noop.cjs'}, { path: "./test/fixtures/plugin-noop.cjs" },
{} {}
); );
t.is(plugin.pluginName, './test/fixtures/plugin-noop.cjs'); t.is(plugin.pluginName, "./test/fixtures/plugin-noop.cjs");
t.is(typeof plugin, 'function'); t.is(typeof plugin, "function");
t.deepEqual(t.context.success.args[0], ['Loaded plugin "publish" from "./test/fixtures/plugin-noop.cjs"']); t.deepEqual(t.context.success.args[0], ['Loaded plugin "publish" from "./test/fixtures/plugin-noop.cjs"']);
}); });
test('Normalize and load plugin from a base file path', async (t) => { test("Normalize and load plugin from a base file path", async (t) => {
const plugin = await normalize({cwd, options: {}, logger: t.context.logger}, 'verifyConditions', './plugin-noop.cjs', { const plugin = await normalize(
'./plugin-noop.cjs': './test/fixtures', { cwd, options: {}, logger: t.context.logger },
}); "verifyConditions",
"./plugin-noop.cjs",
{
"./plugin-noop.cjs": "./test/fixtures",
}
);
t.is(plugin.pluginName, './plugin-noop.cjs'); t.is(plugin.pluginName, "./plugin-noop.cjs");
t.is(typeof plugin, 'function'); t.is(typeof plugin, "function");
t.deepEqual(t.context.success.args[0], [ t.deepEqual(t.context.success.args[0], [
'Loaded plugin "verifyConditions" from "./plugin-noop.cjs" in shareable config "./test/fixtures"', 'Loaded plugin "verifyConditions" from "./plugin-noop.cjs" in shareable config "./test/fixtures"',
]); ]);
}); });
test('Wrap plugin in a function that add the "pluginName" to the error"', async (t) => { test('Wrap plugin in a function that add the "pluginName" to the error"', async (t) => {
const plugin = await normalize({cwd, options: {}, logger: t.context.logger}, 'verifyConditions', './plugin-error', { const plugin = await normalize({ cwd, options: {}, logger: t.context.logger }, "verifyConditions", "./plugin-error", {
'./plugin-error': './test/fixtures', "./plugin-error": "./test/fixtures",
}); });
const error = await t.throwsAsync(plugin({ options: {} })); const error = await t.throwsAsync(plugin({ options: {} }));
t.is(error.pluginName, './plugin-error'); t.is(error.pluginName, "./plugin-error");
}); });
test('Wrap plugin in a function that add the "pluginName" to multiple errors"', async (t) => { test('Wrap plugin in a function that add the "pluginName" to multiple errors"', async (t) => {
const plugin = await normalize({cwd, options: {}, logger: t.context.logger}, 'verifyConditions', './plugin-errors', { const plugin = await normalize(
'./plugin-errors': './test/fixtures', { cwd, options: {}, logger: t.context.logger },
}); "verifyConditions",
"./plugin-errors",
{
"./plugin-errors": "./test/fixtures",
}
);
const errors = [...(await t.throwsAsync(plugin({ options: {} }))).errors]; const errors = [...(await t.throwsAsync(plugin({ options: {} }))).errors];
for (const error of errors) { for (const error of errors) {
t.is(error.pluginName, './plugin-errors'); t.is(error.pluginName, "./plugin-errors");
} }
}); });
test('Normalize and load plugin from function', async (t) => { test("Normalize and load plugin from function", async (t) => {
const pluginFunction = () => {}; const pluginFunction = () => {};
const plugin = await normalize({cwd, options: {}, logger: t.context.logger}, '', pluginFunction, {}); const plugin = await normalize({ cwd, options: {}, logger: t.context.logger }, "", pluginFunction, {});
t.is(plugin.pluginName, '[Function: pluginFunction]'); t.is(plugin.pluginName, "[Function: pluginFunction]");
t.is(typeof plugin, 'function'); t.is(typeof plugin, "function");
}); });
test('Normalize and load plugin that retuns multiple functions', async (t) => { test("Normalize and load plugin that retuns multiple functions", async (t) => {
const plugin = await normalize( const plugin = await normalize(
{ cwd, options: {}, logger: t.context.logger }, { cwd, options: {}, logger: t.context.logger },
'verifyConditions', "verifyConditions",
'./test/fixtures/multi-plugin.cjs', "./test/fixtures/multi-plugin.cjs",
{} {}
); );
t.is(typeof plugin, 'function'); t.is(typeof plugin, "function");
t.deepEqual(t.context.success.args[0], ['Loaded plugin "verifyConditions" from "./test/fixtures/multi-plugin.cjs"']); t.deepEqual(t.context.success.args[0], ['Loaded plugin "verifyConditions" from "./test/fixtures/multi-plugin.cjs"']);
}); });
@ -102,15 +112,15 @@ test('Wrap "analyzeCommits" plugin in a function that validate the output of the
const analyzeCommits = stub().resolves(2); const analyzeCommits = stub().resolves(2);
const plugin = await normalize( const plugin = await normalize(
{ cwd, options: {}, stderr: t.context.stderr, logger: t.context.logger }, { cwd, options: {}, stderr: t.context.stderr, logger: t.context.logger },
'analyzeCommits', "analyzeCommits",
analyzeCommits, analyzeCommits,
{} {}
); );
const error = await t.throwsAsync(plugin({ options: {} })); const error = await t.throwsAsync(plugin({ options: {} }));
t.is(error.code, 'EANALYZECOMMITSOUTPUT'); t.is(error.code, "EANALYZECOMMITSOUTPUT");
t.is(error.name, 'SemanticReleaseError'); t.is(error.name, "SemanticReleaseError");
t.truthy(error.message); t.truthy(error.message);
t.truthy(error.details); t.truthy(error.details);
t.regex(error.details, /2/); t.regex(error.details, /2/);
@ -120,15 +130,15 @@ test('Wrap "generateNotes" plugin in a function that validate the output of the
const generateNotes = stub().resolves(2); const generateNotes = stub().resolves(2);
const plugin = await normalize( const plugin = await normalize(
{ cwd, options: {}, stderr: t.context.stderr, logger: t.context.logger }, { cwd, options: {}, stderr: t.context.stderr, logger: t.context.logger },
'generateNotes', "generateNotes",
generateNotes, generateNotes,
{} {}
); );
const error = await t.throwsAsync(plugin({ options: {} })); const error = await t.throwsAsync(plugin({ options: {} }));
t.is(error.code, 'EGENERATENOTESOUTPUT'); t.is(error.code, "EGENERATENOTESOUTPUT");
t.is(error.name, 'SemanticReleaseError'); t.is(error.name, "SemanticReleaseError");
t.truthy(error.message); t.truthy(error.message);
t.truthy(error.details); t.truthy(error.details);
t.regex(error.details, /2/); t.regex(error.details, /2/);
@ -138,15 +148,15 @@ test('Wrap "publish" plugin in a function that validate the output of the plugin
const publish = stub().resolves(2); const publish = stub().resolves(2);
const plugin = await normalize( const plugin = await normalize(
{ cwd, options: {}, stderr: t.context.stderr, logger: t.context.logger }, { cwd, options: {}, stderr: t.context.stderr, logger: t.context.logger },
'publish', "publish",
publish, publish,
{} {}
); );
const error = await t.throwsAsync(plugin({ options: {} })); const error = await t.throwsAsync(plugin({ options: {} }));
t.is(error.code, 'EPUBLISHOUTPUT'); t.is(error.code, "EPUBLISHOUTPUT");
t.is(error.name, 'SemanticReleaseError'); t.is(error.name, "SemanticReleaseError");
t.truthy(error.message); t.truthy(error.message);
t.truthy(error.details); t.truthy(error.details);
t.regex(error.details, /2/); t.regex(error.details, /2/);
@ -156,15 +166,15 @@ test('Wrap "addChannel" plugin in a function that validate the output of the plu
const addChannel = stub().resolves(2); const addChannel = stub().resolves(2);
const plugin = await normalize( const plugin = await normalize(
{ cwd, options: {}, stderr: t.context.stderr, logger: t.context.logger }, { cwd, options: {}, stderr: t.context.stderr, logger: t.context.logger },
'addChannel', "addChannel",
addChannel, addChannel,
{} {}
); );
const error = await t.throwsAsync(plugin({ options: {} })); const error = await t.throwsAsync(plugin({ options: {} }));
t.is(error.code, 'EADDCHANNELOUTPUT'); t.is(error.code, "EADDCHANNELOUTPUT");
t.is(error.name, 'SemanticReleaseError'); t.is(error.name, "SemanticReleaseError");
t.truthy(error.message); t.truthy(error.message);
t.truthy(error.details); t.truthy(error.details);
t.regex(error.details, /2/); t.regex(error.details, /2/);
@ -172,59 +182,59 @@ test('Wrap "addChannel" plugin in a function that validate the output of the plu
test('Plugin is called with "pluginConfig" (with object definition) and input', async (t) => { test('Plugin is called with "pluginConfig" (with object definition) and input', async (t) => {
const pluginFunction = stub().resolves(); const pluginFunction = stub().resolves();
const pluginConf = {path: pluginFunction, conf: 'confValue'}; const pluginConf = { path: pluginFunction, conf: "confValue" };
const options = {global: 'globalValue'}; const options = { global: "globalValue" };
const plugin = await normalize({cwd, options, logger: t.context.logger}, '', pluginConf, {}); const plugin = await normalize({ cwd, options, logger: t.context.logger }, "", pluginConf, {});
await plugin({options: {}, param: 'param'}); await plugin({ options: {}, param: "param" });
t.true( t.true(
pluginFunction.calledWithMatch( pluginFunction.calledWithMatch(
{conf: 'confValue', global: 'globalValue'}, { conf: "confValue", global: "globalValue" },
{param: 'param', logger: t.context.logger} { param: "param", logger: t.context.logger }
) )
); );
}); });
test('Plugin is called with "pluginConfig" (with array definition) and input', async (t) => { test('Plugin is called with "pluginConfig" (with array definition) and input', async (t) => {
const pluginFunction = stub().resolves(); const pluginFunction = stub().resolves();
const pluginConf = [pluginFunction, {conf: 'confValue'}]; const pluginConf = [pluginFunction, { conf: "confValue" }];
const options = {global: 'globalValue'}; const options = { global: "globalValue" };
const plugin = await normalize({cwd, options, logger: t.context.logger}, '', pluginConf, {}); const plugin = await normalize({ cwd, options, logger: t.context.logger }, "", pluginConf, {});
await plugin({options: {}, param: 'param'}); await plugin({ options: {}, param: "param" });
t.true( t.true(
pluginFunction.calledWithMatch( pluginFunction.calledWithMatch(
{conf: 'confValue', global: 'globalValue'}, { conf: "confValue", global: "globalValue" },
{param: 'param', logger: t.context.logger} { param: "param", logger: t.context.logger }
) )
); );
}); });
test('Prevent plugins to modify "pluginConfig"', async (t) => { test('Prevent plugins to modify "pluginConfig"', async (t) => {
const pluginFunction = stub().callsFake((pluginConfig) => { const pluginFunction = stub().callsFake((pluginConfig) => {
pluginConfig.conf.subConf = 'otherConf'; pluginConfig.conf.subConf = "otherConf";
}); });
const pluginConf = {path: pluginFunction, conf: {subConf: 'originalConf'}}; const pluginConf = { path: pluginFunction, conf: { subConf: "originalConf" } };
const options = {globalConf: {globalSubConf: 'originalGlobalConf'}}; const options = { globalConf: { globalSubConf: "originalGlobalConf" } };
const plugin = await normalize({cwd, options, logger: t.context.logger}, '', pluginConf, {}); const plugin = await normalize({ cwd, options, logger: t.context.logger }, "", pluginConf, {});
await plugin({ options: {} }); await plugin({ options: {} });
t.is(pluginConf.conf.subConf, 'originalConf'); t.is(pluginConf.conf.subConf, "originalConf");
t.is(options.globalConf.globalSubConf, 'originalGlobalConf'); t.is(options.globalConf.globalSubConf, "originalGlobalConf");
}); });
test('Prevent plugins to modify its input', async (t) => { test("Prevent plugins to modify its input", async (t) => {
const pluginFunction = stub().callsFake((pluginConfig, options) => { const pluginFunction = stub().callsFake((pluginConfig, options) => {
options.param.subParam = 'otherParam'; options.param.subParam = "otherParam";
}); });
const input = {param: {subParam: 'originalSubParam'}, options: {}}; const input = { param: { subParam: "originalSubParam" }, options: {} };
const plugin = await normalize({cwd, options: {}, logger: t.context.logger}, '', pluginFunction, {}); const plugin = await normalize({ cwd, options: {}, logger: t.context.logger }, "", pluginFunction, {});
await plugin(input); await plugin(input);
t.is(input.param.subParam, 'originalSubParam'); t.is(input.param.subParam, "originalSubParam");
}); });
test('Return noop if the plugin is not defined', async (t) => { test("Return noop if the plugin is not defined", async (t) => {
const plugin = await normalize({ cwd, options: {}, logger: t.context.logger }); const plugin = await normalize({ cwd, options: {}, logger: t.context.logger });
t.is(plugin, noop); t.is(plugin, noop);
@ -234,8 +244,8 @@ test('Always pass a defined "pluginConfig" for plugin defined with string', asyn
// Call the normalize function with the path of a plugin that returns its config // Call the normalize function with the path of a plugin that returns its config
const plugin = await normalize( const plugin = await normalize(
{ cwd, options: {}, logger: t.context.logger }, { cwd, options: {}, logger: t.context.logger },
'', "",
'./test/fixtures/plugin-result-config', "./test/fixtures/plugin-result-config",
{} {}
); );
const pluginResult = await plugin({ options: {} }); const pluginResult = await plugin({ options: {} });
@ -247,8 +257,8 @@ test('Always pass a defined "pluginConfig" for plugin defined with path', async
// Call the normalize function with the path of a plugin that returns its config // Call the normalize function with the path of a plugin that returns its config
const plugin = await normalize( const plugin = await normalize(
{ cwd, options: {}, logger: t.context.logger }, { cwd, options: {}, logger: t.context.logger },
'', "",
{path: './test/fixtures/plugin-result-config'}, { path: "./test/fixtures/plugin-result-config" },
{} {}
); );
const pluginResult = await plugin({ options: {} }); const pluginResult = await plugin({ options: {} });
@ -256,23 +266,28 @@ test('Always pass a defined "pluginConfig" for plugin defined with path', async
t.deepEqual(pluginResult.pluginConfig, {}); t.deepEqual(pluginResult.pluginConfig, {});
}); });
test('Throws an error if the plugin return an object without the expected plugin function', async (t) => { test("Throws an error if the plugin return an object without the expected plugin function", async (t) => {
const error = await t.throwsAsync(() => const error = await t.throwsAsync(() =>
normalize({cwd, options: {}, logger: t.context.logger}, 'nonExistentPlugin', './test/fixtures/multi-plugin.cjs', {}) normalize(
{ cwd, options: {}, logger: t.context.logger },
"nonExistentPlugin",
"./test/fixtures/multi-plugin.cjs",
{}
)
); );
t.is(error.code, 'EPLUGIN'); t.is(error.code, "EPLUGIN");
t.is(error.name, 'SemanticReleaseError'); t.is(error.name, "SemanticReleaseError");
t.truthy(error.message); t.truthy(error.message);
t.truthy(error.details); t.truthy(error.details);
}); });
test('Throws an error if the plugin is not found', async (t) => { test("Throws an error if the plugin is not found", async (t) => {
await t.throwsAsync( await t.throwsAsync(
() => normalize({cwd, options: {}, logger: t.context.logger}, 'nonExistentPlugin', 'non-existing-path', {}), () => normalize({ cwd, options: {}, logger: t.context.logger }, "nonExistentPlugin", "non-existing-path", {}),
{ {
message: /Cannot find module 'non-existing-path'/, message: /Cannot find module 'non-existing-path'/,
code: 'MODULE_NOT_FOUND', code: "MODULE_NOT_FOUND",
instanceOf: Error, instanceOf: Error,
} }
); );

View File

@ -1,9 +1,9 @@
import test from 'ava'; import test from "ava";
import {stub} from 'sinon'; import { stub } from "sinon";
import AggregateError from 'aggregate-error'; import AggregateError from "aggregate-error";
import pipeline from '../../lib/plugins/pipeline.js'; import pipeline from "../../lib/plugins/pipeline.js";
test('Execute each function in series passing the same input', async (t) => { test("Execute each function in series passing the same input", async (t) => {
const step1 = stub().resolves(1); const step1 = stub().resolves(1);
const step2 = stub().resolves(2); const step2 = stub().resolves(2);
const step3 = stub().resolves(3); const step3 = stub().resolves(3);
@ -93,24 +93,24 @@ test('Execute each function in series calling "transform" to modify the results
]); ]);
}); });
test('Stop execution and throw error if a step rejects', async (t) => { test("Stop execution and throw error if a step rejects", async (t) => {
const step1 = stub().resolves(1); const step1 = stub().resolves(1);
const step2 = stub().rejects(new Error('test error')); const step2 = stub().rejects(new Error("test error"));
const step3 = stub().resolves(3); const step3 = stub().resolves(3);
const error = await t.throwsAsync(pipeline([step1, step2, step3])(0), { const error = await t.throwsAsync(pipeline([step1, step2, step3])(0), {
instanceOf: Error, instanceOf: Error,
message: 'test error', message: "test error",
}); });
t.is(error.message, 'test error'); t.is(error.message, "test error");
t.true(step1.calledWith(0)); t.true(step1.calledWith(0));
t.true(step2.calledWith(0)); t.true(step2.calledWith(0));
t.true(step3.notCalled); t.true(step3.notCalled);
}); });
test('Throw all errors from the first step throwing an AggregateError', async (t) => { test("Throw all errors from the first step throwing an AggregateError", async (t) => {
const error1 = new Error('test error 1'); const error1 = new Error("test error 1");
const error2 = new Error('test error 2'); const error2 = new Error("test error 2");
const step1 = stub().resolves(1); const step1 = stub().resolves(1);
const step2 = stub().rejects(new AggregateError([error1, error2])); const step2 = stub().rejects(new AggregateError([error1, error2]));
@ -124,9 +124,9 @@ test('Throw all errors from the first step throwing an AggregateError', async (t
t.true(step3.notCalled); t.true(step3.notCalled);
}); });
test('Execute all even if a Promise rejects', async (t) => { test("Execute all even if a Promise rejects", async (t) => {
const error1 = new Error('test error 1'); const error1 = new Error("test error 1");
const error2 = new Error('test error 2'); const error2 = new Error("test error 2");
const step1 = stub().resolves(1); const step1 = stub().resolves(1);
const step2 = stub().rejects(error1); const step2 = stub().rejects(error1);
const step3 = stub().rejects(error2); const step3 = stub().rejects(error2);
@ -139,11 +139,11 @@ test('Execute all even if a Promise rejects', async (t) => {
t.true(step3.calledWith(0)); t.true(step3.calledWith(0));
}); });
test('Throw all errors from all steps throwing an AggregateError', async (t) => { test("Throw all errors from all steps throwing an AggregateError", async (t) => {
const error1 = new Error('test error 1'); const error1 = new Error("test error 1");
const error2 = new Error('test error 2'); const error2 = new Error("test error 2");
const error3 = new Error('test error 3'); const error3 = new Error("test error 3");
const error4 = new Error('test error 4'); const error4 = new Error("test error 4");
const step1 = stub().rejects(new AggregateError([error1, error2])); const step1 = stub().rejects(new AggregateError([error1, error2]));
const step2 = stub().rejects(new AggregateError([error3, error4])); const step2 = stub().rejects(new AggregateError([error3, error4]));
@ -154,9 +154,9 @@ test('Throw all errors from all steps throwing an AggregateError', async (t) =>
t.true(step2.calledWith(0)); t.true(step2.calledWith(0));
}); });
test('Execute each function in series passing a transformed input even if a step rejects', async (t) => { test("Execute each function in series passing a transformed input even if a step rejects", async (t) => {
const error2 = new Error('test error 2'); const error2 = new Error("test error 2");
const error3 = new Error('test error 3'); const error3 = new Error("test error 3");
const step1 = stub().resolves(1); const step1 = stub().resolves(1);
const step2 = stub().rejects(error2); const step2 = stub().rejects(error2);
const step3 = stub().rejects(error3); const step3 = stub().rejects(error3);

View File

@ -1,9 +1,9 @@
import path from 'path'; import path from "path";
import test from 'ava'; import test from "ava";
import {copy, outputFile} from 'fs-extra'; import { copy, outputFile } from "fs-extra";
import {stub} from 'sinon'; import { stub } from "sinon";
import {temporaryDirectory} from 'tempy'; import { temporaryDirectory } from "tempy";
import getPlugins from '../../lib/plugins/index.js'; import getPlugins from "../../lib/plugins/index.js";
// Save the current working directory // Save the current working directory
const cwd = process.cwd(); const cwd = process.cwd();
@ -15,29 +15,29 @@ test.beforeEach((t) => {
t.context.logger = { log: t.context.log, success: t.context.success, scope: () => t.context.logger }; t.context.logger = { log: t.context.log, success: t.context.success, scope: () => t.context.logger };
}); });
test('Export default plugins', async (t) => { test("Export default plugins", async (t) => {
const plugins = await getPlugins({ cwd, options: {}, logger: t.context.logger }, {}); const plugins = await getPlugins({ cwd, options: {}, logger: t.context.logger }, {});
// Verify the module returns a function for each plugin // Verify the module returns a function for each plugin
t.is(typeof plugins.verifyConditions, 'function'); t.is(typeof plugins.verifyConditions, "function");
t.is(typeof plugins.analyzeCommits, 'function'); t.is(typeof plugins.analyzeCommits, "function");
t.is(typeof plugins.verifyRelease, 'function'); t.is(typeof plugins.verifyRelease, "function");
t.is(typeof plugins.generateNotes, 'function'); t.is(typeof plugins.generateNotes, "function");
t.is(typeof plugins.prepare, 'function'); t.is(typeof plugins.prepare, "function");
t.is(typeof plugins.publish, 'function'); t.is(typeof plugins.publish, "function");
t.is(typeof plugins.success, 'function'); t.is(typeof plugins.success, "function");
t.is(typeof plugins.fail, 'function'); t.is(typeof plugins.fail, "function");
}); });
test('Export plugins based on steps config', async (t) => { test("Export plugins based on steps config", async (t) => {
const plugins = await getPlugins( const plugins = await getPlugins(
{ {
cwd, cwd,
logger: t.context.logger, logger: t.context.logger,
options: { options: {
verifyConditions: ['./test/fixtures/plugin-noop.cjs', {path: './test/fixtures/plugin-noop.cjs'}], verifyConditions: ["./test/fixtures/plugin-noop.cjs", { path: "./test/fixtures/plugin-noop.cjs" }],
generateNotes: './test/fixtures/plugin-noop.cjs', generateNotes: "./test/fixtures/plugin-noop.cjs",
analyzeCommits: {path: './test/fixtures/plugin-noop.cjs'}, analyzeCommits: { path: "./test/fixtures/plugin-noop.cjs" },
verifyRelease: () => {}, verifyRelease: () => {},
}, },
}, },
@ -45,14 +45,14 @@ test('Export plugins based on steps config', async (t) => {
); );
// Verify the module returns a function for each plugin // Verify the module returns a function for each plugin
t.is(typeof plugins.verifyConditions, 'function'); t.is(typeof plugins.verifyConditions, "function");
t.is(typeof plugins.analyzeCommits, 'function'); t.is(typeof plugins.analyzeCommits, "function");
t.is(typeof plugins.verifyRelease, 'function'); t.is(typeof plugins.verifyRelease, "function");
t.is(typeof plugins.generateNotes, 'function'); t.is(typeof plugins.generateNotes, "function");
t.is(typeof plugins.prepare, 'function'); t.is(typeof plugins.prepare, "function");
t.is(typeof plugins.publish, 'function'); t.is(typeof plugins.publish, "function");
t.is(typeof plugins.success, 'function'); t.is(typeof plugins.success, "function");
t.is(typeof plugins.fail, 'function'); t.is(typeof plugins.fail, "function");
}); });
test('Export plugins based on "plugins" config (array)', async (t) => { test('Export plugins based on "plugins" config (array)', async (t) => {
@ -73,14 +73,14 @@ test('Export plugins based on "plugins" config (array)', async (t) => {
t.true(plugin2.verifyRelease.notCalled); t.true(plugin2.verifyRelease.notCalled);
// Verify the module returns a function for each plugin // Verify the module returns a function for each plugin
t.is(typeof plugins.verifyConditions, 'function'); t.is(typeof plugins.verifyConditions, "function");
t.is(typeof plugins.analyzeCommits, 'function'); t.is(typeof plugins.analyzeCommits, "function");
t.is(typeof plugins.verifyRelease, 'function'); t.is(typeof plugins.verifyRelease, "function");
t.is(typeof plugins.generateNotes, 'function'); t.is(typeof plugins.generateNotes, "function");
t.is(typeof plugins.prepare, 'function'); t.is(typeof plugins.prepare, "function");
t.is(typeof plugins.publish, 'function'); t.is(typeof plugins.publish, "function");
t.is(typeof plugins.success, 'function'); t.is(typeof plugins.success, "function");
t.is(typeof plugins.fail, 'function'); t.is(typeof plugins.fail, "function");
}); });
test('Export plugins based on "plugins" config (single definition)', async (t) => { test('Export plugins based on "plugins" config (single definition)', async (t) => {
@ -94,112 +94,112 @@ test('Export plugins based on "plugins" config (single definition)', async (t) =
t.true(plugin1.publish.calledOnce); t.true(plugin1.publish.calledOnce);
// Verify the module returns a function for each plugin // Verify the module returns a function for each plugin
t.is(typeof plugins.verifyConditions, 'function'); t.is(typeof plugins.verifyConditions, "function");
t.is(typeof plugins.analyzeCommits, 'function'); t.is(typeof plugins.analyzeCommits, "function");
t.is(typeof plugins.verifyRelease, 'function'); t.is(typeof plugins.verifyRelease, "function");
t.is(typeof plugins.generateNotes, 'function'); t.is(typeof plugins.generateNotes, "function");
t.is(typeof plugins.prepare, 'function'); t.is(typeof plugins.prepare, "function");
t.is(typeof plugins.publish, 'function'); t.is(typeof plugins.publish, "function");
t.is(typeof plugins.success, 'function'); t.is(typeof plugins.success, "function");
t.is(typeof plugins.fail, 'function'); t.is(typeof plugins.fail, "function");
}); });
test('Merge global options, "plugins" options and step options', async (t) => { test('Merge global options, "plugins" options and step options', async (t) => {
const plugin1 = [{verifyConditions: stub(), publish: stub()}, {pluginOpt1: 'plugin1'}]; const plugin1 = [{ verifyConditions: stub(), publish: stub() }, { pluginOpt1: "plugin1" }];
const plugin2 = [{verifyConditions: stub()}, {pluginOpt2: 'plugin2'}]; const plugin2 = [{ verifyConditions: stub() }, { pluginOpt2: "plugin2" }];
const plugin3 = [stub(), {pluginOpt3: 'plugin3'}]; const plugin3 = [stub(), { pluginOpt3: "plugin3" }];
const plugins = await getPlugins( const plugins = await getPlugins(
{ {
cwd, cwd,
logger: t.context.logger, logger: t.context.logger,
options: {globalOpt: 'global', plugins: [plugin1, plugin2], verifyRelease: [plugin3]}, options: { globalOpt: "global", plugins: [plugin1, plugin2], verifyRelease: [plugin3] },
}, },
{} {}
); );
await plugins.verifyConditions({ options: {} }); await plugins.verifyConditions({ options: {} });
t.deepEqual(plugin1[0].verifyConditions.args[0][0], {globalOpt: 'global', pluginOpt1: 'plugin1'}); t.deepEqual(plugin1[0].verifyConditions.args[0][0], { globalOpt: "global", pluginOpt1: "plugin1" });
t.deepEqual(plugin2[0].verifyConditions.args[0][0], {globalOpt: 'global', pluginOpt2: 'plugin2'}); t.deepEqual(plugin2[0].verifyConditions.args[0][0], { globalOpt: "global", pluginOpt2: "plugin2" });
await plugins.publish({ options: {} }); await plugins.publish({ options: {} });
t.deepEqual(plugin1[0].publish.args[0][0], {globalOpt: 'global', pluginOpt1: 'plugin1'}); t.deepEqual(plugin1[0].publish.args[0][0], { globalOpt: "global", pluginOpt1: "plugin1" });
await plugins.verifyRelease({ options: {} }); await plugins.verifyRelease({ options: {} });
t.deepEqual(plugin3[0].args[0][0], {globalOpt: 'global', pluginOpt3: 'plugin3'}); t.deepEqual(plugin3[0].args[0][0], { globalOpt: "global", pluginOpt3: "plugin3" });
}); });
test('Unknown steps of plugins configured in "plugins" are ignored', async (t) => { test('Unknown steps of plugins configured in "plugins" are ignored', async (t) => {
const plugin1 = { verifyConditions: () => {}, unknown: () => {} }; const plugin1 = { verifyConditions: () => {}, unknown: () => {} };
const plugins = await getPlugins({ cwd, logger: t.context.logger, options: { plugins: [plugin1] } }, {}); const plugins = await getPlugins({ cwd, logger: t.context.logger, options: { plugins: [plugin1] } }, {});
t.is(typeof plugins.verifyConditions, 'function'); t.is(typeof plugins.verifyConditions, "function");
t.is(plugins.unknown, undefined); t.is(plugins.unknown, undefined);
}); });
test('Export plugins loaded from the dependency of a shareable config module', async (t) => { test("Export plugins loaded from the dependency of a shareable config module", async (t) => {
const cwd = temporaryDirectory(); const cwd = temporaryDirectory();
await copy( await copy(
'./test/fixtures/plugin-noop.cjs', "./test/fixtures/plugin-noop.cjs",
path.resolve(cwd, 'node_modules/shareable-config/node_modules/custom-plugin/index.js') path.resolve(cwd, "node_modules/shareable-config/node_modules/custom-plugin/index.js")
); );
await outputFile(path.resolve(cwd, 'node_modules/shareable-config/index.js'), ''); await outputFile(path.resolve(cwd, "node_modules/shareable-config/index.js"), "");
const plugins = await getPlugins( const plugins = await getPlugins(
{ {
cwd, cwd,
logger: t.context.logger, logger: t.context.logger,
options: { options: {
verifyConditions: ['custom-plugin', {path: 'custom-plugin'}], verifyConditions: ["custom-plugin", { path: "custom-plugin" }],
generateNotes: 'custom-plugin', generateNotes: "custom-plugin",
analyzeCommits: {path: 'custom-plugin'}, analyzeCommits: { path: "custom-plugin" },
verifyRelease: () => {}, verifyRelease: () => {},
}, },
}, },
{'custom-plugin': 'shareable-config'} { "custom-plugin": "shareable-config" }
); );
// Verify the module returns a function for each plugin // Verify the module returns a function for each plugin
t.is(typeof plugins.verifyConditions, 'function'); t.is(typeof plugins.verifyConditions, "function");
t.is(typeof plugins.analyzeCommits, 'function'); t.is(typeof plugins.analyzeCommits, "function");
t.is(typeof plugins.verifyRelease, 'function'); t.is(typeof plugins.verifyRelease, "function");
t.is(typeof plugins.generateNotes, 'function'); t.is(typeof plugins.generateNotes, "function");
t.is(typeof plugins.prepare, 'function'); t.is(typeof plugins.prepare, "function");
t.is(typeof plugins.publish, 'function'); t.is(typeof plugins.publish, "function");
t.is(typeof plugins.success, 'function'); t.is(typeof plugins.success, "function");
t.is(typeof plugins.fail, 'function'); t.is(typeof plugins.fail, "function");
}); });
test('Export plugins loaded from the dependency of a shareable config file', async (t) => { test("Export plugins loaded from the dependency of a shareable config file", async (t) => {
const cwd = temporaryDirectory(); const cwd = temporaryDirectory();
await copy('./test/fixtures/plugin-noop.cjs', path.resolve(cwd, 'plugin/plugin-noop.cjs')); await copy("./test/fixtures/plugin-noop.cjs", path.resolve(cwd, "plugin/plugin-noop.cjs"));
await outputFile(path.resolve(cwd, 'shareable-config.js'), ''); await outputFile(path.resolve(cwd, "shareable-config.js"), "");
const plugins = await getPlugins( const plugins = await getPlugins(
{ {
cwd, cwd,
logger: t.context.logger, logger: t.context.logger,
options: { options: {
verifyConditions: ['./plugin/plugin-noop.cjs', {path: './plugin/plugin-noop.cjs'}], verifyConditions: ["./plugin/plugin-noop.cjs", { path: "./plugin/plugin-noop.cjs" }],
generateNotes: './plugin/plugin-noop.cjs', generateNotes: "./plugin/plugin-noop.cjs",
analyzeCommits: {path: './plugin/plugin-noop.cjs'}, analyzeCommits: { path: "./plugin/plugin-noop.cjs" },
verifyRelease: () => {}, verifyRelease: () => {},
}, },
}, },
{'./plugin/plugin-noop': './shareable-config.js'} { "./plugin/plugin-noop": "./shareable-config.js" }
); );
// Verify the module returns a function for each plugin // Verify the module returns a function for each plugin
t.is(typeof plugins.verifyConditions, 'function'); t.is(typeof plugins.verifyConditions, "function");
t.is(typeof plugins.analyzeCommits, 'function'); t.is(typeof plugins.analyzeCommits, "function");
t.is(typeof plugins.verifyRelease, 'function'); t.is(typeof plugins.verifyRelease, "function");
t.is(typeof plugins.generateNotes, 'function'); t.is(typeof plugins.generateNotes, "function");
t.is(typeof plugins.prepare, 'function'); t.is(typeof plugins.prepare, "function");
t.is(typeof plugins.publish, 'function'); t.is(typeof plugins.publish, "function");
t.is(typeof plugins.success, 'function'); t.is(typeof plugins.success, "function");
t.is(typeof plugins.fail, 'function'); t.is(typeof plugins.fail, "function");
}); });
test('Use default when only options are passed for a single plugin', async (t) => { test("Use default when only options are passed for a single plugin", async (t) => {
const analyzeCommits = {}; const analyzeCommits = {};
const generateNotes = {}; const generateNotes = {};
const publish = {}; const publish = {};
@ -211,7 +211,7 @@ test('Use default when only options are passed for a single plugin', async (t) =
cwd, cwd,
logger: t.context.logger, logger: t.context.logger,
options: { options: {
plugins: ['@semantic-release/commit-analyzer', '@semantic-release/release-notes-generator'], plugins: ["@semantic-release/commit-analyzer", "@semantic-release/release-notes-generator"],
analyzeCommits, analyzeCommits,
generateNotes, generateNotes,
publish, publish,
@ -223,25 +223,25 @@ test('Use default when only options are passed for a single plugin', async (t) =
); );
// Verify the module returns a function for each plugin // Verify the module returns a function for each plugin
t.is(typeof plugins.analyzeCommits, 'function'); t.is(typeof plugins.analyzeCommits, "function");
t.is(typeof plugins.generateNotes, 'function'); t.is(typeof plugins.generateNotes, "function");
t.is(typeof plugins.success, 'function'); t.is(typeof plugins.success, "function");
t.is(typeof plugins.fail, 'function'); t.is(typeof plugins.fail, "function");
// Verify only the plugins defined as an object with no `path` are set to the default value // Verify only the plugins defined as an object with no `path` are set to the default value
t.falsy(success.path); t.falsy(success.path);
t.falsy(fail.path); t.falsy(fail.path);
}); });
test('Merge global options with plugin options', async (t) => { test("Merge global options with plugin options", async (t) => {
const plugins = await getPlugins( const plugins = await getPlugins(
{ {
cwd, cwd,
logger: t.context.logger, logger: t.context.logger,
options: { options: {
globalOpt: 'global', globalOpt: "global",
otherOpt: 'globally-defined', otherOpt: "globally-defined",
verifyRelease: {path: './test/fixtures/plugin-result-config', localOpt: 'local', otherOpt: 'locally-defined'}, verifyRelease: { path: "./test/fixtures/plugin-result-config", localOpt: "local", otherOpt: "locally-defined" },
}, },
}, },
{} {}
@ -249,18 +249,19 @@ test('Merge global options with plugin options', async (t) => {
const [result] = await plugins.verifyRelease({ options: {} }); const [result] = await plugins.verifyRelease({ options: {} });
t.deepEqual(result.pluginConfig, {localOpt: 'local', globalOpt: 'global', otherOpt: 'locally-defined'}); t.deepEqual(result.pluginConfig, { localOpt: "local", globalOpt: "global", otherOpt: "locally-defined" });
}); });
test('Throw an error for each invalid plugin configuration', async (t) => { test("Throw an error for each invalid plugin configuration", async (t) => {
const errors = [ const errors = [
...(await t.throwsAsync(() => ...(
await t.throwsAsync(() =>
getPlugins( getPlugins(
{ {
cwd, cwd,
logger: t.context.logger, logger: t.context.logger,
options: { options: {
plugins: ['@semantic-release/commit-analyzer', '@semantic-release/release-notes-generator'], plugins: ["@semantic-release/commit-analyzer", "@semantic-release/release-notes-generator"],
verifyConditions: 1, verifyConditions: 1,
analyzeCommits: [], analyzeCommits: [],
verifyRelease: [{}], verifyRelease: [{}],
@ -269,50 +270,55 @@ test('Throw an error for each invalid plugin configuration', async (t) => {
}, },
{} {}
) )
)).errors, )
).errors,
]; ];
t.is(errors[0].name, 'SemanticReleaseError'); t.is(errors[0].name, "SemanticReleaseError");
t.is(errors[0].code, 'EPLUGINCONF'); t.is(errors[0].code, "EPLUGINCONF");
t.is(errors[1].name, 'SemanticReleaseError'); t.is(errors[1].name, "SemanticReleaseError");
t.is(errors[1].code, 'EPLUGINCONF'); t.is(errors[1].code, "EPLUGINCONF");
t.is(errors[2].name, 'SemanticReleaseError'); t.is(errors[2].name, "SemanticReleaseError");
t.is(errors[2].code, 'EPLUGINCONF'); t.is(errors[2].code, "EPLUGINCONF");
t.is(errors[3].name, 'SemanticReleaseError'); t.is(errors[3].name, "SemanticReleaseError");
t.is(errors[3].code, 'EPLUGINCONF'); t.is(errors[3].code, "EPLUGINCONF");
}); });
test('Throw EPLUGINSCONF error if the "plugins" option contains an old plugin definition (returns a function)', async (t) => { test('Throw EPLUGINSCONF error if the "plugins" option contains an old plugin definition (returns a function)', async (t) => {
const errors = [ const errors = [
...(await t.throwsAsync(() => ...(
await t.throwsAsync(() =>
getPlugins( getPlugins(
{ {
cwd, cwd,
logger: t.context.logger, logger: t.context.logger,
options: {plugins: ['./test/fixtures/multi-plugin.cjs', './test/fixtures/plugin-noop.cjs', () => {}]}, options: { plugins: ["./test/fixtures/multi-plugin.cjs", "./test/fixtures/plugin-noop.cjs", () => {}] },
}, },
{} {}
) )
)).errors, )
).errors,
]; ];
t.is(errors[0].name, 'SemanticReleaseError'); t.is(errors[0].name, "SemanticReleaseError");
t.is(errors[0].code, 'EPLUGINSCONF'); t.is(errors[0].code, "EPLUGINSCONF");
t.is(errors[1].name, 'SemanticReleaseError'); t.is(errors[1].name, "SemanticReleaseError");
t.is(errors[1].code, 'EPLUGINSCONF'); t.is(errors[1].code, "EPLUGINSCONF");
}); });
test('Throw EPLUGINSCONF error for each invalid definition if the "plugins" option', async (t) => { test('Throw EPLUGINSCONF error for each invalid definition if the "plugins" option', async (t) => {
const errors = [ const errors = [
...(await t.throwsAsync(() => ...(
await t.throwsAsync(() =>
getPlugins({ cwd, logger: t.context.logger, options: { plugins: [1, { path: 1 }, [() => {}, {}, {}]] } }, {}) getPlugins({ cwd, logger: t.context.logger, options: { plugins: [1, { path: 1 }, [() => {}, {}, {}]] } }, {})
)).errors, )
).errors,
]; ];
t.is(errors[0].name, 'SemanticReleaseError'); t.is(errors[0].name, "SemanticReleaseError");
t.is(errors[0].code, 'EPLUGINSCONF'); t.is(errors[0].code, "EPLUGINSCONF");
t.is(errors[1].name, 'SemanticReleaseError'); t.is(errors[1].name, "SemanticReleaseError");
t.is(errors[1].code, 'EPLUGINSCONF'); t.is(errors[1].code, "EPLUGINSCONF");
t.is(errors[2].name, 'SemanticReleaseError'); t.is(errors[2].name, "SemanticReleaseError");
t.is(errors[2].code, 'EPLUGINSCONF'); t.is(errors[2].code, "EPLUGINSCONF");
}); });

View File

@ -1,31 +1,31 @@
import test from 'ava'; import test from "ava";
import {loadPlugin, parseConfig, validatePlugin, validateStep} from '../../lib/plugins/utils.js'; import { loadPlugin, parseConfig, validatePlugin, validateStep } from "../../lib/plugins/utils.js";
test('validatePlugin', (t) => { test("validatePlugin", (t) => {
const path = 'plugin-module'; const path = "plugin-module";
const options = {option1: 'value1', option2: 'value2'}; const options = { option1: "value1", option2: "value2" };
t.true(validatePlugin(path), 'String definition'); t.true(validatePlugin(path), "String definition");
t.true(validatePlugin({publish: () => {}}), 'Object definition'); t.true(validatePlugin({ publish: () => {} }), "Object definition");
t.true(validatePlugin([path]), 'Array definition'); t.true(validatePlugin([path]), "Array definition");
t.true(validatePlugin([path, options]), 'Array definition with options'); t.true(validatePlugin([path, options]), "Array definition with options");
t.true(validatePlugin([{publish: () => {}}, options]), 'Array definition with options and path as object'); t.true(validatePlugin([{ publish: () => {} }, options]), "Array definition with options and path as object");
t.true(validatePlugin({path}), 'Object with path definition'); t.true(validatePlugin({ path }), "Object with path definition");
t.true(validatePlugin({path, ...options}), 'Object with path definition with options'); t.true(validatePlugin({ path, ...options }), "Object with path definition with options");
t.true( t.true(
validatePlugin({ path: { publish: () => {} }, ...options }), validatePlugin({ path: { publish: () => {} }, ...options }),
'Object with path definition with options and path as object' "Object with path definition with options and path as object"
); );
t.false(validatePlugin(1), 'String definition, wrong path'); t.false(validatePlugin(1), "String definition, wrong path");
t.false(validatePlugin([]), 'Array definition, missing path'); t.false(validatePlugin([]), "Array definition, missing path");
t.false(validatePlugin([path, options, {}]), 'Array definition, additional parameter'); t.false(validatePlugin([path, options, {}]), "Array definition, additional parameter");
t.false(validatePlugin([1]), 'Array definition, wrong path'); t.false(validatePlugin([1]), "Array definition, wrong path");
t.false(validatePlugin([path, 1]), 'Array definition, wrong options'); t.false(validatePlugin([path, 1]), "Array definition, wrong options");
t.false(validatePlugin({path: 1}), 'Object definition, wrong path'); t.false(validatePlugin({ path: 1 }), "Object definition, wrong path");
}); });
test('validateStep: optional plugin configuration', (t) => { test("validateStep: optional plugin configuration", (t) => {
const type = { multiple: true, required: false }; const type = { multiple: true, required: false };
// Empty config // Empty config
@ -33,81 +33,81 @@ test('validateStep: optional plugin configuration', (t) => {
t.true(validateStep(type, [])); t.true(validateStep(type, []));
// Single value definition // Single value definition
t.true(validateStep(type, 'plugin-path.js')); t.true(validateStep(type, "plugin-path.js"));
t.true(validateStep(type, () => {})); t.true(validateStep(type, () => {}));
t.true(validateStep(type, ['plugin-path.js'])); t.true(validateStep(type, ["plugin-path.js"]));
t.true(validateStep(type, [() => {}])); t.true(validateStep(type, [() => {}]));
t.false(validateStep(type, {})); t.false(validateStep(type, {}));
t.false(validateStep(type, [{}])); t.false(validateStep(type, [{}]));
// Array type definition // Array type definition
t.true(validateStep(type, [['plugin-path.js']])); t.true(validateStep(type, [["plugin-path.js"]]));
t.true(validateStep(type, [['plugin-path.js', {options: 'value'}]])); t.true(validateStep(type, [["plugin-path.js", { options: "value" }]]));
t.true(validateStep(type, [[() => {}, {options: 'value'}]])); t.true(validateStep(type, [[() => {}, { options: "value" }]]));
t.false(validateStep(type, [['plugin-path.js', 1]])); t.false(validateStep(type, [["plugin-path.js", 1]]));
// Object type definition // Object type definition
t.true(validateStep(type, {path: 'plugin-path.js'})); t.true(validateStep(type, { path: "plugin-path.js" }));
t.true(validateStep(type, {path: 'plugin-path.js', options: 'value'})); t.true(validateStep(type, { path: "plugin-path.js", options: "value" }));
t.true(validateStep(type, {path: () => {}, options: 'value'})); t.true(validateStep(type, { path: () => {}, options: "value" }));
t.false(validateStep(type, { path: null })); t.false(validateStep(type, { path: null }));
// Considered as an Array of 2 definitions and not as one Array definition in case of a muliple plugin type // Considered as an Array of 2 definitions and not as one Array definition in case of a muliple plugin type
t.false(validateStep(type, [() => {}, {options: 'value'}])); t.false(validateStep(type, [() => {}, { options: "value" }]));
t.false(validateStep(type, ['plugin-path.js', {options: 'value'}])); t.false(validateStep(type, ["plugin-path.js", { options: "value" }]));
// Multiple definitions // Multiple definitions
t.true( t.true(
validateStep(type, [ validateStep(type, [
'plugin-path.js', "plugin-path.js",
() => {}, () => {},
['plugin-path.js'], ["plugin-path.js"],
['plugin-path.js', {options: 'value'}], ["plugin-path.js", { options: "value" }],
[() => {}, {options: 'value'}], [() => {}, { options: "value" }],
{path: 'plugin-path.js'}, { path: "plugin-path.js" },
{path: 'plugin-path.js', options: 'value'}, { path: "plugin-path.js", options: "value" },
{path: () => {}, options: 'value'}, { path: () => {}, options: "value" },
]) ])
); );
t.false( t.false(
validateStep(type, [ validateStep(type, [
'plugin-path.js', "plugin-path.js",
() => {}, () => {},
['plugin-path.js'], ["plugin-path.js"],
['plugin-path.js', 1], ["plugin-path.js", 1],
[() => {}, {options: 'value'}], [() => {}, { options: "value" }],
{path: 'plugin-path.js'}, { path: "plugin-path.js" },
{path: 'plugin-path.js', options: 'value'}, { path: "plugin-path.js", options: "value" },
{path: () => {}, options: 'value'}, { path: () => {}, options: "value" },
]) ])
); );
t.false( t.false(
validateStep(type, [ validateStep(type, [
'plugin-path.js', "plugin-path.js",
{}, {},
['plugin-path.js'], ["plugin-path.js"],
['plugin-path.js', {options: 'value'}], ["plugin-path.js", { options: "value" }],
[() => {}, {options: 'value'}], [() => {}, { options: "value" }],
{path: 'plugin-path.js'}, { path: "plugin-path.js" },
{path: 'plugin-path.js', options: 'value'}, { path: "plugin-path.js", options: "value" },
{path: () => {}, options: 'value'}, { path: () => {}, options: "value" },
]) ])
); );
t.false( t.false(
validateStep(type, [ validateStep(type, [
'plugin-path.js', "plugin-path.js",
() => {}, () => {},
['plugin-path.js'], ["plugin-path.js"],
['plugin-path.js', {options: 'value'}], ["plugin-path.js", { options: "value" }],
[() => {}, {options: 'value'}], [() => {}, { options: "value" }],
{ path: null }, { path: null },
{path: 'plugin-path.js', options: 'value'}, { path: "plugin-path.js", options: "value" },
{path: () => {}, options: 'value'}, { path: () => {}, options: "value" },
]) ])
); );
}); });
test('validateStep: required plugin configuration', (t) => { test("validateStep: required plugin configuration", (t) => {
const type = { required: true }; const type = { required: true };
// Empty config // Empty config
@ -115,100 +115,110 @@ test('validateStep: required plugin configuration', (t) => {
t.false(validateStep(type, [])); t.false(validateStep(type, []));
// Single value definition // Single value definition
t.true(validateStep(type, 'plugin-path.js')); t.true(validateStep(type, "plugin-path.js"));
t.true(validateStep(type, () => {})); t.true(validateStep(type, () => {}));
t.true(validateStep(type, ['plugin-path.js'])); t.true(validateStep(type, ["plugin-path.js"]));
t.true(validateStep(type, [() => {}])); t.true(validateStep(type, [() => {}]));
t.false(validateStep(type, {})); t.false(validateStep(type, {}));
t.false(validateStep(type, [{}])); t.false(validateStep(type, [{}]));
// Array type definition // Array type definition
t.true(validateStep(type, [['plugin-path.js']])); t.true(validateStep(type, [["plugin-path.js"]]));
t.true(validateStep(type, [['plugin-path.js', {options: 'value'}]])); t.true(validateStep(type, [["plugin-path.js", { options: "value" }]]));
t.true(validateStep(type, [[() => {}, {options: 'value'}]])); t.true(validateStep(type, [[() => {}, { options: "value" }]]));
t.false(validateStep(type, [['plugin-path.js', 1]])); t.false(validateStep(type, [["plugin-path.js", 1]]));
// Object type definition // Object type definition
t.true(validateStep(type, {path: 'plugin-path.js'})); t.true(validateStep(type, { path: "plugin-path.js" }));
t.true(validateStep(type, {path: 'plugin-path.js', options: 'value'})); t.true(validateStep(type, { path: "plugin-path.js", options: "value" }));
t.true(validateStep(type, {path: () => {}, options: 'value'})); t.true(validateStep(type, { path: () => {}, options: "value" }));
t.false(validateStep(type, { path: null })); t.false(validateStep(type, { path: null }));
// Considered as an Array of 2 definitions and not as one Array definition in the case of a muliple plugin type // Considered as an Array of 2 definitions and not as one Array definition in the case of a muliple plugin type
t.false(validateStep(type, [() => {}, {options: 'value'}])); t.false(validateStep(type, [() => {}, { options: "value" }]));
t.false(validateStep(type, ['plugin-path.js', {options: 'value'}])); t.false(validateStep(type, ["plugin-path.js", { options: "value" }]));
// Multiple definitions // Multiple definitions
t.true( t.true(
validateStep(type, [ validateStep(type, [
'plugin-path.js', "plugin-path.js",
() => {}, () => {},
['plugin-path.js'], ["plugin-path.js"],
['plugin-path.js', {options: 'value'}], ["plugin-path.js", { options: "value" }],
[() => {}, {options: 'value'}], [() => {}, { options: "value" }],
{path: 'plugin-path.js'}, { path: "plugin-path.js" },
{path: 'plugin-path.js', options: 'value'}, { path: "plugin-path.js", options: "value" },
{path: () => {}, options: 'value'}, { path: () => {}, options: "value" },
]) ])
); );
t.false( t.false(
validateStep(type, [ validateStep(type, [
'plugin-path.js', "plugin-path.js",
() => {}, () => {},
['plugin-path.js'], ["plugin-path.js"],
['plugin-path.js', 1], ["plugin-path.js", 1],
[() => {}, {options: 'value'}], [() => {}, { options: "value" }],
{path: 'plugin-path.js'}, { path: "plugin-path.js" },
{path: 'plugin-path.js', options: 'value'}, { path: "plugin-path.js", options: "value" },
{path: () => {}, options: 'value'}, { path: () => {}, options: "value" },
]) ])
); );
t.false( t.false(
validateStep(type, [ validateStep(type, [
'plugin-path.js', "plugin-path.js",
{}, {},
['plugin-path.js'], ["plugin-path.js"],
['plugin-path.js', {options: 'value'}], ["plugin-path.js", { options: "value" }],
[() => {}, {options: 'value'}], [() => {}, { options: "value" }],
{path: 'plugin-path.js'}, { path: "plugin-path.js" },
{path: 'plugin-path.js', options: 'value'}, { path: "plugin-path.js", options: "value" },
{path: () => {}, options: 'value'}, { path: () => {}, options: "value" },
]) ])
); );
t.false( t.false(
validateStep(type, [ validateStep(type, [
'plugin-path.js', "plugin-path.js",
() => {}, () => {},
['plugin-path.js'], ["plugin-path.js"],
['plugin-path.js', {options: 'value'}], ["plugin-path.js", { options: "value" }],
[() => {}, {options: 'value'}], [() => {}, { options: "value" }],
{ path: null }, { path: null },
{path: 'plugin-path.js', options: 'value'}, { path: "plugin-path.js", options: "value" },
{path: () => {}, options: 'value'}, { path: () => {}, options: "value" },
]) ])
); );
}); });
test('loadPlugin', async (t) => { test("loadPlugin", async (t) => {
const cwd = process.cwd(); const cwd = process.cwd();
const func = () => {}; const func = () => {};
t.is((await import('../fixtures/plugin-noop.cjs')).default, await loadPlugin({cwd: './test/fixtures'}, './plugin-noop.cjs', {}), 'From cwd');
t.is( t.is(
(await import('../fixtures/plugin-noop.cjs')).default, (await import("../fixtures/plugin-noop.cjs")).default,
await loadPlugin({cwd}, './plugin-noop.cjs', {'./plugin-noop.cjs': './test/fixtures'}), await loadPlugin({ cwd: "./test/fixtures" }, "./plugin-noop.cjs", {}),
'From a shareable config context' "From cwd"
); );
t.is(func, await loadPlugin({cwd}, func, {}), 'Defined as a function'); t.is(
(await import("../fixtures/plugin-noop.cjs")).default,
await loadPlugin({ cwd }, "./plugin-noop.cjs", { "./plugin-noop.cjs": "./test/fixtures" }),
"From a shareable config context"
);
const { ...namedExports } = await import("../fixtures/plugin-esm-named-exports.js");
const plugin = await loadPlugin({ cwd }, "./plugin-esm-named-exports.js", {
"./plugin-esm-named-exports.js": "./test/fixtures",
}); });
test('parseConfig', (t) => { t.deepEqual(namedExports, plugin, "ESM with named exports");
const path = 'plugin-module'; t.is(func, await loadPlugin({ cwd }, func, {}), "Defined as a function");
const options = {option1: 'value1', option2: 'value2'}; });
t.deepEqual(parseConfig(path), [path, {}], 'String definition'); test("parseConfig", (t) => {
t.deepEqual(parseConfig({path}), [path, {}], 'Object definition'); const path = "plugin-module";
t.deepEqual(parseConfig({path, ...options}), [path, options], 'Object definition with options'); const options = { option1: "value1", option2: "value2" };
t.deepEqual(parseConfig([path]), [path, {}], 'Array definition');
t.deepEqual(parseConfig([path, options]), [path, options], 'Array definition with options'); t.deepEqual(parseConfig(path), [path, {}], "String definition");
t.deepEqual(parseConfig({ path }), [path, {}], "Object definition");
t.deepEqual(parseConfig({ path, ...options }), [path, options], "Object definition with options");
t.deepEqual(parseConfig([path]), [path, {}], "Array definition");
t.deepEqual(parseConfig([path, options]), [path, options], "Array definition with options");
}); });