feat: pass cwd and env context to plugins

- Allows running semantic-release (via the API) from anywhere by passing the current working directory.
- Allows the tests to be simplified and run in parallel, in both the core and the plugins.
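For illustration, a minimal usage sketch of the new API surface, assuming the updated entry point shown in the diff below (`module.exports = async (opts, {cwd = process.cwd(), env = process.env} = {})`); the repository path and token value are placeholders:

// Hypothetical caller: run semantic-release against another repository without
// chdir-ing or mutating the caller's process.env (path and token are placeholders).
const semanticRelease = require('semantic-release');

(async () => {
  const result = await semanticRelease(
    {branch: 'master', dryRun: true},
    {cwd: '/path/to/other/repo', env: {...process.env, GH_TOKEN: 'placeholder-token'}}
  );
  console.log(result);
})();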
parent 12e4155cd3
commit a94e08de9a

88  index.js
@@ -1,3 +1,4 @@
const process = require('process');
const {template} = require('lodash');
const marked = require('marked');
const TerminalRenderer = require('marked-terminal');
@@ -19,23 +20,23 @@ const {COMMIT_NAME, COMMIT_EMAIL} = require('./lib/definitions/constants');

marked.setOptions({renderer: new TerminalRenderer()});

async function run(options, plugins) {
const {isCi, branch, isPr} = envCi();
async function run(context, plugins) {
const {isCi, branch: ciBranch, isPr} = envCi();
const {cwd, env, options, logger} = context;

if (!isCi && !options.dryRun && !options.noCi) {
logger.log('This run was not triggered in a known CI environment, running in dry-run mode.');
options.dryRun = true;
} else {
// When running on CI, set the commit author and committer info and prevent the `git` CLI from prompting for username/password. See #703.
process.env = {
Object.assign(env, {
GIT_AUTHOR_NAME: COMMIT_NAME,
GIT_AUTHOR_EMAIL: COMMIT_EMAIL,
GIT_COMMITTER_NAME: COMMIT_NAME,
GIT_COMMITTER_EMAIL: COMMIT_EMAIL,
...process.env,
GIT_ASKPASS: 'echo',
GIT_TERMINAL_PROMPT: 0,
};
});
}

if (isCi && isPr && !options.noCi) {
@@ -43,23 +44,23 @@ async function run(options, plugins) {
return;
}

if (branch !== options.branch) {
if (ciBranch !== options.branch) {
logger.log(
`This test run was triggered on the branch ${branch}, while semantic-release is configured to only publish from ${
`This test run was triggered on the branch ${ciBranch}, while semantic-release is configured to only publish from ${
options.branch
}, therefore a new version won’t be published.`
);
return false;
}

await verify(options);
await verify(context);

options.repositoryUrl = await getGitAuthUrl(options);
options.repositoryUrl = await getGitAuthUrl(context);

try {
await verifyAuth(options.repositoryUrl, options.branch);
await verifyAuth(options.repositoryUrl, options.branch, {cwd, env});
} catch (err) {
if (!(await isBranchUpToDate(options.branch))) {
if (!(await isBranchUpToDate(options.branch, {cwd, env}))) {
logger.log(
"The local branch %s is behind the remote one, therefore a new version won't be published.",
options.branch
@@ -72,56 +73,56 @@ async function run(options, plugins) {

logger.log('Run automated release from branch %s', options.branch);

await plugins.verifyConditions({options, logger});
await plugins.verifyConditions(context);

await fetch(options.repositoryUrl);
await fetch(options.repositoryUrl, {cwd, env});

const lastRelease = await getLastRelease(options.tagFormat, logger);
const commits = await getCommits(lastRelease.gitHead, options.branch, logger);
context.lastRelease = await getLastRelease(context);
context.commits = await getCommits(context);

const type = await plugins.analyzeCommits({options, logger, lastRelease, commits});
if (!type) {
const nextRelease = {type: await plugins.analyzeCommits(context), gitHead: await getGitHead({cwd, env})};

if (!nextRelease.type) {
logger.log('There are no relevant changes, so no new version is released.');
return;
}
const version = getNextVersion(type, lastRelease, logger);
const nextRelease = {type, version, gitHead: await getGitHead(), gitTag: template(options.tagFormat)({version})};
context.nextRelease = nextRelease;
nextRelease.version = getNextVersion(context);
nextRelease.gitTag = template(options.tagFormat)({version: nextRelease.version});

await plugins.verifyRelease({options, logger, lastRelease, commits, nextRelease});

const generateNotesParam = {options, logger, lastRelease, commits, nextRelease};
await plugins.verifyRelease(context);

if (options.dryRun) {
const notes = await plugins.generateNotes(generateNotesParam);
const notes = await plugins.generateNotes(context);
logger.log('Release note for version %s:\n', nextRelease.version);
if (notes) {
process.stdout.write(`${marked(notes)}\n`);
logger.stdout(`${marked(notes)}\n`);
}
} else {
nextRelease.notes = await plugins.generateNotes(generateNotesParam);
await plugins.prepare({options, logger, lastRelease, commits, nextRelease});
nextRelease.notes = await plugins.generateNotes(context);
await plugins.prepare(context);

// Create the tag before calling the publish plugins as some require the tag to exist
logger.log('Create tag %s', nextRelease.gitTag);
await tag(nextRelease.gitTag);
await push(options.repositoryUrl, branch);
await tag(nextRelease.gitTag, {cwd, env});
await push(options.repositoryUrl, options.branch, {cwd, env});

const releases = await plugins.publish({options, logger, lastRelease, commits, nextRelease});
context.releases = await plugins.publish(context);

await plugins.success({options, logger, lastRelease, commits, nextRelease, releases});
await plugins.success(context);

logger.log('Published release: %s', nextRelease.version);
}
return true;
}

function logErrors(err) {
function logErrors({logger}, err) {
const errors = extractErrors(err).sort(error => (error.semanticRelease ? -1 : 0));
for (const error of errors) {
if (error.semanticRelease) {
logger.log(`%s ${error.message}`, error.code);
if (error.details) {
process.stdout.write(`${marked(error.details)}\n`);
logger.stderr(`${marked(error.details)}\n`);
}
} else {
logger.error('An error occurred while running semantic-release: %O', error);
@@ -129,35 +130,36 @@ function logErrors(err) {
}
}

async function callFail(plugins, options, error) {
async function callFail(context, plugins, error) {
const errors = extractErrors(error).filter(error => error.semanticRelease);
if (errors.length > 0) {
try {
await plugins.fail({options, logger, errors});
await plugins.fail({...context, errors});
} catch (err) {
logErrors(err);
logErrors(context, err);
}
}
}

module.exports = async opts => {
logger.log(`Running %s version %s`, pkg.name, pkg.version);
const {unhook} = hookStd({silent: false}, hideSensitive);
module.exports = async (opts, {cwd = process.cwd(), env = process.env} = {}) => {
const context = {cwd, env, logger};
context.logger.log(`Running %s version %s`, pkg.name, pkg.version);
const {unhook} = hookStd({silent: false}, hideSensitive(context.env));
try {
const config = await getConfig(opts, logger);
const {plugins, options} = config;
const {plugins, options} = await getConfig(context, opts);
context.options = options;
try {
const result = await run(options, plugins);
const result = await run(context, plugins);
unhook();
return result;
} catch (err) {
if (!options.dryRun) {
await callFail(plugins, options, err);
await callFail(context, plugins, err);
}
throw err;
}
} catch (err) {
logErrors(err);
logErrors(context, err);
unhook();
throw err;
}
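For illustration, with the change above every plugin step receives the shared `context` object instead of individually threaded parameters. A hypothetical plugin could therefore look like the following sketch; the context shape ({cwd, env, options, logger, ...}) is assumed from the `run` function in the diff above:

// Hypothetical verifyConditions plugin under the new context model.
module.exports = async (pluginConfig, {cwd, env, options, logger}) => {
  logger.log('Verifying conditions in %s for branch %s', cwd, options.branch);
  if (!env.GH_TOKEN && !env.GITHUB_TOKEN) {
    // The token names are examples; the point is that plugins read the provided env, not process.env.
    throw new Error('No GitHub token found in the provided environment');
  }
};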
@@ -8,4 +8,6 @@ const COMMIT_EMAIL = 'semantic-release-bot@martynus.net';

const RELEASE_NOTES_SEPARATOR = '\n\n';

module.exports = {RELEASE_TYPE, FIRST_RELEASE, COMMIT_NAME, COMMIT_EMAIL, RELEASE_NOTES_SEPARATOR};
const SECRET_REPLACEMENT = '[secure]';

module.exports = {RELEASE_TYPE, FIRST_RELEASE, COMMIT_NAME, COMMIT_EMAIL, RELEASE_NOTES_SEPARATOR, SECRET_REPLACEMENT};
@@ -30,8 +30,8 @@ module.exports = {
configValidator: conf => !conf || (isArray(conf) ? conf : [conf]).every(conf => validatePluginConfig(conf)),
outputValidator: output => !output || isString(output),
pipelineConfig: () => ({
getNextInput: ({nextRelease, ...generateNotesParam}, notes) => ({
...generateNotesParam,
getNextInput: ({nextRelease, ...context}, notes) => ({
...context,
nextRelease: {
...nextRelease,
notes: `${nextRelease.notes ? `${nextRelease.notes}${RELEASE_NOTES_SEPARATOR}` : ''}${notes}`,
@@ -44,17 +44,17 @@ module.exports = {
default: ['@semantic-release/npm'],
configValidator: conf => !conf || (isArray(conf) ? conf : [conf]).every(conf => validatePluginConfig(conf)),
pipelineConfig: ({generateNotes}, logger) => ({
getNextInput: async ({nextRelease, ...prepareParam}) => {
const newGitHead = await gitHead();
getNextInput: async context => {
const newGitHead = await gitHead({cwd: context.cwd});
// If previous prepare plugin has created a commit (gitHead changed)
if (nextRelease.gitHead !== newGitHead) {
nextRelease.gitHead = newGitHead;
if (context.nextRelease.gitHead !== newGitHead) {
context.nextRelease.gitHead = newGitHead;
// Regenerate the release notes
logger.log('Call plugin %s', 'generateNotes');
nextRelease.notes = await generateNotes({nextRelease, ...prepareParam});
context.nextRelease.notes = await generateNotes(context);
}
// Call the next publish plugin with the updated `nextRelease`
return {...prepareParam, nextRelease};
// Call the next prepare plugin with the updated `nextRelease`
return context;
},
}),
},
@@ -5,13 +5,11 @@ const debug = require('debug')('semantic-release:get-commits');
/**
* Retrieve the list of commits on the current branch since the commit sha associated with the last release, or all the commits of the current branch if there is no last released version.
*
* @param {String} gitHead The commit sha associated with the last release.
* @param {String} branch The branch to release from.
* @param {Object} logger Global logger.
* @param {Object} context semantic-release context.
*
* @return {Promise<Array<Object>>} The list of commits on the branch `branch` since the last release.
*/
module.exports = async (gitHead, branch, logger) => {
module.exports = async ({cwd, env, lastRelease: {gitHead}, logger}) => {
if (gitHead) {
debug('Use gitHead: %s', gitHead);
} else {
@@ -19,13 +17,13 @@ module.exports = async (gitHead, branch, logger) => {
}

Object.assign(gitLogParser.fields, {hash: 'H', message: 'B', gitTags: 'd', committerDate: {key: 'ci', type: Date}});
const commits = (await getStream.array(gitLogParser.parse({_: `${gitHead ? gitHead + '..' : ''}HEAD`}))).map(
commit => {
const commits = (await getStream.array(
gitLogParser.parse({_: `${gitHead ? gitHead + '..' : ''}HEAD`}, {cwd, env: {...process.env, ...env}})
)).map(commit => {
commit.message = commit.message.trim();
commit.gitTags = commit.gitTags.trim();
return commit;
}
);
});
logger.log('Found %s commits since last release', commits.length);
debug('Parsed commits: %o', commits);
return commits;
@@ -18,8 +18,9 @@ const CONFIG_FILES = [
`${CONFIG_NAME}.config.js`,
];

module.exports = async (opts, logger) => {
const {config} = (await cosmiconfig(CONFIG_NAME, {searchPlaces: CONFIG_FILES}).search()) || {};
module.exports = async (context, opts) => {
const {cwd, env} = context;
const {config} = (await cosmiconfig(CONFIG_NAME, {searchPlaces: CONFIG_FILES}).search(cwd)) || {};
// Merge config file options and CLI/API options
let options = {...config, ...opts};
const pluginsPath = {};
@@ -29,8 +30,7 @@ module.exports = async (opts, logger) => {
// If `extends` is defined, load and merge each shareable config with `options`
options = {
...castArray(extendPaths).reduce((result, extendPath) => {
const extendsOpts = require(resolveFrom.silent(__dirname, extendPath) ||
resolveFrom(process.cwd(), extendPath));
const extendsOpts = require(resolveFrom.silent(__dirname, extendPath) || resolveFrom(cwd, extendPath));

// For each plugin defined in a shareable config, save in `pluginsPath` the extendable config path,
// so those plugins will be loaded relative to the config file
@@ -55,7 +55,7 @@ module.exports = async (opts, logger) => {
// Set default options values if not defined yet
options = {
branch: 'master',
repositoryUrl: (await pkgRepoUrl()) || (await repoUrl()),
repositoryUrl: (await pkgRepoUrl({normalize: false, cwd})) || (await repoUrl({cwd, env})),
tagFormat: `v\${version}`,
// Remove `null` and `undefined` options so they can be replaced with default ones
...pickBy(options, option => !isUndefined(option) && !isNull(option)),
@@ -63,10 +63,10 @@ module.exports = async (opts, logger) => {

debug('options values: %O', options);

return {options, plugins: await plugins(options, pluginsPath, logger)};
return {options, plugins: await plugins({...context, options}, pluginsPath)};
};

async function pkgRepoUrl() {
const {pkg} = await readPkgUp({normalize: false});
async function pkgRepoUrl(opts) {
const {pkg} = await readPkgUp(opts);
return pkg && (isPlainObject(pkg.repository) ? pkg.repository.url : pkg.repository);
}
@@ -21,10 +21,11 @@ const GIT_TOKENS = {
*
* In addition, expand shortcut URLs (`owner/repo` => `https://github.com/owner/repo.git`) and transform `git+https` / `git+http` URLs to `https` / `http`.
*
* @param {String} repositoryUrl The user provided Git repository URL.
* @param {Object} context semantic-release context.
*
* @return {String} The formatted Git repository URL.
*/
module.exports = async ({repositoryUrl, branch}) => {
module.exports = async ({cwd, env, options: {repositoryUrl, branch}}) => {
const info = hostedGitInfo.fromUrl(repositoryUrl, {noGitPlus: true});

if (info && info.getDefaultRepresentation() === 'shortcut') {
@@ -41,10 +42,10 @@ module.exports = async ({repositoryUrl, branch}) => {

// Test if push is allowed without transforming the URL (e.g. if SSH keys are set up)
try {
await verifyAuth(repositoryUrl, branch);
await verifyAuth(repositoryUrl, branch, {cwd, env});
} catch (err) {
const envVar = Object.keys(GIT_TOKENS).find(envVar => !isUndefined(process.env[envVar]));
const gitCredentials = `${GIT_TOKENS[envVar] || ''}${process.env[envVar] || ''}`;
const envVar = Object.keys(GIT_TOKENS).find(envVar => !isUndefined(env[envVar]));
const gitCredentials = `${GIT_TOKENS[envVar] || ''}${env[envVar] || ''}`;
const {protocols, ...parsed} = gitUrlParse(repositoryUrl);
const protocol = protocols.includes('https') ? 'https' : protocols.includes('http') ? 'http' : 'https';
@@ -20,18 +20,17 @@ const {gitTags, isRefInHistory, gitTagHead} = require('./git');
* - Sort the versions
* - Retrieve the highest version
*
* @param {String} tagFormat Git tag format.
* @param {Object} logger Global logger.
* @param {Object} context semantic-release context.
*
* @return {Promise<LastRelease>} The last tagged release or `undefined` if none is found.
*/
module.exports = async (tagFormat, logger) => {
module.exports = async ({cwd, env, options: {tagFormat}, logger}) => {
// Generate a regex to parse tags formatted with `tagFormat`
// by replacing the `version` variable in the template by `(.+)`.
// The `tagFormat` is compiled with space as the `version` as it's an invalid tag character,
// so it's guaranteed not to be present in the `tagFormat`.
const tagRegexp = `^${escapeRegExp(template(tagFormat)({version: ' '})).replace(' ', '(.+)')}`;

const tags = (await gitTags())
const tags = (await gitTags({cwd, env}))
.map(tag => ({gitTag: tag, version: (tag.match(tagRegexp) || new Array(2))[1]}))
.filter(
tag => tag.version && semver.valid(semver.clean(tag.version)) && !semver.prerelease(semver.clean(tag.version))
@@ -40,11 +39,11 @@ module.exports = async (tagFormat, logger) => {

debug('found tags: %o', tags);

const tag = await pLocate(tags, tag => isRefInHistory(tag.gitTag), {concurrency: 1, preserveOrder: true});
const tag = await pLocate(tags, tag => isRefInHistory(tag.gitTag, {cwd, env}), {preserveOrder: true});

if (tag) {
logger.log('Found git tag %s associated with version %s', tag.gitTag, tag.version);
return {gitHead: await gitTagHead(tag.gitTag), ...tag};
return {gitHead: await gitTagHead(tag.gitTag, {cwd, env}), ...tag};
}

logger.log('No git tag version found');
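For illustration, a worked example of the tag-parsing regex described in the comments above, assuming the default `tagFormat` (lodash `template` supports the `${...}` delimiters used here):

// The default tagFormat 'v${version}' compiles to the pattern '^v(.+)'.
const {template, escapeRegExp} = require('lodash');

const tagFormat = 'v${version}';
const tagRegexp = `^${escapeRegExp(template(tagFormat)({version: ' '})).replace(' ', '(.+)')}`;
console.log(tagRegexp);                    // -> '^v(.+)'
console.log('v1.2.3'.match(tagRegexp)[1]); // -> '1.2.3'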
@@ -1,7 +1,7 @@
const semver = require('semver');
const {FIRST_RELEASE} = require('./definitions/constants');

module.exports = (type, lastRelease, logger) => {
module.exports = ({nextRelease: {type}, lastRelease, logger}) => {
let version;
if (lastRelease.version) {
version = semver.inc(lastRelease.version, type);
78  lib/git.js
@@ -5,23 +5,28 @@ const debug = require('debug')('semantic-release:git');
* Get the commit sha for a given tag.
*
* @param {string} tagName Tag name for which to retrieve the commit sha.
* @param {Object} [execaOpts] Options to pass to `execa`.
*
* @return {string} The commit sha of the tag in parameter or `null`.
*/
async function gitTagHead(tagName) {
async function gitTagHead(tagName, execaOpts) {
try {
return await execa.stdout('git', ['rev-list', '-1', tagName]);
return await execa.stdout('git', ['rev-list', '-1', tagName], execaOpts);
} catch (err) {
debug(err);
}
}

/**
* Get all the repository tags.
*
* @param {Object} [execaOpts] Options to pass to `execa`.
*
* @return {Array<String>} List of git tags.
* @throws {Error} If the `git` command fails.
*/
async function gitTags() {
return (await execa.stdout('git', ['tag']))
async function gitTags(execaOpts) {
return (await execa.stdout('git', ['tag'], execaOpts))
.split('\n')
.map(tag => tag.trim())
.filter(tag => Boolean(tag));
@@ -31,12 +36,13 @@ async function gitTags() {
* Verify if the `ref` is in the direct history of the current branch.
*
* @param {string} ref The reference to look for.
* @param {Object} [execaOpts] Options to pass to `execa`.
*
* @return {boolean} `true` if the reference is in the history of the current branch, falsy otherwise.
*/
async function isRefInHistory(ref) {
async function isRefInHistory(ref, execaOpts) {
try {
await execa('git', ['merge-base', '--is-ancestor', ref, 'HEAD']);
await execa('git', ['merge-base', '--is-ancestor', ref, 'HEAD'], execaOpts);
return true;
} catch (err) {
if (err.code === 1) {
@@ -52,39 +58,52 @@ async function isRefInHistory(ref) {
* Unshallow the git repository if necessary and fetch all the tags.
*
* @param {String} repositoryUrl The remote repository URL.
* @param {Object} [execaOpts] Options to pass to `execa`.
*/
async function fetch(repositoryUrl) {
async function fetch(repositoryUrl, execaOpts) {
try {
await execa('git', ['fetch', '--unshallow', '--tags', repositoryUrl]);
await execa('git', ['fetch', '--unshallow', '--tags', repositoryUrl], execaOpts);
} catch (err) {
await execa('git', ['fetch', '--tags', repositoryUrl]);
await execa('git', ['fetch', '--tags', repositoryUrl], execaOpts);
}
}

/**
* Get the HEAD sha.
*
* @param {Object} [execaOpts] Options to pass to `execa`.
*
* @return {string} the sha of the HEAD commit.
*/
async function gitHead() {
return execa.stdout('git', ['rev-parse', 'HEAD']);
async function gitHead(execaOpts) {
return execa.stdout('git', ['rev-parse', 'HEAD'], execaOpts);
}

/**
* Get the repository remote URL.
*
* @param {Object} [execaOpts] Options to pass to `execa`.
*
* @return {string} The value of the remote git URL.
*/
async function repoUrl() {
async function repoUrl(execaOpts) {
try {
return await execa.stdout('git', ['config', '--get', 'remote.origin.url']);
return await execa.stdout('git', ['config', '--get', 'remote.origin.url'], execaOpts);
} catch (err) {
debug(err);
}
}

/**
* Test if the current working directory is a Git repository.
*
* @param {Object} [execaOpts] Options to pass to `execa`.
*
* @return {Boolean} `true` if the current working directory is in a git repository, falsy otherwise.
*/
async function isGitRepo() {
async function isGitRepo(execaOpts) {
try {
return (await execa('git', ['rev-parse', '--git-dir'])).code === 0;
return (await execa('git', ['rev-parse', '--git-dir'], execaOpts)).code === 0;
} catch (err) {
debug(err);
}
@@ -95,12 +114,13 @@ async function isGitRepo() {
*
* @param {String} repositoryUrl The remote repository URL.
* @param {String} branch The repository branch for which to verify write access.
* @param {Object} [execaOpts] Options to pass to `execa`.
*
* @throws {Error} if not authorized to push.
*/
async function verifyAuth(repositoryUrl, branch) {
async function verifyAuth(repositoryUrl, branch, execaOpts) {
try {
await execa('git', ['push', '--dry-run', repositoryUrl, `HEAD:${branch}`]);
await execa('git', ['push', '--dry-run', repositoryUrl, `HEAD:${branch}`], execaOpts);
} catch (err) {
debug(err);
throw err;
@@ -111,10 +131,12 @@ async function verifyAuth(repositoryUrl, branch) {
* Tag the commit head on the local repository.
*
* @param {String} tagName The name of the tag.
* @param {Object} [execaOpts] Options to pass to `execa`.
*
* @throws {Error} if the tag creation failed.
*/
async function tag(tagName) {
await execa('git', ['tag', tagName]);
async function tag(tagName, execaOpts) {
await execa('git', ['tag', tagName], execaOpts);
}

/**
@@ -122,21 +144,25 @@ async function tag(tagName) {
*
* @param {String} repositoryUrl The remote repository URL.
* @param {String} branch The branch to push.
* @param {Object} [execaOpts] Options to pass to `execa`.
*
* @throws {Error} if the push failed.
*/
async function push(repositoryUrl, branch) {
await execa('git', ['push', '--tags', repositoryUrl, `HEAD:${branch}`]);
async function push(repositoryUrl, branch, execaOpts) {
await execa('git', ['push', '--tags', repositoryUrl, `HEAD:${branch}`], execaOpts);
}

/**
* Verify a tag name is a valid Git reference.
*
* @param {string} tagName the tag name to verify.
* @param {Object} [execaOpts] Options to pass to `execa`.
*
* @return {boolean} `true` if valid, falsy otherwise.
*/
async function verifyTagName(tagName) {
async function verifyTagName(tagName, execaOpts) {
try {
return (await execa('git', ['check-ref-format', `refs/tags/${tagName}`])).code === 0;
return (await execa('git', ['check-ref-format', `refs/tags/${tagName}`], execaOpts)).code === 0;
} catch (err) {
debug(err);
}
@@ -146,13 +172,15 @@ async function verifyTagName(tagName) {
* Verify the local branch is up to date with the remote one.
*
* @param {String} branch The repository branch for which to verify status.
* @param {Object} [execaOpts] Options to pass to `execa`.
*
* @return {Boolean} `true` if the HEAD of the current local branch is the same as the HEAD of the remote branch, falsy otherwise.
*/
async function isBranchUpToDate(branch) {
async function isBranchUpToDate(branch, execaOpts) {
try {
return await isRefInHistory(
(await execa.stdout('git', ['ls-remote', '--heads', 'origin', branch])).match(/^(\w+)?/)[1]
(await execa.stdout('git', ['ls-remote', '--heads', 'origin', branch], execaOpts)).match(/^(\w+)?/)[1],
execaOpts
);
} catch (err) {
debug(err);
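For illustration, a sketch of a call site for the updated helpers, assuming `execaOpts` is the usual execa option object ({cwd, env}) and the require path is resolved from the package root; the repository path is a placeholder:

// Hypothetical call site forwarding cwd/env to the git child processes.
const {gitTagHead, verifyAuth} = require('./lib/git');

(async () => {
  const opts = {cwd: '/path/to/repo', env: process.env};
  const sha = await gitTagHead('v1.0.0', opts); // git runs in /path/to/repo
  await verifyAuth('https://github.com/owner/repo.git', 'master', opts);
  console.log(sha);
})();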
@@ -1,11 +1,13 @@
const {escapeRegExp} = require('lodash');
const {SECRET_REPLACEMENT} = require('./definitions/constants');

const toReplace = Object.keys(process.env).filter(
envVar => /token|password|credential|secret|private/i.test(envVar) && process.env[envVar].trim()
module.exports = env => {
const toReplace = Object.keys(env).filter(
envVar => /token|password|credential|secret|private/i.test(envVar) && env[envVar].trim()
);

const regexp = new RegExp(toReplace.map(envVar => escapeRegExp(process.env[envVar])).join('|'), 'g');

module.exports = output => {
return output && toReplace.length > 0 ? output.toString().replace(regexp, '[secure]') : output;
const regexp = new RegExp(toReplace.map(envVar => escapeRegExp(env[envVar])).join('|'), 'g');
return output => {
return output && toReplace.length > 0 ? output.toString().replace(regexp, SECRET_REPLACEMENT) : output;
};
};
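For illustration, the module now exports a factory that builds the replacer from a provided `env` rather than reading `process.env` at load time. A minimal usage sketch (the require path and env values are placeholders):

// hideSensitive(env) returns a function that masks any env value whose name
// matches /token|password|credential|secret|private/i.
const hideSensitive = require('./lib/hide-sensitive');

const hide = hideSensitive({MY_TOKEN: 'super-secret', PATH: '/usr/bin'});
console.log(hide('pushing with super-secret')); // -> 'pushing with [secure]'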
@@ -20,4 +20,10 @@ module.exports = {
...(typeof format === 'string' ? [] : [format]).concat(rest)
);
},
stdout(...args) {
console.log(args);
},
stderr(...args) {
console.error(args);
},
};
@@ -5,17 +5,17 @@ const PLUGINS_DEFINITIONS = require('../definitions/plugins');
const pipeline = require('./pipeline');
const normalize = require('./normalize');

module.exports = (options, pluginsPath, logger) => {
module.exports = ({cwd, options, logger}, pluginsPath) => {
const errors = [];
const plugins = Object.entries(PLUGINS_DEFINITIONS).reduce(
(
plugins,
[type, {configValidator, default: def, pipelineConfig, postprocess = identity, preprocess = identity}]
) => {
let pluginConfs;
let pluginOpts;

if (isUndefined(options[type])) {
pluginConfs = def;
pluginOpts = def;
} else {
const defaultPaths = castArray(def);
// If an object is passed and the path is missing, set the default one for single plugins
@@ -26,11 +26,12 @@ module.exports = (options, pluginsPath, logger) => {
errors.push(getError('EPLUGINCONF', {type, pluginConf: options[type]}));
return plugins;
}
pluginConfs = options[type];
pluginOpts = options[type];
}

const globalOpts = omit(options, Object.keys(PLUGINS_DEFINITIONS));
const steps = castArray(pluginConfs).map(conf => normalize(type, pluginsPath, globalOpts, conf, logger));
const steps = castArray(pluginOpts).map(pluginOpt =>
normalize({cwd, options: omit(options, Object.keys(PLUGINS_DEFINITIONS)), logger}, type, pluginOpt, pluginsPath)
);

plugins[type] = async input =>
postprocess(await pipeline(steps, pipelineConfig && pipelineConfig(plugins, logger))(await preprocess(input)));
@@ -5,17 +5,15 @@ const getError = require('../get-error');
const {extractErrors} = require('../utils');
const PLUGINS_DEFINITIONS = require('../definitions/plugins');

/* eslint max-params: ["error", 5] */

module.exports = (type, pluginsPath, globalOpts, pluginOpts, logger) => {
if (!pluginOpts) {
module.exports = ({cwd, options, logger}, type, pluginOpt, pluginsPath) => {
if (!pluginOpt) {
return noop;
}

const {path, ...config} = isString(pluginOpts) || isFunction(pluginOpts) ? {path: pluginOpts} : pluginOpts;
const {path, ...config} = isString(pluginOpt) || isFunction(pluginOpt) ? {path: pluginOpt} : pluginOpt;
const pluginName = isFunction(path) ? `[Function: ${path.name}]` : path;

if (!isFunction(pluginOpts)) {
if (!isFunction(pluginOpt)) {
if (pluginsPath[path]) {
logger.log('Load plugin "%s" from %s in shareable config %s', type, path, pluginsPath[path]);
} else {
@@ -24,17 +22,15 @@ module.exports = (type, pluginsPath, globalOpts, pluginOpts, logger) => {
}

const basePath = pluginsPath[path]
? dirname(resolveFrom.silent(__dirname, pluginsPath[path]) || resolveFrom(process.cwd(), pluginsPath[path]))
? dirname(resolveFrom.silent(__dirname, pluginsPath[path]) || resolveFrom(cwd, pluginsPath[path]))
: __dirname;
const plugin = isFunction(path)
? path
: require(resolveFrom.silent(basePath, path) || resolveFrom(process.cwd(), path));
const plugin = isFunction(path) ? path : require(resolveFrom.silent(basePath, path) || resolveFrom(cwd, path));

let func;
if (isFunction(plugin)) {
func = plugin.bind(null, cloneDeep({...globalOpts, ...config}));
func = plugin.bind(null, cloneDeep({...options, ...config}));
} else if (isPlainObject(plugin) && plugin[type] && isFunction(plugin[type])) {
func = plugin[type].bind(null, cloneDeep({...globalOpts, ...config}));
func = plugin[type].bind(null, cloneDeep({...options, ...config}));
} else {
throw getError('EPLUGIN', {type, pluginName});
}
@@ -3,25 +3,25 @@ const AggregateError = require('aggregate-error');
const {isGitRepo, verifyTagName} = require('./git');
const getError = require('./get-error');

module.exports = async options => {
module.exports = async ({cwd, env, options: {repositoryUrl, tagFormat}}) => {
const errors = [];

if (!(await isGitRepo())) {
if (!(await isGitRepo({cwd, env}))) {
errors.push(getError('ENOGITREPO'));
} else if (!options.repositoryUrl) {
} else if (!repositoryUrl) {
errors.push(getError('ENOREPOURL'));
}

// Verify that compiling the `tagFormat` produces a valid Git tag
if (!(await verifyTagName(template(options.tagFormat)({version: '0.0.0'})))) {
errors.push(getError('EINVALIDTAGFORMAT', {tagFormat: options.tagFormat}));
if (!(await verifyTagName(template(tagFormat)({version: '0.0.0'})))) {
errors.push(getError('EINVALIDTAGFORMAT', {tagFormat}));
}

// Verify the `tagFormat` contains the variable `version` by compiling the `tagFormat` template
// with a space as the `version` value and verify the result contains the space.
// The space is used as it's an invalid tag character, so it's guaranteed not to be present in the `tagFormat`.
if ((template(options.tagFormat)({version: ' '}).match(/ /g) || []).length !== 1) {
errors.push(getError('ETAGNOVERSION', {tagFormat: options.tagFormat}));
if ((template(tagFormat)({version: ' '}).match(/ /g) || []).length !== 1) {
errors.push(getError('ETAGNOVERSION', {tagFormat}));
}

if (errors.length > 0) {
@@ -19,11 +19,11 @@
"Pierre Vanduynslager (https://twitter.com/@pvdlg_)"
],
"dependencies": {
"@semantic-release/commit-analyzer": "^5.0.0",
"@semantic-release/commit-analyzer": "^6.0.0",
"@semantic-release/error": "^2.2.0",
"@semantic-release/github": "^4.1.0",
"@semantic-release/npm": "^3.2.0",
"@semantic-release/release-notes-generator": "^6.0.0",
"@semantic-release/github": "^5.0.0",
"@semantic-release/npm": "^4.0.0",
"@semantic-release/release-notes-generator": "^7.0.0",
"aggregate-error": "^1.0.0",
"chalk": "^2.3.0",
"cosmiconfig": "^5.0.1",
2  test/fixtures/plugin-identity.js (vendored)
@@ -1 +1 @@
module.exports = (pluginConfig, options) => options;
module.exports = (pluginConfig, context) => context;

6  test/fixtures/plugin-log-env.js (vendored, new file)
@@ -0,0 +1,6 @@
module.exports = (pluginConfig, {env, logger}) => {
console.log(`Console: Exposing token ${env.MY_TOKEN}`);
logger.log(`Log: Exposing token ${env.MY_TOKEN}`);
logger.error(`Error: Console token ${env.MY_TOKEN}`);
throw new Error(`Throw error: Exposing ${env.MY_TOKEN}`);
};

2  test/fixtures/plugin-result-config.js (vendored)
@@ -1 +1 @@
module.exports = (pluginConfig, options) => ({pluginConfig, options});
module.exports = (pluginConfig, context) => ({pluginConfig, context});
@@ -3,9 +3,6 @@ import {stub} from 'sinon';
import getCommits from '../lib/get-commits';
import {gitRepo, gitCommits, gitDetachedHead} from './helpers/git-utils';

// Save the current working directory
const cwd = process.cwd();

test.beforeEach(t => {
// Stub the logger functions
t.context.log = stub();
@@ -13,49 +10,52 @@ test.beforeEach(t => {
t.context.logger = {log: t.context.log, error: t.context.error};
});

test.afterEach.always(() => {
// Restore the current working directory
process.chdir(cwd);
});

test.serial('Get all commits when there is no last release', async t => {
test('Get all commits when there is no last release', async t => {
// Create a git repository, set the current working directory at the root of the repo
await gitRepo();
const {cwd} = await gitRepo();
// Add commits to the master branch
const commits = await gitCommits(['First', 'Second']);
const commits = await gitCommits(['First', 'Second'], {cwd});

// Retrieve the commits with the commits module
const result = await getCommits(undefined, 'master', t.context.logger);
const result = await getCommits({cwd, lastRelease: {}, logger: t.context.logger});

// Verify the commits created and retrieved by the module are identical
t.is(result.length, 2);
t.deepEqual(result, commits);
});

test.serial('Get all commits since gitHead (from lastRelease)', async t => {
test('Get all commits since gitHead (from lastRelease)', async t => {
// Create a git repository, set the current working directory at the root of the repo
await gitRepo();
const {cwd} = await gitRepo();
// Add commits to the master branch
const commits = await gitCommits(['First', 'Second', 'Third']);
const commits = await gitCommits(['First', 'Second', 'Third'], {cwd});

// Retrieve the commits with the commits module, since commit 'First'
const result = await getCommits(commits[commits.length - 1].hash, 'master', t.context.logger);
const result = await getCommits({
cwd,
lastRelease: {gitHead: commits[commits.length - 1].hash},
logger: t.context.logger,
});

// Verify the commits created and retrieved by the module are identical
t.is(result.length, 2);
t.deepEqual(result, commits.slice(0, 2));
});

test.serial('Get all commits since gitHead (from lastRelease) on a detached head repo', async t => {
test('Get all commits since gitHead (from lastRelease) on a detached head repo', async t => {
// Create a git repository, set the current working directory at the root of the repo
const repo = await gitRepo();
let {cwd, repositoryUrl} = await gitRepo();
// Add commits to the master branch
const commits = await gitCommits(['First', 'Second', 'Third']);
const commits = await gitCommits(['First', 'Second', 'Third'], {cwd});
// Create a detached head repo at commit 'feat: Second'
await gitDetachedHead(repo, commits[1].hash);
cwd = await gitDetachedHead(repositoryUrl, commits[1].hash);

// Retrieve the commits with the commits module, since commit 'First'
const result = await getCommits(commits[commits.length - 1].hash, 'master', t.context.logger);
const result = await getCommits({
cwd,
lastRelease: {gitHead: commits[commits.length - 1].hash},
logger: t.context.logger,
});

// Verify the module retrieved only the commit 'feat: Second' (included in the detached and after 'fix: First')
t.is(result.length, 1);
@@ -66,25 +66,29 @@ test.serial('Get all commits since gitHead (from lastRelease) on a detached head
t.truthy(result[0].committer.name);
});

test.serial('Return empty array if lastRelease.gitHead is the last commit', async t => {
test('Return empty array if lastRelease.gitHead is the last commit', async t => {
// Create a git repository, set the current working directory at the root of the repo
await gitRepo();
const {cwd} = await gitRepo();
// Add commits to the master branch
const commits = await gitCommits(['First', 'Second']);
const commits = await gitCommits(['First', 'Second'], {cwd});

// Retrieve the commits with the commits module, since commit 'Second' (therefore none)
const result = await getCommits(commits[0].hash, 'master', t.context.logger);
const result = await getCommits({
cwd,
lastRelease: {gitHead: commits[0].hash},
logger: t.context.logger,
});

// Verify no commit is retrieved
t.deepEqual(result, []);
});

test.serial('Return empty array if there is no commits', async t => {
test('Return empty array if there is no commits', async t => {
// Create a git repository, set the current working directory at the root of the repo
await gitRepo();
const {cwd} = await gitRepo();

// Retrieve the commits with the commits module
const result = await getCommits(undefined, 'master', t.context.logger);
const result = await getCommits({cwd, lastRelease: {}, logger: t.context.logger});

// Verify no commit is retrieved
t.deepEqual(result, []);
@ -1,3 +1,4 @@
|
||||
import path from 'path';
|
||||
import {format} from 'util';
|
||||
import test from 'ava';
|
||||
import {writeFile, outputJson} from 'fs-extra';
|
||||
@ -7,184 +8,166 @@ import {stub} from 'sinon';
|
||||
import yaml from 'js-yaml';
|
||||
import {gitRepo, gitCommits, gitShallowClone, gitAddConfig} from './helpers/git-utils';
|
||||
|
||||
// Save the current process.env
|
||||
const envBackup = Object.assign({}, process.env);
|
||||
// Save the current working directory
|
||||
const cwd = process.cwd();
|
||||
|
||||
test.beforeEach(t => {
|
||||
delete process.env.GIT_CREDENTIALS;
|
||||
delete process.env.GH_TOKEN;
|
||||
delete process.env.GITHUB_TOKEN;
|
||||
delete process.env.GL_TOKEN;
|
||||
delete process.env.GITLAB_TOKEN;
|
||||
// Delete environment variables that could have been set on the machine running the tests
|
||||
t.context.plugins = stub().returns({});
|
||||
t.context.getConfig = proxyquire('../lib/get-config', {'./plugins': t.context.plugins});
|
||||
});
|
||||
|
||||
test.afterEach.always(() => {
|
||||
// Restore process.env
|
||||
process.env = envBackup;
|
||||
// Restore the current working directory
|
||||
process.chdir(cwd);
|
||||
});
|
||||
|
||||
test.serial('Default values, reading repositoryUrl from package.json', async t => {
|
||||
test('Default values, reading repositoryUrl from package.json', async t => {
|
||||
const pkg = {repository: 'https://host.null/owner/package.git'};
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
await gitRepo();
|
||||
await gitCommits(['First']);
|
||||
const {cwd} = await gitRepo();
|
||||
await gitCommits(['First'], {cwd});
|
||||
// Add remote.origin.url config
|
||||
await gitAddConfig('remote.origin.url', 'git@host.null:owner/repo.git');
|
||||
await gitAddConfig('remote.origin.url', 'git@host.null:owner/repo.git', {cwd});
|
||||
// Create package.json in repository root
|
||||
await outputJson('./package.json', pkg);
|
||||
await outputJson(path.resolve(cwd, 'package.json'), pkg);
|
||||
|
||||
const {options} = await t.context.getConfig();
|
||||
const {options: result} = await t.context.getConfig({cwd});
|
||||
|
||||
// Verify the default options are set
|
||||
t.is(options.branch, 'master');
|
||||
t.is(options.repositoryUrl, 'https://host.null/owner/package.git');
|
||||
t.is(options.tagFormat, `v\${version}`);
|
||||
t.is(result.branch, 'master');
|
||||
t.is(result.repositoryUrl, 'https://host.null/owner/package.git');
|
||||
t.is(result.tagFormat, `v\${version}`);
|
||||
});
|
||||
|
||||
test.serial('Default values, reading repositoryUrl from repo if not set in package.json', async t => {
|
||||
test('Default values, reading repositoryUrl from repo if not set in package.json', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
await gitRepo();
|
||||
const {cwd} = await gitRepo();
|
||||
// Add remote.origin.url config
|
||||
await gitAddConfig('remote.origin.url', 'https://host.null/owner/module.git');
|
||||
await gitAddConfig('remote.origin.url', 'https://host.null/owner/module.git', {cwd});
|
||||
|
||||
const {options} = await t.context.getConfig();
|
||||
const {options: result} = await t.context.getConfig({cwd});
|
||||
|
||||
// Verify the default options are set
|
||||
t.is(options.branch, 'master');
|
||||
t.is(options.repositoryUrl, 'https://host.null/owner/module.git');
|
||||
t.is(options.tagFormat, `v\${version}`);
|
||||
t.is(result.branch, 'master');
|
||||
t.is(result.repositoryUrl, 'https://host.null/owner/module.git');
|
||||
t.is(result.tagFormat, `v\${version}`);
|
||||
});
|
||||
|
||||
test.serial('Default values, reading repositoryUrl (http url) from package.json if not set in repo', async t => {
|
||||
test('Default values, reading repositoryUrl (http url) from package.json if not set in repo', async t => {
|
||||
const pkg = {repository: 'https://host.null/owner/module.git'};
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
await gitRepo();
|
||||
const {cwd} = await gitRepo();
|
||||
// Create package.json in repository root
|
||||
await outputJson('./package.json', pkg);
|
||||
await outputJson(path.resolve(cwd, 'package.json'), pkg);
|
||||
|
||||
const {options} = await t.context.getConfig();
|
||||
const {options: result} = await t.context.getConfig({cwd});
|
||||
|
||||
// Verify the default options are set
|
||||
t.is(options.branch, 'master');
|
||||
t.is(options.repositoryUrl, 'https://host.null/owner/module.git');
|
||||
t.is(options.tagFormat, `v\${version}`);
|
||||
t.is(result.branch, 'master');
|
||||
t.is(result.repositoryUrl, 'https://host.null/owner/module.git');
|
||||
t.is(result.tagFormat, `v\${version}`);
|
||||
});
|
||||
|
||||
test.serial('Read options from package.json', async t => {
|
||||
const release = {
|
||||
test('Read options from package.json', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
const {cwd} = await gitRepo();
|
||||
const options = {
|
||||
analyzeCommits: {path: 'analyzeCommits', param: 'analyzeCommits_param'},
|
||||
generateNotes: 'generateNotes',
|
||||
branch: 'test_branch',
|
||||
repositoryUrl: 'https://host.null/owner/module.git',
|
||||
tagFormat: `v\${version}`,
|
||||
};
|
||||
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
await gitRepo();
|
||||
// Create package.json in repository root
|
||||
await outputJson('./package.json', {release});
|
||||
await outputJson(path.resolve(cwd, 'package.json'), {release: options});
|
||||
|
||||
const {options} = await t.context.getConfig();
|
||||
const {options: result} = await t.context.getConfig({cwd});
|
||||
|
||||
// Verify the options contains the plugin config from package.json
|
||||
t.deepEqual(options, release);
|
||||
t.deepEqual(result, options);
|
||||
// Verify the plugins module is called with the plugin options from package.json
|
||||
t.deepEqual(t.context.plugins.args[0][0], release);
|
||||
t.deepEqual(t.context.plugins.args[0][0], {cwd, options});
|
||||
});
|
||||
|
||||
test.serial('Read options from .releaserc.yml', async t => {
|
||||
const release = {
|
||||
test('Read options from .releaserc.yml', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
const {cwd} = await gitRepo();
|
||||
const options = {
|
||||
analyzeCommits: {path: 'analyzeCommits', param: 'analyzeCommits_param'},
|
||||
branch: 'test_branch',
|
||||
repositoryUrl: 'https://host.null/owner/module.git',
|
||||
tagFormat: `v\${version}`,
|
||||
};
|
||||
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
await gitRepo();
|
||||
// Create package.json in repository root
|
||||
await writeFile('.releaserc.yml', yaml.safeDump(release));
|
||||
await writeFile(path.resolve(cwd, '.releaserc.yml'), yaml.safeDump(options));
|
||||
|
||||
const {options} = await t.context.getConfig();
|
||||
const {options: result} = await t.context.getConfig({cwd});
|
||||
|
||||
// Verify the options contains the plugin config from package.json
|
||||
t.deepEqual(options, release);
|
||||
t.deepEqual(result, options);
|
||||
// Verify the plugins module is called with the plugin options from package.json
|
||||
t.deepEqual(t.context.plugins.args[0][0], release);
|
||||
t.deepEqual(t.context.plugins.args[0][0], {cwd, options});
|
||||
});
|
||||
|
||||
test.serial('Read options from .releaserc.json', async t => {
|
||||
const release = {
|
||||
test('Read options from .releaserc.json', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
const {cwd} = await gitRepo();
|
||||
const options = {
|
||||
analyzeCommits: {path: 'analyzeCommits', param: 'analyzeCommits_param'},
|
||||
branch: 'test_branch',
|
||||
repositoryUrl: 'https://host.null/owner/module.git',
|
||||
tagFormat: `v\${version}`,
|
||||
};
|
||||
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
await gitRepo();
|
||||
// Create package.json in repository root
|
||||
await outputJson('.releaserc.json', release);
|
||||
await outputJson(path.resolve(cwd, '.releaserc.json'), options);
|
||||
|
||||
const {options} = await t.context.getConfig();
|
||||
const {options: result} = await t.context.getConfig({cwd});
|
||||
|
||||
// Verify the options contains the plugin config from package.json
|
||||
t.deepEqual(options, release);
|
||||
t.deepEqual(result, options);
|
||||
// Verify the plugins module is called with the plugin options from package.json
|
||||
t.deepEqual(t.context.plugins.args[0][0], release);
|
||||
t.deepEqual(t.context.plugins.args[0][0], {cwd, options});
|
||||
});
|
||||
|
||||
test.serial('Read options from .releaserc.js', async t => {
|
||||
const release = {
|
||||
test('Read options from .releaserc.js', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
const {cwd} = await gitRepo();
|
||||
const options = {
|
||||
analyzeCommits: {path: 'analyzeCommits', param: 'analyzeCommits_param'},
|
||||
branch: 'test_branch',
|
||||
repositoryUrl: 'https://host.null/owner/module.git',
|
||||
tagFormat: `v\${version}`,
|
||||
};
|
||||
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
await gitRepo();
|
||||
// Create package.json in repository root
|
||||
await writeFile('.releaserc.js', `module.exports = ${JSON.stringify(release)}`);
|
||||
await writeFile(path.resolve(cwd, '.releaserc.js'), `module.exports = ${JSON.stringify(options)}`);
|
||||
|
||||
const {options} = await t.context.getConfig();
|
||||
const {options: result} = await t.context.getConfig({cwd});
|
||||
|
||||
// Verify the options contains the plugin config from package.json
|
||||
t.deepEqual(options, release);
|
||||
t.deepEqual(result, options);
|
||||
// Verify the plugins module is called with the plugin options from package.json
|
||||
t.deepEqual(t.context.plugins.args[0][0], release);
|
||||
t.deepEqual(t.context.plugins.args[0][0], {cwd, options});
|
||||
});
|
||||
|
||||
test.serial('Read options from release.config.js', async t => {
|
||||
const release = {
|
||||
test('Read options from release.config.js', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
const {cwd} = await gitRepo();
|
||||
const options = {
|
||||
analyzeCommits: {path: 'analyzeCommits', param: 'analyzeCommits_param'},
|
||||
branch: 'test_branch',
|
||||
repositoryUrl: 'https://host.null/owner/module.git',
|
||||
tagFormat: `v\${version}`,
|
||||
};
|
||||
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
await gitRepo();
|
||||
// Create package.json in repository root
|
||||
await writeFile('release.config.js', `module.exports = ${JSON.stringify(release)}`);
|
||||
await writeFile(path.resolve(cwd, 'release.config.js'), `module.exports = ${JSON.stringify(options)}`);
|
||||
|
||||
const {options} = await t.context.getConfig();
|
||||
const {options: result} = await t.context.getConfig({cwd});
|
||||
|
||||
// Verify the options contains the plugin config from package.json
|
||||
t.deepEqual(options, release);
|
||||
t.deepEqual(result, options);
|
||||
// Verify the plugins module is called with the plugin options from package.json
|
||||
t.deepEqual(t.context.plugins.args[0][0], release);
|
||||
t.deepEqual(t.context.plugins.args[0][0], {cwd, options});
|
||||
});
|
||||
|
||||
test.serial('Prioritise CLI/API parameters over file configuration and git repo', async t => {
|
||||
const release = {
|
||||
test('Prioritise CLI/API parameters over file configuration and git repo', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
let {cwd, repositoryUrl} = await gitRepo();
|
||||
await gitCommits(['First'], {cwd});
|
||||
// Create a clone
|
||||
cwd = await gitShallowClone(repositoryUrl);
|
||||
const pkgOptions = {
|
||||
analyzeCommits: {path: 'analyzeCommits', param: 'analyzeCommits_pkg'},
|
||||
branch: 'branch_pkg',
|
||||
};
|
||||
@ -194,109 +177,100 @@ test.serial('Prioritise CLI/API parameters over file configuration and git repo'
|
||||
repositoryUrl: 'http://cli-url.com/owner/package',
|
||||
tagFormat: `cli\${version}`,
|
||||
};
|
||||
const pkg = {release, repository: 'git@host.null:owner/module.git'};
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
const repo = await gitRepo();
|
||||
await gitCommits(['First']);
|
||||
// Create a clone
|
||||
await gitShallowClone(repo);
|
||||
const pkg = {release: pkgOptions, repository: 'git@host.null:owner/module.git'};
|
||||
// Create package.json in repository root
|
||||
await outputJson('./package.json', pkg);
|
||||
await outputJson(path.resolve(cwd, 'package.json'), pkg);
|
||||
|
||||
const result = await t.context.getConfig(options);
|
||||
const result = await t.context.getConfig({cwd}, options);
|
||||
|
||||
// Verify the options contains the plugin config from CLI/API
|
||||
t.deepEqual(result.options, options);
|
||||
// Verify the plugins module is called with the plugin options from CLI/API
|
||||
t.deepEqual(t.context.plugins.args[0][0], options);
|
||||
t.deepEqual(t.context.plugins.args[0][0], {cwd, options});
|
||||
});
|
||||
|
||||
test.serial('Read configuration from file path in "extends"', async t => {
|
||||
const release = {extends: './shareable.json'};
|
||||
const shareable = {
|
||||
test('Read configuration from file path in "extends"', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
const {cwd} = await gitRepo();
|
||||
const pkgOptions = {extends: './shareable.json'};
|
||||
const options = {
|
||||
analyzeCommits: {path: 'analyzeCommits', param: 'analyzeCommits_param'},
|
||||
generateNotes: 'generateNotes',
|
||||
branch: 'test_branch',
|
||||
repositoryUrl: 'https://host.null/owner/module.git',
|
||||
tagFormat: `v\${version}`,
|
||||
};
|
||||
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
await gitRepo();
|
||||
// Create package.json and shareable.json in repository root
|
||||
await outputJson('./package.json', {release});
|
||||
await outputJson('./shareable.json', shareable);
|
||||
await outputJson(path.resolve(cwd, 'package.json'), {release: pkgOptions});
|
||||
await outputJson(path.resolve(cwd, 'shareable.json'), options);
|
||||
|
||||
const {options} = await t.context.getConfig();
|
||||
const {options: result} = await t.context.getConfig({cwd});
|
||||
|
||||
// Verify the options contains the plugin config from shareable.json
|
||||
t.deepEqual(options, shareable);
|
||||
t.deepEqual(result, options);
|
||||
// Verify the plugins module is called with the plugin options from shareable.json
|
||||
t.deepEqual(t.context.plugins.args[0][0], shareable);
|
||||
t.deepEqual(t.context.plugins.args[0][0], {cwd, options});
|
||||
t.deepEqual(t.context.plugins.args[0][1], {
|
||||
analyzeCommits: './shareable.json',
|
||||
generateNotes: './shareable.json',
|
||||
});
|
||||
});
|
||||
|
||||
test.serial('Read configuration from module path in "extends"', async t => {
|
||||
const release = {extends: 'shareable'};
|
||||
const shareable = {
|
||||
test('Read configuration from module path in "extends"', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
const {cwd} = await gitRepo();
|
||||
const pkgOptions = {extends: 'shareable'};
|
||||
const options = {
|
||||
analyzeCommits: {path: 'analyzeCommits', param: 'analyzeCommits_param'},
|
||||
generateNotes: 'generateNotes',
|
||||
branch: 'test_branch',
|
||||
repositoryUrl: 'https://host.null/owner/module.git',
|
||||
tagFormat: `v\${version}`,
|
||||
};
|
||||
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
await gitRepo();
|
||||
// Create package.json and shareable.json in repository root
|
||||
await outputJson('./package.json', {release});
|
||||
await outputJson('./node_modules/shareable/index.json', shareable);
|
||||
await outputJson(path.resolve(cwd, 'package.json'), {release: pkgOptions});
|
||||
await outputJson(path.resolve(cwd, 'node_modules/shareable/index.json'), options);
|
||||
|
||||
const {options} = await t.context.getConfig();
|
||||
const {options: results} = await t.context.getConfig({cwd});
|
||||
|
||||
// Verify the options contains the plugin config from shareable.json
|
||||
t.deepEqual(options, shareable);
|
||||
t.deepEqual(results, options);
|
||||
// Verify the plugins module is called with the plugin options from shareable.json
|
||||
t.deepEqual(t.context.plugins.args[0][0], shareable);
|
||||
t.deepEqual(t.context.plugins.args[0][0], {cwd, options});
|
||||
t.deepEqual(t.context.plugins.args[0][1], {
|
||||
analyzeCommits: 'shareable',
|
||||
generateNotes: 'shareable',
|
||||
});
|
||||
});
|
||||
|
||||
test.serial('Read configuration from an array of paths in "extends"', async t => {
|
||||
const release = {extends: ['./shareable1.json', './shareable2.json']};
|
||||
const shareable1 = {
|
||||
test('Read configuration from an array of paths in "extends"', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
const {cwd} = await gitRepo();
|
||||
const pkgOptions = {extends: ['./shareable1.json', './shareable2.json']};
|
||||
const options1 = {
|
||||
verifyRelease: 'verifyRelease1',
|
||||
analyzeCommits: {path: 'analyzeCommits1', param: 'analyzeCommits_param1'},
|
||||
branch: 'test_branch',
|
||||
repositoryUrl: 'https://host.null/owner/module.git',
|
||||
};
|
||||
|
||||
const shareable2 = {
|
||||
const options2 = {
|
||||
verifyRelease: 'verifyRelease2',
|
||||
generateNotes: 'generateNotes2',
|
||||
analyzeCommits: {path: 'analyzeCommits2', param: 'analyzeCommits_param2'},
|
||||
branch: 'test_branch',
|
||||
tagFormat: `v\${version}`,
|
||||
};
|
||||
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
await gitRepo();
|
||||
// Create package.json and shareable.json in repository root
|
||||
await outputJson('./package.json', {release});
|
||||
await outputJson('./shareable1.json', shareable1);
|
||||
await outputJson('./shareable2.json', shareable2);
|
||||
await outputJson(path.resolve(cwd, 'package.json'), {release: pkgOptions});
|
||||
await outputJson(path.resolve(cwd, 'shareable1.json'), options1);
|
||||
await outputJson(path.resolve(cwd, 'shareable2.json'), options2);
|
||||
|
||||
const {options} = await t.context.getConfig();
|
||||
const {options: results} = await t.context.getConfig({cwd});
|
||||
|
||||
// Verify the options contains the plugin config from shareable1.json and shareable2.json
|
||||
t.deepEqual(options, {...shareable1, ...shareable2});
|
||||
t.deepEqual(results, {...options1, ...options2});
|
||||
// Verify the plugins module is called with the plugin options from shareable1.json and shareable2.json
|
||||
t.deepEqual(t.context.plugins.args[0][0], {...shareable1, ...shareable2});
|
||||
t.deepEqual(t.context.plugins.args[0][0], {cwd, options: {...options1, ...options2}});
|
||||
t.deepEqual(t.context.plugins.args[0][1], {
|
||||
verifyRelease1: './shareable1.json',
|
||||
verifyRelease2: './shareable2.json',
|
||||
@@ -306,14 +280,16 @@ test.serial('Read configuration from an array of paths in "extends"', async t =>
|
||||
});
|
||||
});
|
||||
|
||||
test.serial('Prioritize configuration from config file over "extends"', async t => {
|
||||
const release = {
|
||||
test('Prioritize configuration from config file over "extends"', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
const {cwd} = await gitRepo();
|
||||
const pkgOptions = {
|
||||
extends: './shareable.json',
|
||||
branch: 'test_pkg',
|
||||
generateNotes: 'generateNotes',
|
||||
publish: [{path: 'publishPkg', param: 'publishPkg_param'}],
|
||||
};
|
||||
const shareable = {
|
||||
const options1 = {
|
||||
analyzeCommits: 'analyzeCommits',
|
||||
generateNotes: 'generateNotesShareable',
|
||||
publish: [{path: 'publishShareable', param: 'publishShareable_param'}],
|
||||
@@ -321,19 +297,16 @@ test.serial('Prioritize configuration from config file over "extends"', async t
|
||||
repositoryUrl: 'https://host.null/owner/module.git',
|
||||
tagFormat: `v\${version}`,
|
||||
};
|
||||
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
await gitRepo();
|
||||
// Create package.json and shareable.json in repository root
|
||||
await outputJson('./package.json', {release});
|
||||
await outputJson('./shareable.json', shareable);
|
||||
await outputJson(path.resolve(cwd, 'package.json'), {release: pkgOptions});
|
||||
await outputJson(path.resolve(cwd, 'shareable.json'), options1);
|
||||
|
||||
const {options} = await t.context.getConfig();
|
||||
const {options} = await t.context.getConfig({cwd});
|
||||
|
||||
// Verify the options contains the plugin config from package.json and shareable.json
|
||||
t.deepEqual(options, omit({...shareable, ...release}, 'extends'));
|
||||
t.deepEqual(options, omit({...options1, ...pkgOptions}, 'extends'));
|
||||
// Verify the plugins module is called with the plugin options from package.json and shareable.json
|
||||
t.deepEqual(t.context.plugins.args[0][0], omit({...shareable, ...release}, 'extends'));
|
||||
t.deepEqual(t.context.plugins.args[0][0], {cwd, options: omit({...options, ...pkgOptions}, 'extends')});
|
||||
t.deepEqual(t.context.plugins.args[0][1], {
|
||||
analyzeCommits: './shareable.json',
|
||||
generateNotesShareable: './shareable.json',
|
||||
@@ -341,75 +314,79 @@ test.serial('Prioritize configuration from config file over "extends"', async t
|
||||
});
|
||||
});
|
||||
|
||||
test.serial('Prioritize configuration from cli/API options over "extends"', async t => {
|
||||
const opts = {
|
||||
test('Prioritize configuration from cli/API options over "extends"', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
const {cwd} = await gitRepo();
|
||||
const cliOptions = {
|
||||
extends: './shareable2.json',
|
||||
branch: 'branch_opts',
|
||||
publish: [{path: 'publishOpts', param: 'publishOpts_param'}],
|
||||
repositoryUrl: 'https://host.null/owner/module.git',
|
||||
};
|
||||
const release = {
|
||||
const pkgOptions = {
|
||||
extends: './shareable1.json',
|
||||
branch: 'branch_pkg',
|
||||
generateNotes: 'generateNotes',
|
||||
publish: [{path: 'publishPkg', param: 'publishPkg_param'}],
|
||||
};
|
||||
const shareable1 = {
|
||||
const options1 = {
|
||||
analyzeCommits: 'analyzeCommits1',
|
||||
generateNotes: 'generateNotesShareable1',
|
||||
publish: [{path: 'publishShareable', param: 'publishShareable_param1'}],
|
||||
branch: 'test_branch1',
|
||||
repositoryUrl: 'https://host.null/owner/module.git',
|
||||
};
|
||||
const shareable2 = {
|
||||
const options2 = {
|
||||
analyzeCommits: 'analyzeCommits2',
|
||||
publish: [{path: 'publishShareable', param: 'publishShareable_param2'}],
|
||||
branch: 'test_branch2',
|
||||
tagFormat: `v\${version}`,
|
||||
};
|
||||
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
await gitRepo();
|
||||
// Create package.json, shareable1.json and shareable2.json in repository root
|
||||
await outputJson('./package.json', {release});
|
||||
await outputJson('./shareable1.json', shareable1);
|
||||
await outputJson('./shareable2.json', shareable2);
|
||||
await outputJson(path.resolve(cwd, 'package.json'), {release: pkgOptions});
|
||||
await outputJson(path.resolve(cwd, 'shareable1.json'), options1);
|
||||
await outputJson(path.resolve(cwd, 'shareable2.json'), options2);
|
||||
|
||||
const {options} = await t.context.getConfig(opts);
|
||||
const {options} = await t.context.getConfig({cwd}, cliOptions);
|
||||
|
||||
// Verify the options contains the plugin config from package.json and shareable2.json
|
||||
t.deepEqual(options, omit({...shareable2, ...release, ...opts}, 'extends'));
|
||||
t.deepEqual(options, omit({...options2, ...pkgOptions, ...cliOptions}, 'extends'));
|
||||
// Verify the plugins module is called with the plugin options from package.json and shareable2.json
|
||||
t.deepEqual(t.context.plugins.args[0][0], omit({...shareable2, ...release, ...opts}, 'extends'));
|
||||
t.deepEqual(t.context.plugins.args[0][0], {
|
||||
cwd,
|
||||
options: omit({...options2, ...pkgOptions, ...cliOptions}, 'extends'),
|
||||
});
|
||||
});
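The assertions above also pin down the new plugins-factory contract: the first argument is the whole context (`{cwd, options}`), the second maps plugin names to the shareable config that declared them. A hedged sketch of a test double compatible with those assertions (assuming sinon, as used for `t.context.plugins` in these tests):

const plugins = stub().callsFake((context, pluginsPath) => {
  // context.cwd      -> directory the release is resolved from
  // context.options  -> merged options, with 'extends' already stripped
  // pluginsPath      -> map of plugin names to the shareable config that declared them
  return {};
});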
|
||||
|
||||
test.serial('Allow to unset properties defined in shareable config with "null"', async t => {
|
||||
const release = {
|
||||
test('Allow to unset properties defined in shareable config with "null"', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
const {cwd} = await gitRepo();
|
||||
const pkgOptions = {
|
||||
extends: './shareable.json',
|
||||
analyzeCommits: null,
|
||||
branch: 'test_branch',
|
||||
repositoryUrl: 'https://host.null/owner/module.git',
|
||||
};
|
||||
const shareable = {
|
||||
const options1 = {
|
||||
generateNotes: 'generateNotes',
|
||||
analyzeCommits: {path: 'analyzeCommits', param: 'analyzeCommits_param'},
|
||||
tagFormat: `v\${version}`,
|
||||
};
|
||||
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
await gitRepo();
|
||||
// Create package.json and shareable.json in repository root
|
||||
await outputJson('./package.json', {release});
|
||||
await outputJson('./shareable.json', shareable);
|
||||
await outputJson(path.resolve(cwd, 'package.json'), {release: pkgOptions});
|
||||
await outputJson(path.resolve(cwd, 'shareable.json'), options1);
|
||||
|
||||
const {options} = await t.context.getConfig();
|
||||
const {options} = await t.context.getConfig({cwd});
|
||||
|
||||
// Verify the options contains the plugin config from shareable.json
|
||||
t.deepEqual(options, {...omit(shareable, 'analyzeCommits'), ...omit(release, ['extends', 'analyzeCommits'])});
|
||||
t.deepEqual(options, {...omit(options1, 'analyzeCommits'), ...omit(pkgOptions, ['extends', 'analyzeCommits'])});
|
||||
// Verify the plugins module is called with the plugin options from shareable.json
|
||||
t.deepEqual(t.context.plugins.args[0][0], {
|
||||
...omit(shareable, 'analyzeCommits'),
|
||||
...omit(release, ['extends', 'analyzeCommits']),
|
||||
options: {
|
||||
...omit(options1, 'analyzeCommits'),
|
||||
...omit(pkgOptions, ['extends', 'analyzeCommits']),
|
||||
},
|
||||
cwd,
|
||||
});
|
||||
t.deepEqual(t.context.plugins.args[0][1], {
|
||||
generateNotes: './shareable.json',
|
||||
@@ -417,34 +394,35 @@ test.serial('Allow to unset properties defined in shareable config with "null"',
|
||||
});
|
||||
});
|
||||
|
||||
test.serial('Allow to unset properties defined in shareable config with "undefined"', async t => {
|
||||
const release = {
|
||||
test('Allow to unset properties defined in shareable config with "undefined"', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
const {cwd} = await gitRepo();
|
||||
const pkgOptions = {
|
||||
extends: './shareable.json',
|
||||
analyzeCommits: undefined,
|
||||
branch: 'test_branch',
|
||||
repositoryUrl: 'https://host.null/owner/module.git',
|
||||
};
|
||||
const shareable = {
|
||||
const options1 = {
|
||||
generateNotes: 'generateNotes',
|
||||
analyzeCommits: {path: 'analyzeCommits', param: 'analyzeCommits_param'},
|
||||
tagFormat: `v\${version}`,
|
||||
};
|
||||
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
await gitRepo();
|
||||
// Create package.json and release.config.js in repository root
|
||||
// await outputJson('./package.json', {release});
|
||||
await writeFile('release.config.js', `module.exports = ${format(release)}`);
|
||||
await outputJson('./shareable.json', shareable);
|
||||
await writeFile(path.resolve(cwd, 'release.config.js'), `module.exports = ${format(pkgOptions)}`);
|
||||
await outputJson(path.resolve(cwd, 'shareable.json'), options1);
|
||||
|
||||
const {options} = await t.context.getConfig();
|
||||
const {options} = await t.context.getConfig({cwd});
|
||||
|
||||
// Verify the options contains the plugin config from shareable.json
|
||||
t.deepEqual(options, {...omit(shareable, 'analyzeCommits'), ...omit(release, ['extends', 'analyzeCommits'])});
|
||||
t.deepEqual(options, {...omit(options1, 'analyzeCommits'), ...omit(pkgOptions, ['extends', 'analyzeCommits'])});
|
||||
// Verify the plugins module is called with the plugin options from shareable.json
|
||||
t.deepEqual(t.context.plugins.args[0][0], {
|
||||
...omit(shareable, 'analyzeCommits'),
|
||||
...omit(release, ['extends', 'analyzeCommits']),
|
||||
options: {
|
||||
...omit(options1, 'analyzeCommits'),
|
||||
...omit(pkgOptions, ['extends', 'analyzeCommits']),
|
||||
},
|
||||
cwd,
|
||||
});
|
||||
t.deepEqual(t.context.plugins.args[0][1], {
|
||||
generateNotes: './shareable.json',
|
||||
@@ -452,17 +430,16 @@ test.serial('Allow to unset properties defined in shareable config with "undefin
|
||||
});
|
||||
});
|
||||
|
||||
test.serial('Throw an Error if one of the shareable config cannot be found', async t => {
|
||||
const release = {extends: ['./shareable1.json', 'non-existing-path']};
|
||||
const shareable = {analyzeCommits: 'analyzeCommits'};
|
||||
|
||||
test('Throw an Error if one of the shareable config cannot be found', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
await gitRepo();
|
||||
const {cwd} = await gitRepo();
|
||||
const pkgOptions = {extends: ['./shareable1.json', 'non-existing-path']};
|
||||
const options1 = {analyzeCommits: 'analyzeCommits'};
|
||||
// Create package.json and shareable.json in repository root
|
||||
await outputJson('./package.json', {release});
|
||||
await outputJson('./shareable1.json', shareable);
|
||||
await outputJson(path.resolve(cwd, 'package.json'), {release: pkgOptions});
|
||||
await outputJson(path.resolve(cwd, 'shareable1.json'), options1);
|
||||
|
||||
const error = await t.throws(t.context.getConfig(), Error);
|
||||
const error = await t.throws(t.context.getConfig({cwd}), Error);
|
||||
|
||||
t.is(error.message, "Cannot find module 'non-existing-path'");
|
||||
t.is(error.code, 'MODULE_NOT_FOUND');
|
||||
|
@@ -2,194 +2,266 @@ import test from 'ava';
|
||||
import getAuthUrl from '../lib/get-git-auth-url';
|
||||
import {gitRepo} from './helpers/git-utils';
|
||||
|
||||
// Save the current process.env
|
||||
const envBackup = Object.assign({}, process.env);
|
||||
// Save the current working directory
|
||||
const cwd = process.cwd();
|
||||
const env = {GIT_ASKPASS: 'echo', GIT_TERMINAL_PROMPT: 0};
|
||||
|
||||
test.beforeEach(() => {
|
||||
delete process.env.GIT_CREDENTIALS;
|
||||
delete process.env.GH_TOKEN;
|
||||
delete process.env.GITHUB_TOKEN;
|
||||
delete process.env.GL_TOKEN;
|
||||
delete process.env.GITLAB_TOKEN;
|
||||
delete process.env.BB_TOKEN;
|
||||
delete process.env.BITBUCKET_TOKEN;
|
||||
process.env.GIT_ASKPASS = 'echo';
|
||||
process.env.GIT_TERMINAL_PROMPT = 0;
|
||||
});
|
||||
test('Return the same "git" formatted URL if "gitCredentials" is not defined', async t => {
|
||||
const {cwd} = await gitRepo();
|
||||
|
||||
test.afterEach.always(() => {
|
||||
// Restore process.env
|
||||
process.env = envBackup;
|
||||
// Restore the current working directory
|
||||
process.chdir(cwd);
|
||||
});
|
||||
|
||||
test.serial('Return the same "git" formatted URL if "gitCredentials" is not defined', async t => {
|
||||
t.is(await getAuthUrl({repositoryUrl: 'git@host.null:owner/repo.git'}), 'git@host.null:owner/repo.git');
|
||||
});
|
||||
|
||||
test.serial('Return the same "https" formatted URL if "gitCredentials" is not defined', async t => {
|
||||
t.is(await getAuthUrl({repositoryUrl: 'https://host.null/owner/repo.git'}), 'https://host.null/owner/repo.git');
|
||||
});
|
||||
|
||||
test.serial(
|
||||
'Return the "https" formatted URL if "gitCredentials" is not defined and repositoryUrl is a "git+https" URL',
|
||||
async t => {
|
||||
t.is(await getAuthUrl({repositoryUrl: 'git+https://host.null/owner/repo.git'}), 'https://host.null/owner/repo.git');
|
||||
}
|
||||
);
|
||||
|
||||
test.serial('Do not add trailing ".git" if not present in the original URL', async t => {
|
||||
t.is(await getAuthUrl({repositoryUrl: 'git@host.null:owner/repo'}), 'git@host.null:owner/repo');
|
||||
});
|
||||
|
||||
test.serial('Handle "https" URL with group and subgroup', async t => {
|
||||
t.is(
|
||||
await getAuthUrl({repositoryUrl: 'https://host.null/group/subgroup/owner/repo.git'}),
|
||||
await getAuthUrl({cwd, env, options: {branch: 'master', repositoryUrl: 'git@host.null:owner/repo.git'}}),
|
||||
'git@host.null:owner/repo.git'
|
||||
);
|
||||
});
|
||||
|
||||
test('Return the same "https" formatted URL if "gitCredentials" is not defined', async t => {
|
||||
const {cwd} = await gitRepo();
|
||||
|
||||
t.is(
|
||||
await getAuthUrl({cwd, env, options: {branch: 'master', repositoryUrl: 'https://host.null/owner/repo.git'}}),
|
||||
'https://host.null/owner/repo.git'
|
||||
);
|
||||
});
|
||||
|
||||
test('Return the "https" formatted URL if "gitCredentials" is not defined and repositoryUrl is a "git+https" URL', async t => {
|
||||
const {cwd} = await gitRepo();
|
||||
|
||||
t.is(
|
||||
await getAuthUrl({cwd, env, options: {branch: 'master', repositoryUrl: 'git+https://host.null/owner/repo.git'}}),
|
||||
'https://host.null/owner/repo.git'
|
||||
);
|
||||
});
|
||||
|
||||
test('Do not add trailing ".git" if not present in the original URL', async t => {
|
||||
const {cwd} = await gitRepo();
|
||||
|
||||
t.is(
|
||||
await getAuthUrl({cwd, env, options: {branch: 'master', repositoryUrl: 'git@host.null:owner/repo'}}),
|
||||
'git@host.null:owner/repo'
|
||||
);
|
||||
});
|
||||
|
||||
test('Handle "https" URL with group and subgroup', async t => {
|
||||
const {cwd} = await gitRepo();
|
||||
|
||||
t.is(
|
||||
await getAuthUrl({
|
||||
cwd,
|
||||
env,
|
||||
options: {branch: 'master', repositoryUrl: 'https://host.null/group/subgroup/owner/repo.git'},
|
||||
}),
|
||||
'https://host.null/group/subgroup/owner/repo.git'
|
||||
);
|
||||
});
|
||||
|
||||
test.serial('Handle "git" URL with group and subgroup', async t => {
|
||||
test('Handle "git" URL with group and subgroup', async t => {
|
||||
const {cwd} = await gitRepo();
|
||||
|
||||
t.is(
|
||||
await getAuthUrl({repositoryUrl: 'git@host.null:group/subgroup/owner/repo.git'}),
|
||||
await getAuthUrl({
|
||||
cwd,
|
||||
env,
|
||||
options: {branch: 'master', repositoryUrl: 'git@host.null:group/subgroup/owner/repo.git'},
|
||||
}),
|
||||
'git@host.null:group/subgroup/owner/repo.git'
|
||||
);
|
||||
});
|
||||
|
||||
test.serial('Convert shorthand URL', async t => {
|
||||
test('Convert shorthand URL', async t => {
|
||||
const {cwd} = await gitRepo();
|
||||
|
||||
t.is(
|
||||
await getAuthUrl({repositoryUrl: 'semanitc-release/semanitc-release'}),
|
||||
await getAuthUrl({cwd, env, options: {repositoryUrl: 'semanitc-release/semanitc-release'}}),
|
||||
'https://github.com/semanitc-release/semanitc-release.git'
|
||||
);
|
||||
});
|
||||
|
||||
test.serial('Convert GitLab shorthand URL', async t => {
|
||||
test('Convert GitLab shorthand URL', async t => {
|
||||
const {cwd} = await gitRepo();
|
||||
|
||||
t.is(
|
||||
await getAuthUrl({repositoryUrl: 'gitlab:semanitc-release/semanitc-release'}),
|
||||
await getAuthUrl({
|
||||
cwd,
|
||||
env,
|
||||
options: {branch: 'master', repositoryUrl: 'gitlab:semanitc-release/semanitc-release'},
|
||||
}),
|
||||
'https://gitlab.com/semanitc-release/semanitc-release.git'
|
||||
);
|
||||
});
|
||||
|
||||
test.serial(
|
||||
'Return the "https" formatted URL if "gitCredentials" is defined and repositoryUrl is a "git" URL',
|
||||
async t => {
|
||||
process.env.GIT_CREDENTIALS = 'user:pass';
|
||||
test('Return the "https" formatted URL if "gitCredentials" is defined and repositoryUrl is a "git" URL', async t => {
|
||||
const {cwd} = await gitRepo();
|
||||
|
||||
t.is(
|
||||
await getAuthUrl({repositoryUrl: 'git@host.null:owner/repo.git'}),
|
||||
await getAuthUrl({
|
||||
cwd,
|
||||
env: {...env, GIT_CREDENTIALS: 'user:pass'},
|
||||
options: {branch: 'master', repositoryUrl: 'git@host.null:owner/repo.git'},
|
||||
}),
|
||||
'https://user:pass@host.null/owner/repo.git'
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
test.serial(
|
||||
'Return the "https" formatted URL if "gitCredentials" is defined and repositoryUrl is a "https" URL',
|
||||
async t => {
|
||||
process.env.GIT_CREDENTIALS = 'user:pass';
|
||||
t.is(
|
||||
await getAuthUrl({repositoryUrl: 'https://host.null/owner/repo.git'}),
|
||||
'https://user:pass@host.null/owner/repo.git'
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
test.serial(
|
||||
'Return the "http" formatted URL if "gitCredentials" is defined and repositoryUrl is a "http" URL',
|
||||
async t => {
|
||||
process.env.GIT_CREDENTIALS = 'user:pass';
|
||||
t.is(
|
||||
await getAuthUrl({repositoryUrl: 'http://host.null/owner/repo.git'}),
|
||||
'http://user:pass@host.null/owner/repo.git'
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
test.serial(
|
||||
'Return the "https" formatted URL if "gitCredentials" is defined and repositoryUrl is a "git+https" URL',
|
||||
async t => {
|
||||
process.env.GIT_CREDENTIALS = 'user:pass';
|
||||
t.is(
|
||||
await getAuthUrl({repositoryUrl: 'git+https://host.null/owner/repo.git'}),
|
||||
'https://user:pass@host.null/owner/repo.git'
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
test.serial(
|
||||
'Return the "http" formatted URL if "gitCredentials" is defined and repositoryUrl is a "git+http" URL',
|
||||
async t => {
|
||||
process.env.GIT_CREDENTIALS = 'user:pass';
|
||||
t.is(
|
||||
await getAuthUrl({repositoryUrl: 'git+http://host.null/owner/repo.git'}),
|
||||
'http://user:pass@host.null/owner/repo.git'
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
test.serial('Return the "https" formatted URL if "gitCredentials" is defined with "GH_TOKEN"', async t => {
|
||||
process.env.GH_TOKEN = 'token';
|
||||
t.is(await getAuthUrl({repositoryUrl: 'git@host.null:owner/repo.git'}), 'https://token@host.null/owner/repo.git');
|
||||
});
|
||||
|
||||
test.serial('Return the "https" formatted URL if "gitCredentials" is defined with "GITHUB_TOKEN"', async t => {
|
||||
process.env.GITHUB_TOKEN = 'token';
|
||||
t.is(await getAuthUrl({repositoryUrl: 'git@host.null:owner/repo.git'}), 'https://token@host.null/owner/repo.git');
|
||||
test('Return the "https" formatted URL if "gitCredentials" is defined and repositoryUrl is a "https" URL', async t => {
|
||||
const {cwd} = await gitRepo();
|
||||
|
||||
t.is(
|
||||
await getAuthUrl({
|
||||
cwd,
|
||||
env: {...env, GIT_CREDENTIALS: 'user:pass'},
|
||||
options: {branch: 'master', repositoryUrl: 'https://host.null/owner/repo.git'},
|
||||
}),
|
||||
'https://user:pass@host.null/owner/repo.git'
|
||||
);
|
||||
});
|
||||
|
||||
test.serial('Return the "https" formatted URL if "gitCredentials" is defined with "GL_TOKEN"', async t => {
|
||||
process.env.GL_TOKEN = 'token';
|
||||
test('Return the "http" formatted URL if "gitCredentials" is defined and repositoryUrl is a "http" URL', async t => {
|
||||
const {cwd} = await gitRepo();
|
||||
|
||||
t.is(
|
||||
await getAuthUrl({repositoryUrl: 'git@host.null:owner/repo.git'}),
|
||||
await getAuthUrl({
|
||||
cwd,
|
||||
env: {...env, GIT_CREDENTIALS: 'user:pass'},
|
||||
options: {branch: 'master', repositoryUrl: 'http://host.null/owner/repo.git'},
|
||||
}),
|
||||
'http://user:pass@host.null/owner/repo.git'
|
||||
);
|
||||
});
|
||||
|
||||
test('Return the "https" formatted URL if "gitCredentials" is defined and repositoryUrl is a "git+https" URL', async t => {
|
||||
const {cwd} = await gitRepo();
|
||||
|
||||
t.is(
|
||||
await getAuthUrl({
|
||||
cwd,
|
||||
env: {...env, GIT_CREDENTIALS: 'user:pass'},
|
||||
options: {branch: 'master', repositoryUrl: 'git+https://host.null/owner/repo.git'},
|
||||
}),
|
||||
'https://user:pass@host.null/owner/repo.git'
|
||||
);
|
||||
});
|
||||
|
||||
test('Return the "http" formatted URL if "gitCredentials" is defined and repositoryUrl is a "git+http" URL', async t => {
|
||||
const {cwd} = await gitRepo();
|
||||
|
||||
t.is(
|
||||
await getAuthUrl({
|
||||
cwd,
|
||||
env: {...env, GIT_CREDENTIALS: 'user:pass'},
|
||||
options: {branch: 'master', repositoryUrl: 'git+http://host.null/owner/repo.git'},
|
||||
}),
|
||||
'http://user:pass@host.null/owner/repo.git'
|
||||
);
|
||||
});
|
||||
|
||||
test('Return the "https" formatted URL if "gitCredentials" is defined with "GH_TOKEN"', async t => {
|
||||
const {cwd} = await gitRepo();
|
||||
|
||||
t.is(
|
||||
await getAuthUrl({
|
||||
cwd,
|
||||
env: {...env, GH_TOKEN: 'token'},
|
||||
options: {branch: 'master', repositoryUrl: 'git@host.null:owner/repo.git'},
|
||||
}),
|
||||
'https://token@host.null/owner/repo.git'
|
||||
);
|
||||
});
|
||||
|
||||
test('Return the "https" formatted URL if "gitCredentials" is defined with "GITHUB_TOKEN"', async t => {
|
||||
const {cwd} = await gitRepo();
|
||||
|
||||
t.is(
|
||||
await getAuthUrl({
|
||||
cwd,
|
||||
env: {...env, GITHUB_TOKEN: 'token'},
|
||||
options: {branch: 'master', repositoryUrl: 'git@host.null:owner/repo.git'},
|
||||
}),
|
||||
'https://token@host.null/owner/repo.git'
|
||||
);
|
||||
});
|
||||
|
||||
test('Return the "https" formatted URL if "gitCredentials" is defined with "GL_TOKEN"', async t => {
|
||||
const {cwd} = await gitRepo();
|
||||
|
||||
t.is(
|
||||
await getAuthUrl({
|
||||
cwd,
|
||||
env: {...env, GL_TOKEN: 'token'},
|
||||
options: {branch: 'master', repositoryUrl: 'git@host.null:owner/repo.git'},
|
||||
}),
|
||||
'https://gitlab-ci-token:token@host.null/owner/repo.git'
|
||||
);
|
||||
});
|
||||
|
||||
test.serial('Return the "https" formatted URL if "gitCredentials" is defined with "GITLAB_TOKEN"', async t => {
|
||||
process.env.GITLAB_TOKEN = 'token';
|
||||
test('Return the "https" formatted URL if "gitCredentials" is defined with "GITLAB_TOKEN"', async t => {
|
||||
const {cwd} = await gitRepo();
|
||||
|
||||
t.is(
|
||||
await getAuthUrl({repositoryUrl: 'git@host.null:owner/repo.git'}),
|
||||
await getAuthUrl({
|
||||
cwd,
|
||||
env: {...env, GITLAB_TOKEN: 'token'},
|
||||
options: {branch: 'master', repositoryUrl: 'git@host.null:owner/repo.git'},
|
||||
}),
|
||||
'https://gitlab-ci-token:token@host.null/owner/repo.git'
|
||||
);
|
||||
});
|
||||
|
||||
test.serial('Return the "https" formatted URL if "gitCredentials" is defined with "BB_TOKEN"', async t => {
|
||||
process.env.BB_TOKEN = 'token';
|
||||
test('Return the "https" formatted URL if "gitCredentials" is defined with "BB_TOKEN"', async t => {
|
||||
const {cwd} = await gitRepo();
|
||||
|
||||
t.is(
|
||||
await getAuthUrl({repositoryUrl: 'git@host.null:owner/repo.git'}),
|
||||
await getAuthUrl({
|
||||
cwd,
|
||||
env: {...env, BB_TOKEN: 'token'},
|
||||
options: {branch: 'master', repositoryUrl: 'git@host.null:owner/repo.git'},
|
||||
}),
|
||||
'https://x-token-auth:token@host.null/owner/repo.git'
|
||||
);
|
||||
});
|
||||
|
||||
test.serial('Return the "https" formatted URL if "gitCredentials" is defined with "BITBUCKET_TOKEN"', async t => {
|
||||
process.env.BITBUCKET_TOKEN = 'token';
|
||||
test('Return the "https" formatted URL if "gitCredentials" is defined with "BITBUCKET_TOKEN"', async t => {
|
||||
const {cwd} = await gitRepo();
|
||||
|
||||
t.is(
|
||||
await getAuthUrl({repositoryUrl: 'git@host.null:owner/repo.git'}),
|
||||
await getAuthUrl({
|
||||
cwd,
|
||||
env: {...env, BITBUCKET_TOKEN: 'token'},
|
||||
options: {branch: 'master', repositoryUrl: 'git@host.null:owner/repo.git'},
|
||||
}),
|
||||
'https://x-token-auth:token@host.null/owner/repo.git'
|
||||
);
|
||||
});
|
||||
|
||||
test.serial('Handle "https" URL with group and subgroup, with "GIT_CREDENTIALS"', async t => {
|
||||
process.env.GIT_CREDENTIALS = 'user:pass';
|
||||
test('Handle "https" URL with group and subgroup, with "GIT_CREDENTIALS"', async t => {
|
||||
const {cwd} = await gitRepo();
|
||||
|
||||
t.is(
|
||||
await getAuthUrl({repositoryUrl: 'https://host.null/group/subgroup/owner/repo.git'}),
|
||||
await getAuthUrl({
|
||||
cwd,
|
||||
env: {...env, GIT_CREDENTIALS: 'user:pass'},
|
||||
options: {branch: 'master', repositoryUrl: 'https://host.null/group/subgroup/owner/repo.git'},
|
||||
}),
|
||||
'https://user:pass@host.null/group/subgroup/owner/repo.git'
|
||||
);
|
||||
});
|
||||
|
||||
test.serial('Handle "git" URL with group and subgroup, with "GIT_CREDENTIALS', async t => {
|
||||
process.env.GIT_CREDENTIALS = 'user:pass';
|
||||
test('Handle "git" URL with group and subgroup, with "GIT_CREDENTIALS', async t => {
|
||||
const {cwd} = await gitRepo();
|
||||
|
||||
t.is(
|
||||
await getAuthUrl({repositoryUrl: 'git@host.null:group/subgroup/owner/repo.git'}),
|
||||
await getAuthUrl({
|
||||
cwd,
|
||||
env: {...env, GIT_CREDENTIALS: 'user:pass'},
|
||||
options: {branch: 'master', repositoryUrl: 'git@host.null:group/subgroup/owner/repo.git'},
|
||||
}),
|
||||
'https://user:pass@host.null/group/subgroup/owner/repo.git'
|
||||
);
|
||||
});
|
||||
|
||||
test.serial('Do not add git credential to repositoryUrl if push is allowed', async t => {
|
||||
process.env.GIT_CREDENTIALS = 'user:pass';
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
const repositoryUrl = await gitRepo(true);
|
||||
test('Do not add git credential to repositoryUrl if push is allowed', async t => {
|
||||
const {cwd, repositoryUrl} = await gitRepo(true);
|
||||
|
||||
t.is(await getAuthUrl({repositoryUrl}), repositoryUrl);
|
||||
t.is(
|
||||
await getAuthUrl({cwd, env: {...env, GIT_CREDENTIALS: 'user:pass'}, options: {branch: 'master', repositoryUrl}}),
|
||||
repositoryUrl
|
||||
);
|
||||
});
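A side effect worth spelling out: because credentials now travel in the context `env` rather than in `process.env`, these tests no longer need `test.serial`, and independent contexts can even be resolved concurrently. A small hedged sketch (inside any async test body, reusing the `env` constant and `gitRepo` helper from this file):

const {cwd} = await gitRepo();
const [gitHubUrl, gitLabUrl] = await Promise.all([
  getAuthUrl({cwd, env: {...env, GH_TOKEN: 'token'}, options: {branch: 'master', repositoryUrl: 'git@host.null:owner/repo.git'}}),
  getAuthUrl({cwd, env: {...env, GL_TOKEN: 'token'}, options: {branch: 'master', repositoryUrl: 'git@host.null:owner/repo.git'}}),
]);
// gitHubUrl => 'https://token@host.null/owner/repo.git'
// gitLabUrl => 'https://gitlab-ci-token:token@host.null/owner/repo.git'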
|
||||
|
@@ -3,155 +3,147 @@ import {stub} from 'sinon';
|
||||
import getLastRelease from '../lib/get-last-release';
|
||||
import {gitRepo, gitCommits, gitTagVersion, gitCheckout} from './helpers/git-utils';
|
||||
|
||||
// Save the current working directory
|
||||
const cwd = process.cwd();
|
||||
|
||||
test.beforeEach(t => {
|
||||
// Stub the logger functions
|
||||
t.context.log = stub();
|
||||
t.context.logger = {log: t.context.log};
|
||||
});
|
||||
|
||||
test.afterEach.always(() => {
|
||||
// Restore the current working directory
|
||||
process.chdir(cwd);
|
||||
});
|
||||
|
||||
test.serial('Get the highest non-prerelease valid tag', async t => {
|
||||
test('Get the highest non-prerelease valid tag', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
await gitRepo();
|
||||
const {cwd} = await gitRepo();
|
||||
// Create some commits and tags
|
||||
await gitCommits(['First']);
|
||||
await gitTagVersion('foo');
|
||||
const commits = await gitCommits(['Second']);
|
||||
await gitTagVersion('v2.0.0');
|
||||
await gitCommits(['Third']);
|
||||
await gitTagVersion('v1.0.0');
|
||||
await gitCommits(['Fourth']);
|
||||
await gitTagVersion('v3.0');
|
||||
await gitCommits(['Fifth']);
|
||||
await gitTagVersion('v3.0.0-beta.1');
|
||||
await gitCommits(['First'], {cwd});
|
||||
await gitTagVersion('foo', undefined, {cwd});
|
||||
const commits = await gitCommits(['Second'], {cwd});
|
||||
await gitTagVersion('v2.0.0', undefined, {cwd});
|
||||
await gitCommits(['Third'], {cwd});
|
||||
await gitTagVersion('v1.0.0', undefined, {cwd});
|
||||
await gitCommits(['Fourth'], {cwd});
|
||||
await gitTagVersion('v3.0', undefined, {cwd});
|
||||
await gitCommits(['Fifth'], {cwd});
|
||||
await gitTagVersion('v3.0.0-beta.1', undefined, {cwd});
|
||||
|
||||
const result = await getLastRelease(`v\${version}`, t.context.logger);
|
||||
const result = await getLastRelease({cwd, options: {tagFormat: `v\${version}`}, logger: t.context.logger});
|
||||
|
||||
t.deepEqual(result, {gitHead: commits[0].hash, gitTag: 'v2.0.0', version: '2.0.0'});
|
||||
t.deepEqual(t.context.log.args[0], ['Found git tag %s associated with version %s', 'v2.0.0', '2.0.0']);
|
||||
});
|
||||
|
||||
test.serial('Get the highest tag in the history of the current branch', async t => {
|
||||
test('Get the highest tag in the history of the current branch', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
await gitRepo();
|
||||
const {cwd} = await gitRepo();
|
||||
// Add commit to the master branch
|
||||
await gitCommits(['First']);
|
||||
await gitCommits(['First'], {cwd});
|
||||
// Create the tag corresponding to version 1.0.0
|
||||
// Create the new branch 'other-branch' from master
|
||||
await gitCheckout('other-branch');
|
||||
await gitCheckout('other-branch', true, {cwd});
|
||||
// Add commit to the 'other-branch' branch
|
||||
await gitCommits(['Second']);
|
||||
await gitCommits(['Second'], {cwd});
|
||||
// Create the tag corresponding to version 3.0.0
|
||||
await gitTagVersion('v3.0.0');
|
||||
await gitTagVersion('v3.0.0', undefined, {cwd});
|
||||
// Checkout master
|
||||
await gitCheckout('master', false);
|
||||
await gitCheckout('master', false, {cwd});
|
||||
// Add another commit to the master branch
|
||||
const commits = await gitCommits(['Third']);
|
||||
const commits = await gitCommits(['Third'], {cwd});
|
||||
// Create the tag corresponding to version 2.0.0
|
||||
await gitTagVersion('v2.0.0');
|
||||
await gitTagVersion('v2.0.0', undefined, {cwd});
|
||||
|
||||
const result = await getLastRelease(`v\${version}`, t.context.logger);
|
||||
const result = await getLastRelease({cwd, options: {tagFormat: `v\${version}`}, logger: t.context.logger});
|
||||
|
||||
t.deepEqual(result, {gitHead: commits[0].hash, gitTag: 'v2.0.0', version: '2.0.0'});
|
||||
});
|
||||
|
||||
test.serial('Match the tag name from the beginning of the string', async t => {
|
||||
test('Match the tag name from the beginning of the string', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
await gitRepo();
|
||||
const commits = await gitCommits(['First']);
|
||||
await gitTagVersion('prefix/v1.0.0');
|
||||
await gitTagVersion('prefix/v2.0.0');
|
||||
await gitTagVersion('other-prefix/v3.0.0');
|
||||
const {cwd} = await gitRepo();
|
||||
const commits = await gitCommits(['First'], {cwd});
|
||||
await gitTagVersion('prefix/v1.0.0', undefined, {cwd});
|
||||
await gitTagVersion('prefix/v2.0.0', undefined, {cwd});
|
||||
await gitTagVersion('other-prefix/v3.0.0', undefined, {cwd});
|
||||
|
||||
const result = await getLastRelease(`prefix/v\${version}`, t.context.logger);
|
||||
const result = await getLastRelease({cwd, options: {tagFormat: `prefix/v\${version}`}, logger: t.context.logger});
|
||||
|
||||
t.deepEqual(result, {gitHead: commits[0].hash, gitTag: 'prefix/v2.0.0', version: '2.0.0'});
|
||||
});
|
||||
|
||||
test.serial('Return empty object if no valid tag is found', async t => {
|
||||
test('Return empty object if no valid tag is found', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
await gitRepo();
|
||||
const {cwd} = await gitRepo();
|
||||
// Create some commits and tags
|
||||
await gitCommits(['First']);
|
||||
await gitTagVersion('foo');
|
||||
await gitCommits(['Second']);
|
||||
await gitTagVersion('v2.0.x');
|
||||
await gitCommits(['Third']);
|
||||
await gitTagVersion('v3.0');
|
||||
await gitCommits(['First'], {cwd});
|
||||
await gitTagVersion('foo', undefined, {cwd});
|
||||
await gitCommits(['Second'], {cwd});
|
||||
await gitTagVersion('v2.0.x', undefined, {cwd});
|
||||
await gitCommits(['Third'], {cwd});
|
||||
await gitTagVersion('v3.0', undefined, {cwd});
|
||||
|
||||
const result = await getLastRelease(`v\${version}`, t.context.logger);
|
||||
const result = await getLastRelease({cwd, options: {tagFormat: `v\${version}`}, logger: t.context.logger});
|
||||
|
||||
t.deepEqual(result, {});
|
||||
t.is(t.context.log.args[0][0], 'No git tag version found');
|
||||
});
|
||||
|
||||
test.serial('Return empty object if no valid tag is found in history', async t => {
|
||||
test('Return empty object if no valid tag is found in history', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
await gitRepo();
|
||||
await gitCommits(['First']);
|
||||
await gitCheckout('other-branch');
|
||||
await gitCommits(['Second']);
|
||||
await gitTagVersion('v1.0.0');
|
||||
await gitTagVersion('v2.0.0');
|
||||
await gitTagVersion('v3.0.0');
|
||||
await gitCheckout('master', false);
|
||||
const {cwd} = await gitRepo();
|
||||
await gitCommits(['First'], {cwd});
|
||||
await gitCheckout('other-branch', true, {cwd});
|
||||
await gitCommits(['Second'], {cwd});
|
||||
await gitTagVersion('v1.0.0', undefined, {cwd});
|
||||
await gitTagVersion('v2.0.0', undefined, {cwd});
|
||||
await gitTagVersion('v3.0.0', undefined, {cwd});
|
||||
await gitCheckout('master', false, {cwd});
|
||||
|
||||
const result = await getLastRelease(`v\${version}`, t.context.logger);
|
||||
const result = await getLastRelease({cwd, options: {tagFormat: `v\${version}`}, logger: t.context.logger});
|
||||
|
||||
t.deepEqual(result, {});
|
||||
t.is(t.context.log.args[0][0], 'No git tag version found');
|
||||
});
|
||||
|
||||
test.serial('Get the highest valid tag corresponding to the "tagFormat"', async t => {
|
||||
test('Get the highest valid tag corresponding to the "tagFormat"', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
await gitRepo();
|
||||
const {cwd} = await gitRepo();
|
||||
// Create some commits and tags
|
||||
const [{hash: gitHead}] = await gitCommits(['First']);
|
||||
const [{hash: gitHead}] = await gitCommits(['First'], {cwd});
|
||||
|
||||
await gitTagVersion('1.0.0');
|
||||
t.deepEqual(await getLastRelease(`\${version}`, t.context.logger), {
|
||||
await gitTagVersion('1.0.0', undefined, {cwd});
|
||||
t.deepEqual(await getLastRelease({cwd, options: {tagFormat: `\${version}`}, logger: t.context.logger}), {
|
||||
gitHead,
|
||||
gitTag: '1.0.0',
|
||||
version: '1.0.0',
|
||||
});
|
||||
|
||||
await gitTagVersion('foo-1.0.0-bar');
|
||||
t.deepEqual(await getLastRelease(`foo-\${version}-bar`, t.context.logger), {
|
||||
await gitTagVersion('foo-1.0.0-bar', undefined, {cwd});
|
||||
t.deepEqual(await getLastRelease({cwd, options: {tagFormat: `foo-\${version}-bar`}, logger: t.context.logger}), {
|
||||
gitHead,
|
||||
gitTag: 'foo-1.0.0-bar',
|
||||
version: '1.0.0',
|
||||
});
|
||||
|
||||
await gitTagVersion('foo-v1.0.0-bar');
|
||||
t.deepEqual(await getLastRelease(`foo-v\${version}-bar`, t.context.logger), {
|
||||
await gitTagVersion('foo-v1.0.0-bar', undefined, {cwd});
|
||||
t.deepEqual(await getLastRelease({cwd, options: {tagFormat: `foo-v\${version}-bar`}, logger: t.context.logger}), {
|
||||
gitHead,
|
||||
gitTag: 'foo-v1.0.0-bar',
|
||||
version: '1.0.0',
|
||||
});
|
||||
|
||||
await gitTagVersion('(.+)/1.0.0/(a-z)');
|
||||
t.deepEqual(await getLastRelease(`(.+)/\${version}/(a-z)`, t.context.logger), {
|
||||
await gitTagVersion('(.+)/1.0.0/(a-z)', undefined, {cwd});
|
||||
t.deepEqual(await getLastRelease({cwd, options: {tagFormat: `(.+)/\${version}/(a-z)`}, logger: t.context.logger}), {
|
||||
gitHead,
|
||||
gitTag: '(.+)/1.0.0/(a-z)',
|
||||
version: '1.0.0',
|
||||
});
|
||||
|
||||
await gitTagVersion('2.0.0-1.0.0-bar.1');
|
||||
t.deepEqual(await getLastRelease(`2.0.0-\${version}-bar.1`, t.context.logger), {
|
||||
await gitTagVersion('2.0.0-1.0.0-bar.1', undefined, {cwd});
|
||||
t.deepEqual(await getLastRelease({cwd, options: {tagFormat: `2.0.0-\${version}-bar.1`}, logger: t.context.logger}), {
|
||||
gitHead,
|
||||
gitTag: '2.0.0-1.0.0-bar.1',
|
||||
version: '1.0.0',
|
||||
});
|
||||
|
||||
await gitTagVersion('3.0.0-bar.1');
|
||||
t.deepEqual(await getLastRelease(`\${version}-bar.1`, t.context.logger), {
|
||||
await gitTagVersion('3.0.0-bar.1', undefined, {cwd});
|
||||
t.deepEqual(await getLastRelease({cwd, options: {tagFormat: `\${version}-bar.1`}, logger: t.context.logger}), {
|
||||
gitHead,
|
||||
gitTag: '3.0.0-bar.1',
|
||||
version: '3.0.0',
|
||||
|
@@ -9,21 +9,33 @@ test.beforeEach(t => {
|
||||
});
|
||||
|
||||
test('Increase version for patch release', t => {
|
||||
const version = getNextVersion('patch', {version: '1.0.0'}, t.context.logger);
|
||||
const version = getNextVersion({
|
||||
nextRelease: {type: 'patch'},
|
||||
lastRelease: {version: '1.0.0'},
|
||||
logger: t.context.logger,
|
||||
});
|
||||
t.is(version, '1.0.1');
|
||||
});
|
||||
|
||||
test('Increase version for minor release', t => {
|
||||
const version = getNextVersion('minor', {version: '1.0.0'}, t.context.logger);
|
||||
const version = getNextVersion({
|
||||
nextRelease: {type: 'minor'},
|
||||
lastRelease: {version: '1.0.0'},
|
||||
logger: t.context.logger,
|
||||
});
|
||||
t.is(version, '1.1.0');
|
||||
});
|
||||
|
||||
test('Increase version for major release', t => {
|
||||
const version = getNextVersion('major', {version: '1.0.0'}, t.context.logger);
|
||||
const version = getNextVersion({
|
||||
nextRelease: {type: 'major'},
|
||||
lastRelease: {version: '1.0.0'},
|
||||
logger: t.context.logger,
|
||||
});
|
||||
t.is(version, '2.0.0');
|
||||
});
|
||||
|
||||
test('Return 1.0.0 if there is no previous release', t => {
|
||||
const version = getNextVersion('minor', {}, t.context.logger);
|
||||
const version = getNextVersion({nextRelease: {type: 'minor'}, lastRelease: {}, logger: t.context.logger});
|
||||
t.is(version, '1.0.0');
|
||||
});
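For completeness, the whole surface of the rewritten helper fits in one hedged example (same import and logger stub as the tests above):

test('Sketch: patch bump computed from a single context object', t => {
  const version = getNextVersion({
    nextRelease: {type: 'patch'},     // what the commit analysis decided
    lastRelease: {version: '1.2.3'},  // what the last git tag said
    logger: t.context.logger,
  });
  t.is(version, '1.2.4');
});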
|
||||
|
213 test/git.test.js
@@ -27,214 +27,203 @@ import {
|
||||
gitDetachedHead,
|
||||
} from './helpers/git-utils';
|
||||
|
||||
// Save the current working directory
|
||||
const cwd = process.cwd();
|
||||
|
||||
test.afterEach.always(() => {
|
||||
// Restore the current working directory
|
||||
process.chdir(cwd);
|
||||
});
|
||||
|
||||
test.serial('Get the last commit sha', async t => {
|
||||
test('Get the last commit sha', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
await gitRepo();
|
||||
const {cwd} = await gitRepo();
|
||||
// Add commits to the master branch
|
||||
const commits = await gitCommits(['First']);
|
||||
const commits = await gitCommits(['First'], {cwd});
|
||||
|
||||
const result = await gitHead();
|
||||
const result = await gitHead({cwd});
|
||||
|
||||
t.is(result, commits[0].hash);
|
||||
});
|
||||
|
||||
test.serial('Throw error if the last commit sha cannot be found', async t => {
|
||||
test('Throw error if the last commit sha cannot be found', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
await gitRepo();
|
||||
const {cwd} = await gitRepo();
|
||||
|
||||
await t.throws(gitHead(), Error);
|
||||
await t.throws(gitHead({cwd}), Error);
|
||||
});
|
||||
|
||||
test.serial('Unshallow and fetch repository', async t => {
|
||||
test('Unshallow and fetch repository', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
const repo = await gitRepo();
|
||||
let {cwd, repositoryUrl} = await gitRepo();
|
||||
// Add commits to the master branch
|
||||
await gitCommits(['First', 'Second']);
|
||||
await gitCommits(['First', 'Second'], {cwd});
|
||||
// Create a shallow clone with only 1 commit
|
||||
await gitShallowClone(repo);
|
||||
cwd = await gitShallowClone(repositoryUrl);
|
||||
|
||||
// Verify the shallow clone contains only one commit
|
||||
t.is((await gitGetCommits()).length, 1);
|
||||
t.is((await gitGetCommits(undefined, {cwd})).length, 1);
|
||||
|
||||
await fetch(repo);
|
||||
await fetch(repositoryUrl, {cwd});
|
||||
|
||||
// Verify the shallow clone contains all the commits
|
||||
t.is((await gitGetCommits()).length, 2);
|
||||
t.is((await gitGetCommits(undefined, {cwd})).length, 2);
|
||||
});
|
||||
|
||||
test.serial('Do not throw error when unshallow a complete repository', async t => {
|
||||
test('Do not throw error when unshallow a complete repository', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
const repo = await gitRepo();
|
||||
const {cwd, repositoryUrl} = await gitRepo();
|
||||
// Add commits to the master branch
|
||||
await gitCommits(['First']);
|
||||
await t.notThrows(fetch(repo));
|
||||
await gitCommits(['First'], {cwd});
|
||||
await t.notThrows(fetch(repositoryUrl, {cwd}));
|
||||
});
|
||||
|
||||
test.serial('Fetch all tags on a detached head repository', async t => {
|
||||
const repo = await gitRepo(true);
|
||||
test('Fetch all tags on a detached head repository', async t => {
|
||||
let {cwd, repositoryUrl} = await gitRepo();
|
||||
|
||||
await gitCommits(['First']);
|
||||
await gitTagVersion('v1.0.0');
|
||||
await gitCommits(['Second']);
|
||||
await gitTagVersion('v1.0.1');
|
||||
const [commit] = await gitCommits(['Third']);
|
||||
await gitTagVersion('v1.1.0');
|
||||
await gitPush();
|
||||
await gitDetachedHead(repo, commit.hash);
|
||||
await gitCommits(['First'], {cwd});
|
||||
await gitTagVersion('v1.0.0', undefined, {cwd});
|
||||
await gitCommits(['Second'], {cwd});
|
||||
await gitTagVersion('v1.0.1', undefined, {cwd});
|
||||
const [commit] = await gitCommits(['Third'], {cwd});
|
||||
await gitTagVersion('v1.1.0', undefined, {cwd});
|
||||
await gitPush(repositoryUrl, 'master', {cwd});
|
||||
cwd = await gitDetachedHead(repositoryUrl, commit.hash);
|
||||
|
||||
await fetch(repo);
|
||||
await fetch(repositoryUrl, {cwd});
|
||||
|
||||
t.deepEqual((await gitTags()).sort(), ['v1.0.0', 'v1.0.1', 'v1.1.0'].sort());
|
||||
t.deepEqual((await gitTags({cwd})).sort(), ['v1.0.0', 'v1.0.1', 'v1.1.0'].sort());
|
||||
});
|
||||
|
||||
test.serial('Verify if the commit `sha` is in the direct history of the current branch', async t => {
|
||||
test('Verify if the commit `sha` is in the direct history of the current branch', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
await gitRepo();
|
||||
const {cwd} = await gitRepo();
|
||||
// Add commits to the master branch
|
||||
const commits = await gitCommits(['First']);
|
||||
const commits = await gitCommits(['First'], {cwd});
|
||||
// Create the new branch 'other-branch' from master
|
||||
await gitCheckout('other-branch');
|
||||
await gitCheckout('other-branch', true, {cwd});
|
||||
// Add commits to the 'other-branch' branch
|
||||
const otherCommits = await gitCommits(['Second']);
|
||||
await gitCheckout('master', false);
|
||||
const otherCommits = await gitCommits(['Second'], {cwd});
|
||||
await gitCheckout('master', false, {cwd});
|
||||
|
||||
t.true(await isRefInHistory(commits[0].hash));
|
||||
t.falsy(await isRefInHistory(otherCommits[0].hash));
|
||||
await t.throws(isRefInHistory('non-existant-sha'));
|
||||
t.true(await isRefInHistory(commits[0].hash, {cwd}));
|
||||
t.falsy(await isRefInHistory(otherCommits[0].hash, {cwd}));
|
||||
await t.throws(isRefInHistory('non-existant-sha', {cwd}));
|
||||
});
|
||||
|
||||
test.serial('Get the commit sha for a given tag or falsy if the tag does not exists', async t => {
|
||||
test('Get the commit sha for a given tag or falsy if the tag does not exists', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
await gitRepo();
|
||||
const {cwd} = await gitRepo();
|
||||
// Add commits to the master branch
|
||||
const commits = await gitCommits(['First']);
|
||||
const commits = await gitCommits(['First'], {cwd});
|
||||
// Create the tag corresponding to version 1.0.0
|
||||
await gitTagVersion('v1.0.0');
|
||||
await gitTagVersion('v1.0.0', undefined, {cwd});
|
||||
|
||||
t.is(await gitTagHead('v1.0.0'), commits[0].hash);
|
||||
t.falsy(await gitTagHead('missing_tag'));
|
||||
t.is(await gitTagHead('v1.0.0', {cwd}), commits[0].hash);
|
||||
t.falsy(await gitTagHead('missing_tag', {cwd}));
|
||||
});
|
||||
|
||||
test.serial('Return git remote repository url from config', async t => {
|
||||
test('Return git remote repository url from config', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
await gitRepo();
|
||||
const {cwd} = await gitRepo();
|
||||
// Add remote.origin.url config
|
||||
await gitAddConfig('remote.origin.url', 'git@hostname.com:owner/package.git');
|
||||
await gitAddConfig('remote.origin.url', 'git@hostname.com:owner/package.git', {cwd});
|
||||
|
||||
t.is(await repoUrl(), 'git@hostname.com:owner/package.git');
|
||||
t.is(await repoUrl({cwd}), 'git@hostname.com:owner/package.git');
|
||||
});
|
||||
|
||||
test.serial('Return git remote repository url set while cloning', async t => {
|
||||
test('Return git remote repository url set while cloning', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
const repo = await gitRepo();
|
||||
await gitCommits(['First']);
|
||||
let {cwd, repositoryUrl} = await gitRepo();
|
||||
await gitCommits(['First'], {cwd});
|
||||
// Create a clone
|
||||
await gitShallowClone(repo);
|
||||
cwd = await gitShallowClone(repositoryUrl);
|
||||
|
||||
t.is(await repoUrl(), repo);
|
||||
t.is(await repoUrl({cwd}), repositoryUrl);
|
||||
});
|
||||
|
||||
test.serial('Return falsy if git repository url is not set', async t => {
|
||||
test('Return falsy if git repository url is not set', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
await gitRepo();
|
||||
const {cwd} = await gitRepo();
|
||||
|
||||
t.falsy(await repoUrl());
|
||||
t.falsy(await repoUrl({cwd}));
|
||||
});
|
||||
|
||||
test.serial('Add tag on head commit', async t => {
|
||||
test('Add tag on head commit', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
await gitRepo();
|
||||
const commits = await gitCommits(['Test commit']);
|
||||
const {cwd} = await gitRepo();
|
||||
const commits = await gitCommits(['Test commit'], {cwd});
|
||||
|
||||
await tag('tag_name');
|
||||
await tag('tag_name', {cwd});
|
||||
|
||||
await t.is(await gitCommitTag(commits[0].hash), 'tag_name');
|
||||
await t.is(await gitCommitTag(commits[0].hash, {cwd}), 'tag_name');
|
||||
});
|
||||
|
||||
test.serial('Push tag and commit to remote repository', async t => {
|
||||
test('Push tag and commit to remote repository', async t => {
|
||||
// Create a git repository with a remote, set the current working directory at the root of the repo
|
||||
const repo = await gitRepo(true);
|
||||
const commits = await gitCommits(['Test commit']);
|
||||
const {cwd, repositoryUrl} = await gitRepo(true);
|
||||
const commits = await gitCommits(['Test commit'], {cwd});
|
||||
|
||||
await tag('tag_name');
|
||||
await push(repo, 'master');
|
||||
await tag('tag_name', {cwd});
|
||||
await push(repositoryUrl, 'master', {cwd});
|
||||
|
||||
t.is(await gitRemoteTagHead(repo, 'tag_name'), commits[0].hash);
|
||||
t.is(await gitRemoteTagHead(repositoryUrl, 'tag_name', {cwd}), commits[0].hash);
|
||||
});
|
||||
|
||||
test.serial('Return "true" if in a Git repository', async t => {
|
||||
test('Return "true" if in a Git repository', async t => {
|
||||
// Create a git repository with a remote, set the current working directory at the root of the repo
|
||||
await gitRepo(true);
|
||||
const {cwd} = await gitRepo(true);
|
||||
|
||||
t.true(await isGitRepo());
|
||||
t.true(await isGitRepo({cwd}));
|
||||
});
|
||||
|
||||
test.serial('Return falsy if not in a Git repository', async t => {
|
||||
const dir = tempy.directory();
|
||||
process.chdir(dir);
|
||||
test('Return falsy if not in a Git repository', async t => {
|
||||
const cwd = tempy.directory();
|
||||
|
||||
t.falsy(await isGitRepo());
|
||||
t.falsy(await isGitRepo({cwd}));
|
||||
});
|
||||
|
||||
test.serial('Return "true" for valid tag names', async t => {
|
||||
test('Return "true" for valid tag names', async t => {
|
||||
t.true(await verifyTagName('1.0.0'));
|
||||
t.true(await verifyTagName('v1.0.0'));
|
||||
t.true(await verifyTagName('tag_name'));
|
||||
t.true(await verifyTagName('tag/name'));
|
||||
});
|
||||
|
||||
test.serial('Return falsy for invalid tag names', async t => {
|
||||
test('Return falsy for invalid tag names', async t => {
|
||||
t.falsy(await verifyTagName('?1.0.0'));
|
||||
t.falsy(await verifyTagName('*1.0.0'));
|
||||
t.falsy(await verifyTagName('[1.0.0]'));
|
||||
t.falsy(await verifyTagName('1.0.0..'));
|
||||
});
|
||||
|
||||
test.serial('Throws error if obtaining the tags fails', async t => {
|
||||
const dir = tempy.directory();
|
||||
process.chdir(dir);
|
||||
test('Throws error if obtaining the tags fails', async t => {
|
||||
const cwd = tempy.directory();
|
||||
|
||||
await t.throws(gitTags());
|
||||
await t.throws(gitTags({cwd}));
|
||||
});
|
||||
|
||||
test.serial('Return "true" if repository is up to date', async t => {
|
||||
await gitRepo(true);
|
||||
await gitCommits(['First']);
|
||||
await gitPush();
|
||||
test('Return "true" if repository is up to date', async t => {
|
||||
const {cwd, repositoryUrl} = await gitRepo(true);
|
||||
await gitCommits(['First'], {cwd});
|
||||
await gitPush(repositoryUrl, 'master', {cwd});
|
||||
|
||||
t.true(await isBranchUpToDate('master'));
|
||||
t.true(await isBranchUpToDate('master', {cwd}));
|
||||
});
|
||||
|
||||
test.serial('Return falsy if repository is not up to date', async t => {
|
||||
const repositoryUrl = await gitRepo(true);
|
||||
const repoDir = process.cwd();
|
||||
await gitCommits(['First']);
|
||||
await gitCommits(['Second']);
|
||||
await gitPush();
|
||||
test('Return falsy if repository is not up to date', async t => {
|
||||
let {cwd, repositoryUrl} = await gitRepo(true);
|
||||
const repoDir = cwd;
|
||||
await gitCommits(['First'], {cwd});
|
||||
await gitCommits(['Second'], {cwd});
|
||||
await gitPush(repositoryUrl, 'master', {cwd});
|
||||
|
||||
t.true(await isBranchUpToDate('master'));
|
||||
t.true(await isBranchUpToDate('master', {cwd}));
|
||||
|
||||
await gitShallowClone(repositoryUrl);
|
||||
await gitCommits(['Third']);
|
||||
await gitPush();
|
||||
process.chdir(repoDir);
|
||||
cwd = await gitShallowClone(repositoryUrl);
|
||||
await gitCommits(['Third'], {cwd});
|
||||
await gitPush('origin', 'master', {cwd});
|
||||
|
||||
t.falsy(await isBranchUpToDate('master'));
|
||||
t.falsy(await isBranchUpToDate('master', {cwd: repoDir}));
|
||||
});
|
||||
|
||||
test.serial('Return "true" if local repository is ahead', async t => {
|
||||
await gitRepo(true);
|
||||
await gitCommits(['First']);
|
||||
await gitPush();
|
||||
await gitCommits(['Second']);
|
||||
test('Return "true" if local repository is ahead', async t => {
|
||||
const {cwd, repositoryUrl} = await gitRepo(true);
|
||||
await gitCommits(['First'], {cwd});
|
||||
await gitPush(repositoryUrl, 'master', {cwd});
|
||||
await gitCommits(['Second'], {cwd});
|
||||
|
||||
t.true(await isBranchUpToDate('master'));
|
||||
t.true(await isBranchUpToDate('master', {cwd}));
|
||||
});
|
||||
|
@@ -24,18 +24,21 @@ import getStream from 'get-stream';
|
||||
* @return {String} The path of the clone if `withRemote` is `true`, the path of the repository otherwise.
|
||||
*/
|
||||
export async function gitRepo(withRemote, branch = 'master') {
|
||||
const dir = tempy.directory();
|
||||
let cwd = tempy.directory();
|
||||
|
||||
process.chdir(dir);
|
||||
await execa('git', ['init'].concat(withRemote ? ['--bare'] : []));
|
||||
await execa('git', ['init'].concat(withRemote ? ['--bare'] : []), {cwd});
|
||||
|
||||
const repositoryUrl = fileUrl(cwd);
|
||||
if (withRemote) {
|
||||
await initBareRepo(fileUrl(dir), branch);
|
||||
await gitShallowClone(fileUrl(dir));
|
||||
await initBareRepo(repositoryUrl, branch);
|
||||
cwd = await gitShallowClone(repositoryUrl, branch);
|
||||
} else {
|
||||
await gitCheckout(branch);
|
||||
await gitCheckout(branch, true, {cwd});
|
||||
}
|
||||
return fileUrl(dir);
|
||||
|
||||
await execa('git', ['config', 'commit.gpgsign', false], {cwd});
|
||||
|
||||
return {cwd, repositoryUrl};
|
||||
}
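Returning `{cwd, repositoryUrl}` instead of `process.chdir()`-ing into a temporary directory is what lets the test files above drop `test.serial`: each fixture is addressed explicitly. A hedged usage sketch built on the helpers used throughout the tests above (inside any async test body):

// Two fully isolated repositories in the same process, no global state touched.
const repoA = await gitRepo();
const repoB = await gitRepo(true); // bare remote plus a working clone
await gitCommits(['First'], {cwd: repoA.cwd});
await gitTagVersion('v1.0.0', undefined, {cwd: repoA.cwd});
await gitCommits(['First'], {cwd: repoB.cwd});
await gitPush(repoB.repositoryUrl, 'master', {cwd: repoB.cwd});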
|
||||
|
||||
/**
|
||||
@@ -50,44 +53,41 @@ export async function gitRepo(withRemote, branch = 'master') {
|
||||
* @param {String} [branch='master'] the branch to initialize.
|
||||
*/
|
||||
export async function initBareRepo(repositoryUrl, branch = 'master') {
|
||||
const clone = tempy.directory();
|
||||
await execa('git', ['clone', '--no-hardlinks', repositoryUrl, clone]);
|
||||
process.chdir(clone);
|
||||
await gitCheckout(branch);
|
||||
await gitCommits(['Initial commit']);
|
||||
await execa('git', ['push', repositoryUrl, branch]);
|
||||
const cwd = tempy.directory();
|
||||
await execa('git', ['clone', '--no-hardlinks', repositoryUrl, cwd], {cwd});
|
||||
await gitCheckout(branch, true, {cwd});
|
||||
await gitCommits(['Initial commit'], {cwd});
|
||||
await execa('git', ['push', repositoryUrl, branch], {cwd});
|
||||
}
|
||||
|
||||
/**
|
||||
* Create commits on the current git repository.
|
||||
*
|
||||
* @param {Array<string>} messages commit messages.
|
||||
* @param {Array<string>} messages Commit messages.
|
||||
* @param {Object} [execaOpts] Options to pass to `execa`.
|
||||
*
|
||||
* @returns {Array<Commit>} The created commits, in reverse order (to match `git log` order).
|
||||
*/
|
||||
export async function gitCommits(messages) {
|
||||
await pReduce(
|
||||
messages,
|
||||
async (commits, msg) => {
|
||||
const stdout = await execa.stdout('git', ['commit', '-m', msg, '--allow-empty', '--no-gpg-sign']);
|
||||
const [, hash] = /^\[(?:\w+)\(?.*?\)?(\w+)\] .+(?:\n|$)/.exec(stdout);
|
||||
commits.push(hash);
|
||||
return commits;
|
||||
},
|
||||
[]
|
||||
export async function gitCommits(messages, execaOpts) {
|
||||
await pReduce(messages, async (_, message) =>
|
||||
execa.stdout('git', ['commit', '-m', message, '--allow-empty', '--no-gpg-sign'], execaOpts)
|
||||
);
|
||||
return (await gitGetCommits()).slice(0, messages.length);
|
||||
return (await gitGetCommits(undefined, execaOpts)).slice(0, messages.length);
|
||||
}
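
Because execaOpts is forwarded verbatim to execa, callers can scope the git invocation to a specific clone and a controlled environment; a brief sketch with an illustrative path and variable:

// Hypothetical call: commit inside a given clone with a fixed author name.
await gitCommits(['First', 'Second'], {
  cwd: '/tmp/example-clone',
  env: {...process.env, GIT_AUTHOR_NAME: 'Test Author'},
});
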
|
||||
|
||||
/**
|
||||
* Get the list of parsed commits since a git reference.
|
||||
*
|
||||
* @param {String} [from] Git reference from which to search commits.
|
||||
* @param {Object} [execaOpts] Options to pass to `execa`.
|
||||
*
|
||||
* @return {Array<Object>} The list of parsed commits.
|
||||
*/
|
||||
export async function gitGetCommits(from) {
|
||||
export async function gitGetCommits(from, execaOpts) {
|
||||
Object.assign(gitLogParser.fields, {hash: 'H', message: 'B', gitTags: 'd', committerDate: {key: 'ci', type: Date}});
|
||||
return (await getStream.array(gitLogParser.parse({_: `${from ? from + '..' : ''}HEAD`}))).map(commit => {
|
||||
return (await getStream.array(
|
||||
gitLogParser.parse({_: `${from ? from + '..' : ''}HEAD`}, {...execaOpts, env: {...process.env, ...execaOpts.env}})
|
||||
)).map(commit => {
|
||||
commit.message = commit.message.trim();
|
||||
commit.gitTags = commit.gitTags.trim();
|
||||
return commit;
|
||||
@ -98,17 +98,22 @@ export async function gitGetCommits(from) {
|
||||
* Checkout a branch on the current git repository.
|
||||
*
|
||||
* @param {String} branch Branch name.
|
||||
* @param {Boolean} create `true` to create the branche ans switch, `false` to only switch.
|
||||
* @param {Boolean} create `true` to create the branch, `false` to checkout an existing branch.
|
||||
* @param {Object} [execaOpts] Options to pass to `execa`.
|
||||
*/
|
||||
export async function gitCheckout(branch, create = true) {
|
||||
await execa('git', create ? ['checkout', '-b', branch] : ['checkout', branch]);
|
||||
export async function gitCheckout(branch, create = true, execaOpts) {
|
||||
await execa('git', create ? ['checkout', '-b', branch] : ['checkout', branch], execaOpts);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the HEAD sha.
|
||||
*
|
||||
* @param {Object} [execaOpts] Options to pass to `execa`.
|
||||
*
|
||||
* @return {String} The sha of the head commit in the current git repository.
|
||||
*/
|
||||
export async function gitHead() {
|
||||
return execa.stdout('git', ['rev-parse', 'HEAD']);
|
||||
export async function gitHead(execaOpts) {
|
||||
return execa.stdout('git', ['rev-parse', 'HEAD'], execaOpts);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -116,9 +121,10 @@ export async function gitHead() {
|
||||
*
|
||||
* @param {String} tagName The tag name to create.
|
||||
* @param {String} [sha] The commit on which to create the tag. If undefined the tag is created on the last commit.
|
||||
* @param {Object} [execaOpts] Options to pass to `execa`.
|
||||
*/
|
||||
export async function gitTagVersion(tagName, sha) {
|
||||
await execa('git', sha ? ['tag', '-f', tagName, sha] : ['tag', tagName]);
|
||||
export async function gitTagVersion(tagName, sha, execaOpts) {
|
||||
await execa('git', sha ? ['tag', '-f', tagName, sha] : ['tag', tagName], execaOpts);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -131,11 +137,12 @@ export async function gitTagVersion(tagName, sha) {
|
||||
* @return {String} The path of the cloned repository.
|
||||
*/
|
||||
export async function gitShallowClone(repositoryUrl, branch = 'master', depth = 1) {
|
||||
const dir = tempy.directory();
|
||||
const cwd = tempy.directory();
|
||||
|
||||
process.chdir(dir);
|
||||
await execa('git', ['clone', '--no-hardlinks', '--no-tags', '-b', branch, '--depth', depth, repositoryUrl, dir]);
|
||||
return dir;
|
||||
await execa('git', ['clone', '--no-hardlinks', '--no-tags', '-b', branch, '--depth', depth, repositoryUrl, cwd], {
|
||||
cwd,
|
||||
});
|
||||
return cwd;
|
||||
}
|
||||
|
||||
/**
|
||||
@ -146,14 +153,13 @@ export async function gitShallowClone(repositoryUrl, branch = 'master', depth =
|
||||
* @return {String} The path of the new repository.
|
||||
*/
|
||||
export async function gitDetachedHead(repositoryUrl, head) {
|
||||
const dir = tempy.directory();
|
||||
const cwd = tempy.directory();
|
||||
|
||||
process.chdir(dir);
|
||||
await execa('git', ['init']);
|
||||
await execa('git', ['remote', 'add', 'origin', repositoryUrl]);
|
||||
await execa('git', ['fetch', repositoryUrl]);
|
||||
await execa('git', ['checkout', head]);
|
||||
return dir;
|
||||
await execa('git', ['init'], {cwd});
|
||||
await execa('git', ['remote', 'add', 'origin', repositoryUrl], {cwd});
|
||||
await execa('git', ['fetch', repositoryUrl], {cwd});
|
||||
await execa('git', ['checkout', head], {cwd});
|
||||
return cwd;
|
||||
}
|
||||
|
||||
/**
|
||||
@ -161,20 +167,22 @@ export async function gitDetachedHead(repositoryUrl, head) {
|
||||
*
|
||||
* @param {String} name Config name.
|
||||
* @param {String} value Config value.
|
||||
* @param {Object} [execaOpts] Options to pass to `execa`.
|
||||
*/
|
||||
export async function gitAddConfig(name, value) {
|
||||
await execa('git', ['config', '--add', name, value]);
|
||||
export async function gitAddConfig(name, value, execaOpts) {
|
||||
await execa('git', ['config', '--add', name, value], execaOpts);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the first commit sha referenced by the tag `tagName` in the local repository.
|
||||
*
|
||||
* @param {String} tagName Tag name for which to retrieve the commit sha.
|
||||
* @param {Object} [execaOpts] Options to pass to `execa`.
|
||||
*
|
||||
* @return {String} The sha of the commit associated with `tagName` on the local repository.
|
||||
*/
|
||||
export async function gitTagHead(tagName) {
|
||||
return execa.stdout('git', ['rev-list', '-1', tagName]);
|
||||
export async function gitTagHead(tagName, execaOpts) {
|
||||
return execa.stdout('git', ['rev-list', '-1', tagName], execaOpts);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -182,10 +190,12 @@ export async function gitTagHead(tagName) {
|
||||
*
|
||||
* @param {String} repositoryUrl The repository remote URL.
|
||||
* @param {String} tagName The tag name to search for.
|
||||
* @param {Object} [execaOpts] Options to pass to `execa`.
|
||||
*
|
||||
* @return {String} The sha of the commit associated with `tagName` on the remote repository.
|
||||
*/
|
||||
export async function gitRemoteTagHead(repositoryUrl, tagName) {
|
||||
return (await execa.stdout('git', ['ls-remote', '--tags', repositoryUrl, tagName]))
|
||||
export async function gitRemoteTagHead(repositoryUrl, tagName, execaOpts) {
|
||||
return (await execa.stdout('git', ['ls-remote', '--tags', repositoryUrl, tagName], execaOpts))
|
||||
.split('\n')
|
||||
.filter(tag => Boolean(tag))
|
||||
.map(tag => tag.match(/^(\S+)/)[1])[0];
|
||||
@ -195,11 +205,12 @@ export async function gitRemoteTagHead(repositoryUrl, tagName) {
|
||||
* Get the tag associated with a commit sha.
|
||||
*
|
||||
* @param {String} gitHead The commit sha for which to retrieve the associated tag.
|
||||
* @param {Object} [execaOpts] Options to pass to `execa`.
|
||||
*
|
||||
* @return {String} The tag associated with the given sha, or `null`.
|
||||
*/
|
||||
export async function gitCommitTag(gitHead) {
|
||||
return execa.stdout('git', ['describe', '--tags', '--exact-match', gitHead]);
|
||||
export async function gitCommitTag(gitHead, execaOpts) {
|
||||
return execa.stdout('git', ['describe', '--tags', '--exact-match', gitHead], execaOpts);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -207,8 +218,10 @@ export async function gitCommitTag(gitHead) {
|
||||
*
|
||||
* @param {String} repositoryUrl The remote repository URL.
|
||||
* @param {String} branch The branch to push.
|
||||
* @param {Object} [execaOpts] Options to pass to `execa`.
|
||||
*
|
||||
* @throws {Error} if the push failed.
|
||||
*/
|
||||
export async function gitPush(repositoryUrl = 'origin', branch = 'master') {
|
||||
await execa('git', ['push', '--tags', repositoryUrl, `HEAD:${branch}`]);
|
||||
export async function gitPush(repositoryUrl = 'origin', branch = 'master', execaOpts) {
|
||||
await execa('git', ['push', '--tags', repositoryUrl, `HEAD:${branch}`], execaOpts);
|
||||
}
|
||||
|
@ -64,8 +64,9 @@ async function createRepo(name, branch = 'master', description = `Repository ${n
|
||||
|
||||
// Retry as the server might take a few ms to make the repo available to push to
|
||||
await pRetry(() => initBareRepo(authUrl, branch), {retries: 3, minTimeout: 500, factor: 2});
|
||||
await gitShallowClone(authUrl);
|
||||
return {repositoryUrl, authUrl};
|
||||
const cwd = await gitShallowClone(authUrl);
|
||||
|
||||
return {cwd, repositoryUrl, authUrl};
|
||||
}
|
||||
|
||||
export default {start, stop, gitCredential, createRepo};
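
Since createRepo now also returns the path of the shallow clone, integration tests can drive git against it without changing the process working directory; a usage sketch (the repository name is hypothetical):

// Sketch only; 'example-repo' does not exist in the test suite.
const {cwd, repositoryUrl, authUrl} = await gitbox.createRepo('example-repo');
await gitCommits(['Initial work'], {cwd});
await gitPush(authUrl, 'master', {cwd});
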
|
||||
|
@ -1,57 +1,43 @@
|
||||
import test from 'ava';
|
||||
import clearModule from 'clear-module';
|
||||
import hideSensitive from '../lib/hide-sensitive';
|
||||
|
||||
test.beforeEach(() => {
|
||||
process.env = {};
|
||||
clearModule('../lib/hide-sensitive');
|
||||
});
|
||||
|
||||
test.serial('Replace multiple sensitive environment variable values', t => {
|
||||
process.env.SOME_PASSWORD = 'password';
|
||||
process.env.SOME_TOKEN = 'secret';
|
||||
test('Replace multiple sensitive environment variable values', t => {
|
||||
const env = {SOME_PASSWORD: 'password', SOME_TOKEN: 'secret'};
|
||||
t.is(
|
||||
require('../lib/hide-sensitive')(
|
||||
`https://user:${process.env.SOME_PASSWORD}@host.com?token=${process.env.SOME_TOKEN}`
|
||||
),
|
||||
hideSensitive(env)(`https://user:${env.SOME_PASSWORD}@host.com?token=${env.SOME_TOKEN}`),
|
||||
'https://user:[secure]@host.com?token=[secure]'
|
||||
);
|
||||
});
|
||||
|
||||
test.serial('Replace multiple occurences of sensitive environment variable values', t => {
|
||||
process.env.secretKey = 'secret';
|
||||
test('Replace multiple occurrences of sensitive environment variable values', t => {
|
||||
const env = {secretKey: 'secret'};
|
||||
t.is(
|
||||
require('../lib/hide-sensitive')(`https://user:${process.env.secretKey}@host.com?token=${process.env.secretKey}`),
|
||||
hideSensitive(env)(`https://user:${env.secretKey}@host.com?token=${env.secretKey}`),
|
||||
'https://user:[secure]@host.com?token=[secure]'
|
||||
);
|
||||
});
|
||||
|
||||
test.serial('Escape regexp special characters', t => {
|
||||
process.env.SOME_CREDENTIALS = 'p$^{.+}\\w[a-z]o.*rd';
|
||||
test('Escape regexp special characters', t => {
|
||||
const env = {SOME_CREDENTIALS: 'p$^{.+}\\w[a-z]o.*rd'};
|
||||
t.is(hideSensitive(env)(`https://user:${env.SOME_CREDENTIALS}@host.com`), 'https://user:[secure]@host.com');
|
||||
});
|
||||
|
||||
test('Accept "undefined" input', t => {
|
||||
t.is(hideSensitive({})(), undefined);
|
||||
});
|
||||
|
||||
test('Return same string if no environment variable has to be replaced', t => {
|
||||
t.is(hideSensitive({})('test'), 'test');
|
||||
});
|
||||
|
||||
test('Exclude empty environment variables from the regexp', t => {
|
||||
const env = {SOME_PASSWORD: 'password', SOME_TOKEN: ''};
|
||||
t.is(
|
||||
require('../lib/hide-sensitive')(`https://user:${process.env.SOME_CREDENTIALS}@host.com`),
|
||||
'https://user:[secure]@host.com'
|
||||
);
|
||||
});
|
||||
|
||||
test.serial('Accept "undefined" input', t => {
|
||||
t.is(require('../lib/hide-sensitive')(), undefined);
|
||||
});
|
||||
|
||||
test.serial('Return same string if no environment variable has to be replaced', t => {
|
||||
t.is(require('../lib/hide-sensitive')('test'), 'test');
|
||||
});
|
||||
|
||||
test.serial('Exclude empty environment variables from the regexp', t => {
|
||||
process.env.SOME_PASSWORD = 'password';
|
||||
process.env.SOME_TOKEN = '';
|
||||
t.is(
|
||||
require('../lib/hide-sensitive')(`https://user:${process.env.SOME_PASSWORD}@host.com?token=`),
|
||||
hideSensitive(env)(`https://user:${env.SOME_PASSWORD}@host.com?token=`),
|
||||
'https://user:[secure]@host.com?token='
|
||||
);
|
||||
});
|
||||
|
||||
test.serial('Exclude empty environment variables from the regexp if there is only empty ones', t => {
|
||||
process.env.SOME_PASSWORD = '';
|
||||
process.env.SOME_TOKEN = ' \n ';
|
||||
t.is(require('../lib/hide-sensitive')(`https://host.com?token=`), 'https://host.com?token=');
|
||||
test('Exclude empty environment variables from the regexp if there are only empty ones', t => {
|
||||
t.is(hideSensitive({SOME_PASSWORD: '', SOME_TOKEN: ' \n '})(`https://host.com?token=`), 'https://host.com?token=');
|
||||
});
|
||||
|
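
These tests treat hideSensitive as a factory: given an env object it returns a function that masks any sensitive values found in a string. A minimal usage sketch under that assumption:

// Sketch only; MY_TOKEN and its value are illustrative.
const mask = hideSensitive({MY_TOKEN: 'secret token'});
mask('pushing with token secret token'); // -> 'pushing with token [secure]'
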
@ -1,7 +1,6 @@
|
||||
import test from 'ava';
|
||||
import proxyquire from 'proxyquire';
|
||||
import {spy, stub} from 'sinon';
|
||||
import clearModule from 'clear-module';
|
||||
import AggregateError from 'aggregate-error';
|
||||
import SemanticReleaseError from '@semantic-release/error';
|
||||
import {COMMIT_NAME, COMMIT_EMAIL} from '../lib/definitions/constants';
|
||||
@ -16,51 +15,31 @@ import {
|
||||
gitShallowClone,
|
||||
} from './helpers/git-utils';
|
||||
|
||||
// Save the current process.env
|
||||
const envBackup = Object.assign({}, process.env);
|
||||
// Save the current working diretory
|
||||
const cwd = process.cwd();
|
||||
const requireNoCache = proxyquire.noPreserveCache();
|
||||
const pluginNoop = require.resolve('./fixtures/plugin-noop');
|
||||
|
||||
test.beforeEach(t => {
|
||||
clearModule('../lib/hide-sensitive');
|
||||
// Delete environment variables that could have been set on the machine running the tests
|
||||
delete process.env.GIT_CREDENTIALS;
|
||||
delete process.env.GH_TOKEN;
|
||||
delete process.env.GITHUB_TOKEN;
|
||||
delete process.env.GL_TOKEN;
|
||||
delete process.env.GITLAB_TOKEN;
|
||||
// Stub the logger functions
|
||||
t.context.log = spy();
|
||||
t.context.error = spy();
|
||||
t.context.logger = {log: t.context.log, error: t.context.error};
|
||||
t.context.stdout = stub(process.stdout, 'write');
|
||||
t.context.stderr = stub(process.stderr, 'write');
|
||||
t.context.stdout = spy();
|
||||
t.context.stderr = spy();
|
||||
t.context.logger = {log: t.context.log, error: t.context.error, stdout: t.context.stdout, stderr: t.context.stderr};
|
||||
});
|
||||
|
||||
test.afterEach.always(t => {
|
||||
// Restore process.env
|
||||
process.env = envBackup;
|
||||
// Restore the current working directory
|
||||
process.chdir(cwd);
|
||||
|
||||
t.context.stdout.restore();
|
||||
t.context.stderr.restore();
|
||||
});
|
||||
|
||||
test.serial('Plugins are called with expected values', async t => {
|
||||
test('Plugins are called with expected values', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
const repositoryUrl = await gitRepo(true);
|
||||
const {cwd, repositoryUrl} = await gitRepo(true);
|
||||
// Add commits to the master branch
|
||||
let commits = await gitCommits(['First']);
|
||||
let commits = await gitCommits(['First'], {cwd});
|
||||
// Create the tag corresponding to version 1.0.0
|
||||
await gitTagVersion('v1.0.0');
|
||||
await gitTagVersion('v1.0.0', undefined, {cwd});
|
||||
// Add new commits to the master branch
|
||||
commits = (await gitCommits(['Second'])).concat(commits);
|
||||
await gitPush();
|
||||
commits = (await gitCommits(['Second'], {cwd})).concat(commits);
|
||||
await gitPush(repositoryUrl, 'master', {cwd});
|
||||
|
||||
const lastRelease = {version: '1.0.0', gitHead: commits[commits.length - 1].hash, gitTag: 'v1.0.0'};
|
||||
const nextRelease = {type: 'major', version: '2.0.0', gitHead: await getGitHead(), gitTag: 'v2.0.0'};
|
||||
const nextRelease = {type: 'major', version: '2.0.0', gitHead: await getGitHead({cwd}), gitTag: 'v2.0.0'};
|
||||
const notes1 = 'Release notes 1';
|
||||
const notes2 = 'Release notes 2';
|
||||
const notes3 = 'Release notes 3';
|
||||
@ -75,7 +54,7 @@ test.serial('Plugins are called with expected values', async t => {
|
||||
const prepare = stub().resolves();
|
||||
const publish1 = stub().resolves(release1);
|
||||
const success = stub().resolves();
|
||||
|
||||
const env = {...process.env};
|
||||
const config = {branch: 'master', repositoryUrl, globalOpt: 'global', tagFormat: `v\${version}`};
|
||||
const options = {
|
||||
...config,
|
||||
@ -88,17 +67,22 @@ test.serial('Plugins are called with expected values', async t => {
|
||||
success,
|
||||
};
|
||||
|
||||
const semanticRelease = proxyquire('..', {
|
||||
const semanticRelease = requireNoCache('..', {
|
||||
'./lib/logger': t.context.logger,
|
||||
'env-ci': () => ({isCi: true, branch: 'master', isPr: false}),
|
||||
});
|
||||
t.truthy(await semanticRelease(options));
|
||||
t.truthy(await semanticRelease(options, {cwd, extendEnv: false, env}));
|
||||
|
||||
t.is(verifyConditions1.callCount, 1);
|
||||
t.deepEqual(verifyConditions1.args[0][0], config);
|
||||
t.deepEqual(verifyConditions1.args[0][1], {options, logger: t.context.logger});
|
||||
t.deepEqual(verifyConditions1.args[0][1].cwd, cwd);
|
||||
t.deepEqual(verifyConditions1.args[0][1].options, options);
|
||||
t.deepEqual(verifyConditions1.args[0][1].logger, t.context.logger);
|
||||
t.is(verifyConditions2.callCount, 1);
|
||||
t.deepEqual(verifyConditions2.args[0][1], {options, logger: t.context.logger});
|
||||
t.deepEqual(verifyConditions2.args[0][0], config);
|
||||
t.deepEqual(verifyConditions2.args[0][1].cwd, cwd);
|
||||
t.deepEqual(verifyConditions2.args[0][1].options, options);
|
||||
t.deepEqual(verifyConditions2.args[0][1].logger, t.context.logger);
|
||||
|
||||
t.is(analyzeCommits.callCount, 1);
|
||||
t.deepEqual(analyzeCommits.args[0][0], config);
|
||||
@ -176,24 +160,24 @@ test.serial('Plugins are called with expected values', async t => {
|
||||
]);
|
||||
|
||||
// Verify the tag has been created on the local and remote repo and references the gitHead
|
||||
t.is(await gitTagHead(nextRelease.gitTag), nextRelease.gitHead);
|
||||
t.is(await gitRemoteTagHead(repositoryUrl, nextRelease.gitTag), nextRelease.gitHead);
|
||||
t.is(await gitTagHead(nextRelease.gitTag, {cwd}), nextRelease.gitHead);
|
||||
t.is(await gitRemoteTagHead(repositoryUrl, nextRelease.gitTag, {cwd}), nextRelease.gitHead);
|
||||
|
||||
// Verify the author/commiter name and email hve been set
|
||||
t.is(process.env.GIT_AUTHOR_NAME, COMMIT_NAME);
|
||||
t.is(process.env.GIT_AUTHOR_EMAIL, COMMIT_EMAIL);
|
||||
t.is(process.env.GIT_COMMITTER_NAME, COMMIT_NAME);
|
||||
t.is(process.env.GIT_COMMITTER_EMAIL, COMMIT_EMAIL);
|
||||
// Verify the author/committer name and email have been set
|
||||
t.is(env.GIT_AUTHOR_NAME, COMMIT_NAME);
|
||||
t.is(env.GIT_AUTHOR_EMAIL, COMMIT_EMAIL);
|
||||
t.is(env.GIT_COMMITTER_NAME, COMMIT_NAME);
|
||||
t.is(env.GIT_COMMITTER_EMAIL, COMMIT_EMAIL);
|
||||
});
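
The assertions above fix the shape of the context passed to every plugin; a hedged sketch of a plugin consuming it (the plugin and the token check are hypothetical, and env is included per the intent of this change):

// Hypothetical plugin: the second argument carries cwd, env, options and logger.
async function verifyConditions(pluginConfig, context) {
  const {cwd, env, options, logger} = context;
  logger.log('Verifying release of %s from %s', options.branch, cwd);
  if (!env.GH_TOKEN) {
    throw new Error('GH_TOKEN is required'); // illustrative check only
  }
}
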
|
||||
|
||||
test.serial('Use custom tag format', async t => {
|
||||
const repositoryUrl = await gitRepo(true);
|
||||
await gitCommits(['First']);
|
||||
await gitTagVersion('test-1.0.0');
|
||||
await gitCommits(['Second']);
|
||||
await gitPush();
|
||||
test('Use custom tag format', async t => {
|
||||
const {cwd, repositoryUrl} = await gitRepo(true);
|
||||
await gitCommits(['First'], {cwd});
|
||||
await gitTagVersion('test-1.0.0', undefined, {cwd});
|
||||
await gitCommits(['Second'], {cwd});
|
||||
await gitPush(repositoryUrl, 'master', {cwd});
|
||||
|
||||
const nextRelease = {type: 'major', version: '2.0.0', gitHead: await getGitHead(), gitTag: 'test-2.0.0'};
|
||||
const nextRelease = {type: 'major', version: '2.0.0', gitHead: await getGitHead({cwd}), gitTag: 'test-2.0.0'};
|
||||
const notes = 'Release notes';
|
||||
const config = {branch: 'master', repositoryUrl, globalOpt: 'global', tagFormat: `test-\${version}`};
|
||||
const options = {
|
||||
@ -208,38 +192,37 @@ test.serial('Use custom tag format', async t => {
|
||||
fail: stub().resolves(),
|
||||
};
|
||||
|
||||
const semanticRelease = proxyquire('..', {
|
||||
const semanticRelease = requireNoCache('..', {
|
||||
'./lib/logger': t.context.logger,
|
||||
'env-ci': () => ({isCi: true, branch: 'master', isPr: false}),
|
||||
});
|
||||
t.truthy(await semanticRelease(options));
|
||||
t.truthy(await semanticRelease(options, {cwd, env: {}}));
|
||||
|
||||
// Verify the tag has been created on the local and remote repo and references the gitHead
|
||||
t.is(await gitTagHead(nextRelease.gitTag), nextRelease.gitHead);
|
||||
t.is(await gitRemoteTagHead(repositoryUrl, nextRelease.gitTag), nextRelease.gitHead);
|
||||
t.is(await gitTagHead(nextRelease.gitTag, {cwd}), nextRelease.gitHead);
|
||||
t.is(await gitRemoteTagHead(repositoryUrl, nextRelease.gitTag, {cwd}), nextRelease.gitHead);
|
||||
});
|
||||
|
||||
test.serial('Use new gitHead, and recreate release notes if a prepare plugin create a commit', async t => {
|
||||
test('Use new gitHead, and recreate release notes if a prepare plugin create a commit', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
const repositoryUrl = await gitRepo(true);
|
||||
const {cwd, repositoryUrl} = await gitRepo(true);
|
||||
// Add commits to the master branch
|
||||
let commits = await gitCommits(['First']);
|
||||
let commits = await gitCommits(['First'], {cwd});
|
||||
// Create the tag corresponding to version 1.0.0
|
||||
await gitTagVersion('v1.0.0');
|
||||
await gitTagVersion('v1.0.0', undefined, {cwd});
|
||||
// Add new commits to the master branch
|
||||
commits = (await gitCommits(['Second'])).concat(commits);
|
||||
await gitPush();
|
||||
commits = (await gitCommits(['Second'], {cwd})).concat(commits);
|
||||
await gitPush(repositoryUrl, 'master', {cwd});
|
||||
|
||||
const nextRelease = {type: 'major', version: '2.0.0', gitHead: await getGitHead(), gitTag: 'v2.0.0'};
|
||||
const nextRelease = {type: 'major', version: '2.0.0', gitHead: await getGitHead({cwd}), gitTag: 'v2.0.0'};
|
||||
const notes = 'Release notes';
|
||||
|
||||
const generateNotes = stub().resolves(notes);
|
||||
const prepare1 = stub().callsFake(async () => {
|
||||
commits = (await gitCommits(['Third'])).concat(commits);
|
||||
commits = (await gitCommits(['Third'], {cwd})).concat(commits);
|
||||
});
|
||||
const prepare2 = stub().resolves();
|
||||
const publish = stub().resolves();
|
||||
|
||||
const options = {
|
||||
branch: 'master',
|
||||
repositoryUrl,
|
||||
@ -253,19 +236,19 @@ test.serial('Use new gitHead, and recreate release notes if a prepare plugin cre
|
||||
fail: stub().resolves(),
|
||||
};
|
||||
|
||||
const semanticRelease = proxyquire('..', {
|
||||
const semanticRelease = requireNoCache('..', {
|
||||
'./lib/logger': t.context.logger,
|
||||
'env-ci': () => ({isCi: true, branch: 'master', isPr: false}),
|
||||
});
|
||||
|
||||
t.truthy(await semanticRelease(options));
|
||||
t.truthy(await semanticRelease(options, {cwd, env: {}}));
|
||||
|
||||
t.is(generateNotes.callCount, 2);
|
||||
t.deepEqual(generateNotes.args[0][1].nextRelease, nextRelease);
|
||||
t.is(prepare1.callCount, 1);
|
||||
t.deepEqual(prepare1.args[0][1].nextRelease, {...nextRelease, notes});
|
||||
|
||||
nextRelease.gitHead = await getGitHead();
|
||||
nextRelease.gitHead = await getGitHead({cwd});
|
||||
|
||||
t.deepEqual(generateNotes.args[1][1].nextRelease, {...nextRelease, notes});
|
||||
t.is(prepare2.callCount, 1);
|
||||
@ -275,22 +258,22 @@ test.serial('Use new gitHead, and recreate release notes if a prepare plugin cre
|
||||
t.deepEqual(publish.args[0][1].nextRelease, {...nextRelease, notes});
|
||||
|
||||
// Verify the tag has been created on the local and remote repo and references the last gitHead
|
||||
t.is(await gitTagHead(nextRelease.gitTag), commits[0].hash);
|
||||
t.is(await gitRemoteTagHead(repositoryUrl, nextRelease.gitTag), commits[0].hash);
|
||||
t.is(await gitTagHead(nextRelease.gitTag, {cwd}), commits[0].hash);
|
||||
t.is(await gitRemoteTagHead(repositoryUrl, nextRelease.gitTag, {cwd}), commits[0].hash);
|
||||
});
|
||||
|
||||
test.serial('Call all "success" plugins even if one errors out', async t => {
|
||||
test('Call all "success" plugins even if one errors out', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
const repositoryUrl = await gitRepo(true);
|
||||
const {cwd, repositoryUrl} = await gitRepo(true);
|
||||
// Add commits to the master branch
|
||||
await gitCommits(['First']);
|
||||
await gitCommits(['First'], {cwd});
|
||||
// Create the tag corresponding to version 1.0.0
|
||||
await gitTagVersion('v1.0.0');
|
||||
await gitTagVersion('v1.0.0', undefined, {cwd});
|
||||
// Add new commits to the master branch
|
||||
await gitCommits(['Second']);
|
||||
await gitPush();
|
||||
await gitCommits(['Second'], {cwd});
|
||||
await gitPush(repositoryUrl, 'master', {cwd});
|
||||
|
||||
const nextRelease = {type: 'major', version: '2.0.0', gitHead: await getGitHead(), gitTag: 'v2.0.0'};
|
||||
const nextRelease = {type: 'major', version: '2.0.0', gitHead: await getGitHead({cwd}), gitTag: 'v2.0.0'};
|
||||
const notes = 'Release notes';
|
||||
const verifyConditions1 = stub().resolves();
|
||||
const verifyConditions2 = stub().resolves();
|
||||
@ -300,7 +283,6 @@ test.serial('Call all "success" plugins even if one errors out', async t => {
|
||||
const publish = stub().resolves(release);
|
||||
const success1 = stub().rejects();
|
||||
const success2 = stub().resolves();
|
||||
|
||||
const config = {branch: 'master', repositoryUrl, globalOpt: 'global', tagFormat: `v\${version}`};
|
||||
const options = {
|
||||
...config,
|
||||
@ -312,27 +294,26 @@ test.serial('Call all "success" plugins even if one errors out', async t => {
|
||||
success: [success1, success2],
|
||||
};
|
||||
|
||||
const semanticRelease = proxyquire('..', {
|
||||
const semanticRelease = requireNoCache('..', {
|
||||
'./lib/logger': t.context.logger,
|
||||
'env-ci': () => ({isCi: true, branch: 'master', isPr: false}),
|
||||
});
|
||||
|
||||
await t.throws(semanticRelease(options));
|
||||
await t.throws(semanticRelease(options, {cwd, env: {}}));
|
||||
|
||||
t.is(success1.callCount, 1);
|
||||
t.deepEqual(success1.args[0][0], config);
|
||||
t.deepEqual(success1.args[0][1].releases, [{...release, ...nextRelease, notes, pluginName: '[Function: proxy]'}]);
|
||||
|
||||
t.is(success2.callCount, 1);
|
||||
t.deepEqual(success2.args[0][1].releases, [{...release, ...nextRelease, notes, pluginName: '[Function: proxy]'}]);
|
||||
});
|
||||
|
||||
test.serial('Log all "verifyConditions" errors', async t => {
|
||||
test('Log all "verifyConditions" errors', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
const repositoryUrl = await gitRepo(true);
|
||||
const {cwd, repositoryUrl} = await gitRepo(true);
|
||||
// Add commits to the master branch
|
||||
await gitCommits(['First']);
|
||||
await gitPush();
|
||||
await gitCommits(['First'], {cwd});
|
||||
await gitPush(repositoryUrl, 'master', {cwd});
|
||||
|
||||
const error1 = new Error('error 1');
|
||||
const error2 = new SemanticReleaseError('error 2', 'ERR2');
|
||||
@ -345,11 +326,11 @@ test.serial('Log all "verifyConditions" errors', async t => {
|
||||
fail,
|
||||
};
|
||||
|
||||
const semanticRelease = proxyquire('..', {
|
||||
const semanticRelease = requireNoCache('..', {
|
||||
'./lib/logger': t.context.logger,
|
||||
'env-ci': () => ({isCi: true, branch: 'master', isPr: false}),
|
||||
});
|
||||
const errors = [...(await t.throws(semanticRelease(options)))];
|
||||
const errors = [...(await t.throws(semanticRelease(options, {cwd, env: {}})))];
|
||||
|
||||
t.deepEqual(errors, [error1, error2, error3]);
|
||||
t.deepEqual(t.context.log.args[t.context.log.args.length - 2], ['%s error 2', 'ERR2']);
|
||||
@ -366,16 +347,16 @@ test.serial('Log all "verifyConditions" errors', async t => {
|
||||
t.deepEqual(fail.args[0][1].errors, [error2, error3]);
|
||||
});
|
||||
|
||||
test.serial('Log all "verifyRelease" errors', async t => {
|
||||
test('Log all "verifyRelease" errors', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
const repositoryUrl = await gitRepo(true);
|
||||
const {cwd, repositoryUrl} = await gitRepo(true);
|
||||
// Add commits to the master branch
|
||||
await gitCommits(['First']);
|
||||
await gitCommits(['First'], {cwd});
|
||||
// Create the tag corresponding to version 1.0.0
|
||||
await gitTagVersion('v1.0.0');
|
||||
await gitTagVersion('v1.0.0', undefined, {cwd});
|
||||
// Add new commits to the master branch
|
||||
await gitCommits(['Second']);
|
||||
await gitPush();
|
||||
await gitCommits(['Second'], {cwd});
|
||||
await gitPush(repositoryUrl, 'master', {cwd});
|
||||
|
||||
const error1 = new SemanticReleaseError('error 1', 'ERR1');
|
||||
const error2 = new SemanticReleaseError('error 2', 'ERR2');
|
||||
@ -389,11 +370,11 @@ test.serial('Log all "verifyRelease" errors', async t => {
|
||||
fail,
|
||||
};
|
||||
|
||||
const semanticRelease = proxyquire('..', {
|
||||
const semanticRelease = requireNoCache('..', {
|
||||
'./lib/logger': t.context.logger,
|
||||
'env-ci': () => ({isCi: true, branch: 'master', isPr: false}),
|
||||
});
|
||||
const errors = [...(await t.throws(semanticRelease(options)))];
|
||||
const errors = [...(await t.throws(semanticRelease(options, {cwd, env: {}})))];
|
||||
|
||||
t.deepEqual(errors, [error1, error2]);
|
||||
t.deepEqual(t.context.log.args[t.context.log.args.length - 2], ['%s error 1', 'ERR1']);
|
||||
@ -403,18 +384,18 @@ test.serial('Log all "verifyRelease" errors', async t => {
|
||||
t.deepEqual(fail.args[0][1].errors, [error1, error2]);
|
||||
});
|
||||
|
||||
test.serial('Dry-run skips publish and success', async t => {
|
||||
test('Dry-run skips publish and success', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
const repositoryUrl = await gitRepo(true);
|
||||
const {cwd, repositoryUrl} = await gitRepo(true);
|
||||
// Add commits to the master branch
|
||||
await gitCommits(['First']);
|
||||
await gitCommits(['First'], {cwd});
|
||||
// Create the tag corresponding to version 1.0.0
|
||||
await gitTagVersion('v1.0.0');
|
||||
await gitTagVersion('v1.0.0', undefined, {cwd});
|
||||
// Add new commits to the master branch
|
||||
await gitCommits(['Second']);
|
||||
await gitPush();
|
||||
await gitCommits(['Second'], {cwd});
|
||||
await gitPush(repositoryUrl, 'master', {cwd});
|
||||
|
||||
const nextRelease = {type: 'major', version: '2.0.0', gitHead: await getGitHead(), gitTag: 'v2.0.0'};
|
||||
const nextRelease = {type: 'major', version: '2.0.0', gitHead: await getGitHead({cwd}), gitTag: 'v2.0.0'};
|
||||
const notes = 'Release notes';
|
||||
|
||||
const verifyConditions = stub().resolves();
|
||||
@ -437,11 +418,11 @@ test.serial('Dry-run skips publish and success', async t => {
|
||||
success,
|
||||
};
|
||||
|
||||
const semanticRelease = proxyquire('..', {
|
||||
const semanticRelease = requireNoCache('..', {
|
||||
'./lib/logger': t.context.logger,
|
||||
'env-ci': () => ({isCi: true, branch: 'master', isPr: false}),
|
||||
});
|
||||
t.truthy(await semanticRelease(options));
|
||||
t.truthy(await semanticRelease(options, {cwd, env: {}}));
|
||||
|
||||
t.not(t.context.log.args[0][0], 'This run was not triggered in a known CI environment, running in dry-run mode.');
|
||||
t.is(verifyConditions.callCount, 1);
|
||||
@ -452,16 +433,16 @@ test.serial('Dry-run skips publish and success', async t => {
|
||||
t.is(success.callCount, 0);
|
||||
});
|
||||
|
||||
test.serial('Dry-run skips fail', async t => {
|
||||
test('Dry-run skips fail', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
const repositoryUrl = await gitRepo(true);
|
||||
const {cwd, repositoryUrl} = await gitRepo(true);
|
||||
// Add commits to the master branch
|
||||
await gitCommits(['First']);
|
||||
await gitCommits(['First'], {cwd});
|
||||
// Create the tag corresponding to version 1.0.0
|
||||
await gitTagVersion('v1.0.0');
|
||||
await gitTagVersion('v1.0.0', undefined, {cwd});
|
||||
// Add new commits to the master branch
|
||||
await gitCommits(['Second']);
|
||||
await gitPush();
|
||||
await gitCommits(['Second'], {cwd});
|
||||
await gitPush(repositoryUrl, 'master', {cwd});
|
||||
|
||||
const error1 = new SemanticReleaseError('error 1', 'ERR1');
|
||||
const error2 = new SemanticReleaseError('error 2', 'ERR2');
|
||||
@ -475,11 +456,11 @@ test.serial('Dry-run skips fail', async t => {
|
||||
fail,
|
||||
};
|
||||
|
||||
const semanticRelease = proxyquire('..', {
|
||||
const semanticRelease = requireNoCache('..', {
|
||||
'./lib/logger': t.context.logger,
|
||||
'env-ci': () => ({isCi: true, branch: 'master', isPr: false}),
|
||||
});
|
||||
const errors = [...(await t.throws(semanticRelease(options)))];
|
||||
const errors = [...(await t.throws(semanticRelease(options, {cwd, env: {}})))];
|
||||
|
||||
t.deepEqual(errors, [error1, error2]);
|
||||
t.deepEqual(t.context.log.args[t.context.log.args.length - 2], ['%s error 1', 'ERR1']);
|
||||
@ -487,18 +468,18 @@ test.serial('Dry-run skips fail', async t => {
|
||||
t.is(fail.callCount, 0);
|
||||
});
|
||||
|
||||
test.serial('Force a dry-run if not on a CI and "noCi" is not explicitly set', async t => {
|
||||
test('Force a dry-run if not on a CI and "noCi" is not explicitly set', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
const repositoryUrl = await gitRepo(true);
|
||||
const {cwd, repositoryUrl} = await gitRepo(true);
|
||||
// Add commits to the master branch
|
||||
await gitCommits(['First']);
|
||||
await gitCommits(['First'], {cwd});
|
||||
// Create the tag corresponding to version 1.0.0
|
||||
await gitTagVersion('v1.0.0');
|
||||
await gitTagVersion('v1.0.0', undefined, {cwd});
|
||||
// Add new commits to the master branch
|
||||
await gitCommits(['Second']);
|
||||
await gitPush();
|
||||
await gitCommits(['Second'], {cwd});
|
||||
await gitPush(repositoryUrl, 'master', {cwd});
|
||||
|
||||
const nextRelease = {type: 'major', version: '2.0.0', gitHead: await getGitHead(), gitTag: 'v2.0.0'};
|
||||
const nextRelease = {type: 'major', version: '2.0.0', gitHead: await getGitHead({cwd}), gitTag: 'v2.0.0'};
|
||||
const notes = 'Release notes';
|
||||
|
||||
const verifyConditions = stub().resolves();
|
||||
@ -522,11 +503,11 @@ test.serial('Force a dry-run if not on a CI and "noCi" is not explicitly set', a
|
||||
fail: stub().resolves(),
|
||||
};
|
||||
|
||||
const semanticRelease = proxyquire('..', {
|
||||
const semanticRelease = requireNoCache('..', {
|
||||
'./lib/logger': t.context.logger,
|
||||
'env-ci': () => ({isCi: false, branch: 'master'}),
|
||||
});
|
||||
t.truthy(await semanticRelease(options));
|
||||
t.truthy(await semanticRelease(options, {cwd, env: {}}));
|
||||
|
||||
t.is(t.context.log.args[1][0], 'This run was not triggered in a known CI environment, running in dry-run mode.');
|
||||
t.is(verifyConditions.callCount, 1);
|
||||
@ -539,16 +520,16 @@ test.serial('Force a dry-run if not on a CI and "noCi" is not explicitly set', a
|
||||
|
||||
test.serial('Dry-run does not print changelog if "generateNotes" return "undefined"', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
const repositoryUrl = await gitRepo(true);
|
||||
const {cwd, repositoryUrl} = await gitRepo(true);
|
||||
// Add commits to the master branch
|
||||
await gitCommits(['First']);
|
||||
await gitCommits(['First'], {cwd});
|
||||
// Create the tag corresponding to version 1.0.0
|
||||
await gitTagVersion('v1.0.0');
|
||||
await gitTagVersion('v1.0.0', undefined, {cwd});
|
||||
// Add new commits to the master branch
|
||||
await gitCommits(['Second']);
|
||||
await gitPush();
|
||||
await gitCommits(['Second'], {cwd});
|
||||
await gitPush(repositoryUrl, 'master', {cwd});
|
||||
|
||||
const nextRelease = {type: 'major', version: '2.0.0', gitHead: await getGitHead(), gitTag: 'v2.0.0'};
|
||||
const nextRelease = {type: 'major', version: '2.0.0', gitHead: await getGitHead({cwd}), gitTag: 'v2.0.0'};
|
||||
const analyzeCommits = stub().resolves(nextRelease.type);
|
||||
const generateNotes = stub().resolves();
|
||||
|
||||
@ -565,27 +546,27 @@ test.serial('Dry-run does not print changelog if "generateNotes" return "undefin
|
||||
success: false,
|
||||
};
|
||||
|
||||
const semanticRelease = proxyquire('..', {
|
||||
const semanticRelease = requireNoCache('..', {
|
||||
'./lib/logger': t.context.logger,
|
||||
'env-ci': () => ({isCi: true, branch: 'master', isPr: false}),
|
||||
});
|
||||
t.truthy(await semanticRelease(options));
|
||||
t.truthy(await semanticRelease(options, {cwd, env: {}}));
|
||||
|
||||
t.deepEqual(t.context.log.args[t.context.log.args.length - 1], ['Release note for version %s:\n', '2.0.0']);
|
||||
});
|
||||
|
||||
test.serial('Allow local releases with "noCi" option', async t => {
|
||||
test('Allow local releases with "noCi" option', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
const repositoryUrl = await gitRepo(true);
|
||||
const {cwd, repositoryUrl} = await gitRepo(true);
|
||||
// Add commits to the master branch
|
||||
await gitCommits(['First']);
|
||||
await gitCommits(['First'], {cwd});
|
||||
// Create the tag corresponding to version 1.0.0
|
||||
await gitTagVersion('v1.0.0');
|
||||
await gitTagVersion('v1.0.0', undefined, {cwd});
|
||||
// Add new commits to the master branch
|
||||
await gitCommits(['Second']);
|
||||
await gitPush();
|
||||
await gitCommits(['Second'], {cwd});
|
||||
await gitPush(repositoryUrl, 'master', {cwd});
|
||||
|
||||
const nextRelease = {type: 'major', version: '2.0.0', gitHead: await getGitHead(), gitTag: 'v2.0.0'};
|
||||
const nextRelease = {type: 'major', version: '2.0.0', gitHead: await getGitHead({cwd}), gitTag: 'v2.0.0'};
|
||||
const notes = 'Release notes';
|
||||
|
||||
const verifyConditions = stub().resolves();
|
||||
@ -609,11 +590,11 @@ test.serial('Allow local releases with "noCi" option', async t => {
|
||||
fail: stub().resolves(),
|
||||
};
|
||||
|
||||
const semanticRelease = proxyquire('..', {
|
||||
const semanticRelease = requireNoCache('..', {
|
||||
'./lib/logger': t.context.logger,
|
||||
'env-ci': () => ({isCi: false, branch: 'master', isPr: true}),
|
||||
});
|
||||
t.truthy(await semanticRelease(options));
|
||||
t.truthy(await semanticRelease(options, {cwd, env: {}}));
|
||||
|
||||
t.not(t.context.log.args[0][0], 'This run was not triggered in a known CI environment, running in dry-run mode.');
|
||||
t.not(
|
||||
@ -628,19 +609,19 @@ test.serial('Allow local releases with "noCi" option', async t => {
|
||||
t.is(success.callCount, 1);
|
||||
});
|
||||
|
||||
test.serial('Accept "undefined" value returned by the "generateNotes" plugins', async t => {
|
||||
test('Accept "undefined" value returned by the "generateNotes" plugins', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
const repositoryUrl = await gitRepo(true);
|
||||
const {cwd, repositoryUrl} = await gitRepo(true);
|
||||
// Add commits to the master branch
|
||||
let commits = await gitCommits(['First']);
|
||||
let commits = await gitCommits(['First'], {cwd});
|
||||
// Create the tag corresponding to version 1.0.0
|
||||
await gitTagVersion('v1.0.0');
|
||||
await gitTagVersion('v1.0.0', undefined, {cwd});
|
||||
// Add new commits to the master branch
|
||||
commits = (await gitCommits(['Second'])).concat(commits);
|
||||
await gitPush();
|
||||
commits = (await gitCommits(['Second'], {cwd})).concat(commits);
|
||||
await gitPush(repositoryUrl, 'master', {cwd});
|
||||
|
||||
const lastRelease = {version: '1.0.0', gitHead: commits[commits.length - 1].hash, gitTag: 'v1.0.0'};
|
||||
const nextRelease = {type: 'major', version: '2.0.0', gitHead: await getGitHead(), gitTag: 'v2.0.0'};
|
||||
const nextRelease = {type: 'major', version: '2.0.0', gitHead: await getGitHead({cwd}), gitTag: 'v2.0.0'};
|
||||
const analyzeCommits = stub().resolves(nextRelease.type);
|
||||
const verifyRelease = stub().resolves();
|
||||
const generateNotes1 = stub().resolves();
|
||||
@ -661,11 +642,11 @@ test.serial('Accept "undefined" value returned by the "generateNotes" plugins',
|
||||
fail: stub().resolves(),
|
||||
};
|
||||
|
||||
const semanticRelease = proxyquire('..', {
|
||||
const semanticRelease = requireNoCache('..', {
|
||||
'./lib/logger': t.context.logger,
|
||||
'env-ci': () => ({isCi: true, branch: 'master', isPr: false}),
|
||||
});
|
||||
t.truthy(await semanticRelease(options));
|
||||
t.truthy(await semanticRelease(options, {cwd, env: {}}));
|
||||
|
||||
t.is(analyzeCommits.callCount, 1);
|
||||
t.deepEqual(analyzeCommits.args[0][1].lastRelease, lastRelease);
|
||||
@ -684,50 +665,49 @@ test.serial('Accept "undefined" value returned by the "generateNotes" plugins',
|
||||
t.is(publish.args[0][1].nextRelease.notes, notes2);
|
||||
});
|
||||
|
||||
test.serial('Returns falsy value if triggered by a PR', async t => {
|
||||
test('Returns falsy value if triggered by a PR', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
const repositoryUrl = await gitRepo(true);
|
||||
const {cwd, repositoryUrl} = await gitRepo(true);
|
||||
|
||||
const semanticRelease = proxyquire('..', {
|
||||
const semanticRelease = requireNoCache('..', {
|
||||
'./lib/logger': t.context.logger,
|
||||
'env-ci': () => ({isCi: true, branch: 'master', isPr: true}),
|
||||
});
|
||||
|
||||
t.falsy(await semanticRelease({repositoryUrl}));
|
||||
t.falsy(await semanticRelease({cwd, repositoryUrl}, {cwd, env: {}}));
|
||||
t.is(
|
||||
t.context.log.args[t.context.log.args.length - 1][0],
|
||||
"This run was triggered by a pull request and therefore a new version won't be published."
|
||||
);
|
||||
});
|
||||
|
||||
test.serial('Returns falsy value if triggered on an outdated clone', async t => {
|
||||
test('Returns falsy value if triggered on an outdated clone', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
const repositoryUrl = await gitRepo(true);
|
||||
const repoDir = process.cwd();
|
||||
let {cwd, repositoryUrl} = await gitRepo(true);
|
||||
const repoDir = cwd;
|
||||
// Add commits to the master branch
|
||||
await gitCommits(['First']);
|
||||
await gitCommits(['Second']);
|
||||
await gitPush();
|
||||
await gitShallowClone(repositoryUrl);
|
||||
await gitCommits(['Third']);
|
||||
await gitPush();
|
||||
process.chdir(repoDir);
|
||||
await gitCommits(['First'], {cwd});
|
||||
await gitCommits(['Second'], {cwd});
|
||||
await gitPush(repositoryUrl, 'master', {cwd});
|
||||
cwd = await gitShallowClone(repositoryUrl);
|
||||
await gitCommits(['Third'], {cwd});
|
||||
await gitPush(repositoryUrl, 'master', {cwd});
|
||||
|
||||
const semanticRelease = proxyquire('..', {
|
||||
const semanticRelease = requireNoCache('..', {
|
||||
'./lib/logger': t.context.logger,
|
||||
'env-ci': () => ({isCi: true, branch: 'master', isPr: false}),
|
||||
});
|
||||
|
||||
t.falsy(await semanticRelease({repositoryUrl}));
|
||||
t.falsy(await semanticRelease({repositoryUrl}, {cwd: repoDir, env: {}}));
|
||||
t.deepEqual(t.context.log.args[t.context.log.args.length - 1], [
|
||||
"The local branch %s is behind the remote one, therefore a new version won't be published.",
|
||||
'master',
|
||||
]);
|
||||
});
|
||||
|
||||
test.serial('Returns falsy value if not running from the configured branch', async t => {
|
||||
test('Returns falsy value if not running from the configured branch', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
const repositoryUrl = await gitRepo(true);
|
||||
const {cwd, repositoryUrl} = await gitRepo(true);
|
||||
const options = {
|
||||
branch: 'master',
|
||||
repositoryUrl,
|
||||
@ -741,24 +721,24 @@ test.serial('Returns falsy value if not running from the configured branch', asy
|
||||
fail: stub().resolves(),
|
||||
};
|
||||
|
||||
const semanticRelease = proxyquire('..', {
|
||||
const semanticRelease = requireNoCache('..', {
|
||||
'./lib/logger': t.context.logger,
|
||||
'env-ci': () => ({isCi: true, branch: 'other-branch', isPr: false}),
|
||||
});
|
||||
|
||||
t.falsy(await semanticRelease(options));
|
||||
t.falsy(await semanticRelease(options, {cwd, env: {}}));
|
||||
t.is(
|
||||
t.context.log.args[1][0],
|
||||
'This test run was triggered on the branch other-branch, while semantic-release is configured to only publish from master, therefore a new version won’t be published.'
|
||||
);
|
||||
});
|
||||
|
||||
test.serial('Returns falsy value if there is no relevant changes', async t => {
|
||||
test('Returns falsy value if there are no relevant changes', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
const repositoryUrl = await gitRepo(true);
|
||||
const {cwd, repositoryUrl} = await gitRepo(true);
|
||||
// Add commits to the master branch
|
||||
await gitCommits(['First']);
|
||||
await gitPush();
|
||||
await gitCommits(['First'], {cwd});
|
||||
await gitPush(repositoryUrl, 'master', {cwd});
|
||||
|
||||
const analyzeCommits = stub().resolves();
|
||||
const verifyRelease = stub().resolves();
|
||||
@ -778,12 +758,12 @@ test.serial('Returns falsy value if there is no relevant changes', async t => {
|
||||
fail: stub().resolves(),
|
||||
};
|
||||
|
||||
const semanticRelease = proxyquire('..', {
|
||||
const semanticRelease = requireNoCache('..', {
|
||||
'./lib/logger': t.context.logger,
|
||||
'env-ci': () => ({isCi: true, branch: 'master', isPr: false}),
|
||||
});
|
||||
|
||||
t.falsy(await semanticRelease(options));
|
||||
t.falsy(await semanticRelease(options, {cwd, env: {}}));
|
||||
t.is(analyzeCommits.callCount, 1);
|
||||
t.is(verifyRelease.callCount, 0);
|
||||
t.is(generateNotes.callCount, 0);
|
||||
@ -794,11 +774,12 @@ test.serial('Returns falsy value if there is no relevant changes', async t => {
|
||||
);
|
||||
});
|
||||
|
||||
test.serial('Exclude commits with [skip release] or [release skip] from analysis', async t => {
|
||||
test('Exclude commits with [skip release] or [release skip] from analysis', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
const repositoryUrl = await gitRepo(true);
|
||||
const {cwd, repositoryUrl} = await gitRepo(true);
|
||||
// Add commits to the master branch
|
||||
const commits = await gitCommits([
|
||||
const commits = await gitCommits(
|
||||
[
|
||||
'Test commit',
|
||||
'Test commit [skip release]',
|
||||
'Test commit [release skip]',
|
||||
@ -807,8 +788,10 @@ test.serial('Exclude commits with [skip release] or [release skip] from analysis
|
||||
'Test commit [skip release]',
|
||||
'Test commit\n\n commit body\n[skip release]',
|
||||
'Test commit\n\n commit body\n[release skip]',
|
||||
]);
|
||||
await gitPush();
|
||||
],
|
||||
{cwd}
|
||||
);
|
||||
await gitPush(repositoryUrl, 'master', {cwd});
|
||||
const analyzeCommits = stub().resolves();
|
||||
const config = {branch: 'master', repositoryUrl, globalOpt: 'global'};
|
||||
const options = {
|
||||
@ -823,47 +806,19 @@ test.serial('Exclude commits with [skip release] or [release skip] from analysis
|
||||
fail: stub().resolves(),
|
||||
};
|
||||
|
||||
const semanticRelease = proxyquire('..', {
|
||||
const semanticRelease = requireNoCache('..', {
|
||||
'./lib/logger': t.context.logger,
|
||||
'env-ci': () => ({isCi: true, branch: 'master', isPr: false}),
|
||||
});
|
||||
await semanticRelease(options);
|
||||
await semanticRelease(options, {cwd, env: {}});
|
||||
|
||||
t.is(analyzeCommits.callCount, 1);
|
||||
|
||||
t.is(analyzeCommits.args[0][1].commits.length, 2);
|
||||
t.deepEqual(analyzeCommits.args[0][1].commits[0], commits[commits.length - 1]);
|
||||
});
|
||||
|
||||
test.serial('Hide sensitive environment variable values from the logs', async t => {
|
||||
process.env.MY_TOKEN = 'secret token';
|
||||
const repositoryUrl = await gitRepo(true);
|
||||
|
||||
const options = {
|
||||
branch: 'master',
|
||||
repositoryUrl,
|
||||
verifyConditions: async (pluginConfig, {logger}) => {
|
||||
console.log(`Console: The token ${process.env.MY_TOKEN} is invalid`);
|
||||
logger.log(`Log: The token ${process.env.MY_TOKEN} is invalid`);
|
||||
logger.error(`Error: The token ${process.env.MY_TOKEN} is invalid`);
|
||||
throw new Error(`Invalid token ${process.env.MY_TOKEN}`);
|
||||
},
|
||||
};
|
||||
const semanticRelease = proxyquire('..', {
|
||||
'env-ci': () => ({isCi: true, branch: 'master', isPr: false}),
|
||||
});
|
||||
|
||||
await t.throws(semanticRelease(options));
|
||||
|
||||
t.regex(t.context.stdout.args[t.context.stdout.args.length - 2][0], /Console: The token \[secure\] is invalid/);
|
||||
t.regex(t.context.stdout.args[t.context.stdout.args.length - 1][0], /Log: The token \[secure\] is invalid/);
|
||||
t.regex(t.context.stderr.args[0][0], /Error: The token \[secure\] is invalid/);
|
||||
t.regex(t.context.stderr.args[1][0], /Invalid token \[secure\]/);
|
||||
});
|
||||
|
||||
test.serial('Log both plugins errors and errors thrown by "fail" plugin', async t => {
|
||||
process.env.MY_TOKEN = 'secret token';
|
||||
const repositoryUrl = await gitRepo(true);
|
||||
test('Log both plugins errors and errors thrown by "fail" plugin', async t => {
|
||||
const {cwd, repositoryUrl} = await gitRepo(true);
|
||||
const pluginError = new SemanticReleaseError('Plugin error', 'ERR');
|
||||
const failError1 = new Error('Fail error 1');
|
||||
const failError2 = new Error('Fail error 2');
|
||||
@ -874,21 +829,20 @@ test.serial('Log both plugins errors and errors thrown by "fail" plugin', async
|
||||
verifyConditions: stub().rejects(pluginError),
|
||||
fail: [stub().rejects(failError1), stub().rejects(failError2)],
|
||||
};
|
||||
const semanticRelease = proxyquire('..', {
|
||||
const semanticRelease = requireNoCache('..', {
|
||||
'./lib/logger': t.context.logger,
|
||||
'env-ci': () => ({isCi: true, branch: 'master', isPr: false}),
|
||||
});
|
||||
|
||||
await t.throws(semanticRelease(options));
|
||||
await t.throws(semanticRelease(options, {cwd, env: {}}));
|
||||
|
||||
t.is(t.context.error.args[t.context.error.args.length - 2][1], failError1);
|
||||
t.is(t.context.error.args[t.context.error.args.length - 1][1], failError2);
|
||||
t.deepEqual(t.context.log.args[t.context.log.args.length - 1], ['%s Plugin error', 'ERR']);
|
||||
});
|
||||
|
||||
test.serial('Call "fail" only if a plugin returns a SemanticReleaseError', async t => {
|
||||
process.env.MY_TOKEN = 'secret token';
|
||||
const repositoryUrl = await gitRepo(true);
|
||||
test('Call "fail" only if a plugin returns a SemanticReleaseError', async t => {
|
||||
const {cwd, repositoryUrl} = await gitRepo(true);
|
||||
const pluginError = new Error('Plugin error');
|
||||
const fail = stub().resolves();
|
||||
|
||||
@ -898,42 +852,42 @@ test.serial('Call "fail" only if a plugin returns a SemanticReleaseError', async
|
||||
verifyConditions: stub().rejects(pluginError),
|
||||
fail,
|
||||
};
|
||||
const semanticRelease = proxyquire('..', {
|
||||
const semanticRelease = requireNoCache('..', {
|
||||
'./lib/logger': t.context.logger,
|
||||
'env-ci': () => ({isCi: true, branch: 'master', isPr: false}),
|
||||
});
|
||||
|
||||
await t.throws(semanticRelease(options));
|
||||
await t.throws(semanticRelease(options, {cwd, env: {}}));
|
||||
|
||||
t.true(fail.notCalled);
|
||||
t.is(t.context.error.args[t.context.error.args.length - 1][1], pluginError);
|
||||
});
|
||||
|
||||
test.serial('Throw SemanticReleaseError if repositoryUrl is not set and cannot be found from repo config', async t => {
|
||||
test('Throw SemanticReleaseError if repositoryUrl is not set and cannot be found from repo config', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
await gitRepo();
|
||||
const {cwd} = await gitRepo();
|
||||
|
||||
const semanticRelease = proxyquire('..', {
|
||||
const semanticRelease = requireNoCache('..', {
|
||||
'./lib/logger': t.context.logger,
|
||||
'env-ci': () => ({isCi: true, branch: 'master', isPr: false}),
|
||||
});
|
||||
const errors = [...(await t.throws(semanticRelease()))];
|
||||
const errors = [...(await t.throws(semanticRelease({}, {cwd, env: {}})))];
|
||||
|
||||
// Verify error code and type
|
||||
t.is(errors[0].code, 'ENOREPOURL');
|
||||
t.is(errors[0].name, 'SemanticReleaseError');
|
||||
});
|
||||
|
||||
test.serial('Throw an Error if plugin returns an unexpected value', async t => {
|
||||
test('Throw an Error if plugin returns an unexpected value', async t => {
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
const repositoryUrl = await gitRepo(true);
|
||||
const {cwd, repositoryUrl} = await gitRepo(true);
|
||||
// Add commits to the master branch
|
||||
await gitCommits(['First']);
|
||||
await gitCommits(['First'], {cwd});
|
||||
// Create the tag corresponding to version 1.0.0
|
||||
await gitTagVersion('v1.0.0');
|
||||
await gitTagVersion('v1.0.0', undefined, {cwd});
|
||||
// Add new commits to the master branch
|
||||
await gitCommits(['Second']);
|
||||
await gitPush();
|
||||
await gitCommits(['Second'], {cwd});
|
||||
await gitPush(repositoryUrl, 'master', {cwd});
|
||||
|
||||
const verifyConditions = stub().resolves();
|
||||
const analyzeCommits = stub().resolves('string');
|
||||
@ -947,24 +901,23 @@ test.serial('Throw an Error if plugin returns an unexpected value', async t => {
|
||||
fail: stub().resolves(),
|
||||
};
|
||||
|
||||
const semanticRelease = proxyquire('..', {
|
||||
const semanticRelease = requireNoCache('..', {
|
||||
'./lib/logger': t.context.logger,
|
||||
'env-ci': () => ({isCi: true, branch: 'master', isPr: false}),
|
||||
});
|
||||
const error = await t.throws(semanticRelease(options), Error);
|
||||
|
||||
const error = await t.throws(semanticRelease(options, {cwd, env: {}}), Error);
|
||||
t.regex(error.details, /string/);
|
||||
});
|
||||
|
||||
test.serial('Get all commits including the ones not in the shallow clone', async t => {
|
||||
const repositoryUrl = await gitRepo(true);
|
||||
await gitTagVersion('v1.0.0');
|
||||
await gitCommits(['First', 'Second', 'Third']);
|
||||
await gitPush(repositoryUrl, 'master');
|
||||
test('Get all commits including the ones not in the shallow clone', async t => {
|
||||
let {cwd, repositoryUrl} = await gitRepo(true);
|
||||
await gitTagVersion('v1.0.0', undefined, {cwd});
|
||||
await gitCommits(['First', 'Second', 'Third'], {cwd});
|
||||
await gitPush(repositoryUrl, 'master', {cwd});
|
||||
|
||||
await gitShallowClone(repositoryUrl);
|
||||
cwd = await gitShallowClone(repositoryUrl);
|
||||
|
||||
const nextRelease = {type: 'major', version: '2.0.0', gitHead: await getGitHead(), gitTag: 'v2.0.0'};
|
||||
const nextRelease = {type: 'major', version: '2.0.0', gitHead: await getGitHead({cwd}), gitTag: 'v2.0.0'};
|
||||
const notes = 'Release notes';
|
||||
const analyzeCommits = stub().resolves(nextRelease.type);
|
||||
|
||||
@ -981,11 +934,11 @@ test.serial('Get all commits including the ones not in the shallow clone', async
|
||||
fail: stub().resolves(),
|
||||
};
|
||||
|
||||
const semanticRelease = proxyquire('..', {
|
||||
const semanticRelease = requireNoCache('..', {
|
||||
'./lib/logger': t.context.logger,
|
||||
'env-ci': () => ({isCi: true, branch: 'master', isPr: false}),
|
||||
});
|
||||
t.truthy(await semanticRelease(options));
|
||||
t.truthy(await semanticRelease(options, {cwd, env: {}}));
|
||||
|
||||
t.is(analyzeCommits.args[0][1].commits.length, 3);
|
||||
});
|
||||
|
@ -1,15 +1,19 @@
|
||||
import path from 'path';
|
||||
import proxyquire from 'proxyquire';
|
||||
import test from 'ava';
|
||||
import {escapeRegExp} from 'lodash';
|
||||
import {writeJson, readJson} from 'fs-extra';
|
||||
import {stub} from 'sinon';
|
||||
import execa from 'execa';
|
||||
import {SECRET_REPLACEMENT} from '../lib/definitions/constants';
|
||||
import {gitHead as getGitHead, gitTagHead, gitRepo, gitCommits, gitRemoteTagHead, gitPush} from './helpers/git-utils';
|
||||
import gitbox from './helpers/gitbox';
|
||||
import mockServer from './helpers/mockserver';
|
||||
import npmRegistry from './helpers/npm-registry';
|
||||
import semanticRelease from '..';
|
||||
|
||||
/* eslint camelcase: ["error", {properties: "never"}] */
|
||||
|
||||
const requireNoCache = proxyquire.noPreserveCache();
|
||||
|
||||
// Environment variables used with semantic-release cli (similar to what a user would setup)
|
||||
const env = {
|
||||
GH_TOKEN: gitbox.gitCredential,
|
||||
@ -17,86 +21,40 @@ const env = {
|
||||
NPM_EMAIL: 'integration@test.com',
|
||||
NPM_USERNAME: 'integration',
|
||||
NPM_PASSWORD: 'suchsecure',
|
||||
TRAVIS: 'true',
|
||||
CI: 'true',
|
||||
TRAVIS_BRANCH: 'master',
|
||||
TRAVIS_PULL_REQUEST: 'false',
|
||||
};
|
||||
// Environment variables used only for the local npm command used to do verification
|
||||
const testEnv = Object.assign({}, process.env, {
|
||||
const testEnv = {
|
||||
...process.env,
|
||||
npm_config_registry: npmRegistry.url,
|
||||
NPM_EMAIL: 'integration@test.com',
|
||||
LEGACY_TOKEN: Buffer.from(`${process.env.NPM_USERNAME}:${process.env.NPM_PASSWORD}`, 'utf8').toString('base64'),
|
||||
});
|
||||
// Save the current process.env
|
||||
const envBackup = Object.assign({}, process.env);
|
||||
};
|
||||
|
||||
const cli = require.resolve('../bin/semantic-release');
|
||||
const pluginError = require.resolve('./fixtures/plugin-error');
|
||||
const pluginInheritedError = require.resolve('./fixtures/plugin-error-inherited');
|
||||
// Save the current working diretory
|
||||
const cwd = process.cwd();
|
||||
// Disable logs during tests
|
||||
stub(process.stdout, 'write');
|
||||
stub(process.stderr, 'write');
|
||||
const pluginLogEnv = require.resolve('./fixtures/plugin-log-env');
|
||||
|
||||
test.before(async () => {
|
||||
// Start the Git server
|
||||
await gitbox.start();
|
||||
// Start the local NPM registry
|
||||
await npmRegistry.start();
|
||||
// Start Mock Server
|
||||
await mockServer.start();
|
||||
});
|
||||
|
||||
test.beforeEach(() => {
|
||||
// Delete environment variables that could have been set on the machine running the tests
|
||||
delete process.env.NPM_TOKEN;
|
||||
delete process.env.NPM_USERNAME;
|
||||
delete process.env.NPM_PASSWORD;
|
||||
delete process.env.NPM_EMAIL;
|
||||
delete process.env.GH_URL;
|
||||
delete process.env.GITHUB_URL;
|
||||
delete process.env.GH_PREFIX;
|
||||
delete process.env.GITHUB_PREFIX;
|
||||
delete process.env.GIT_CREDENTIALS;
|
||||
delete process.env.GH_TOKEN;
|
||||
delete process.env.GITHUB_TOKEN;
|
||||
delete process.env.GL_TOKEN;
|
||||
delete process.env.GITLAB_TOKEN;
|
||||
|
||||
process.env.TRAVIS = 'true';
|
||||
process.env.CI = 'true';
|
||||
process.env.TRAVIS_BRANCH = 'master';
|
||||
process.env.TRAVIS_PULL_REQUEST = 'false';
|
||||
|
||||
// Delete all `npm_config` environment variable set by CI as they take precedence over the `.npmrc` because the process that runs the tests is started before the `.npmrc` is created
|
||||
for (let i = 0, keys = Object.keys(process.env); i < keys.length; i++) {
|
||||
if (keys[i].startsWith('npm_')) {
|
||||
delete process.env[keys[i]];
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
test.afterEach.always(() => {
|
||||
// Restore process.env
|
||||
process.env = envBackup;
|
||||
// Restore the current working directory
|
||||
process.chdir(cwd);
|
||||
await Promise.all([gitbox.start(), npmRegistry.start(), mockServer.start()]);
|
||||
});
|
||||
|
||||
test.after.always(async () => {
|
||||
// Stop the Git server
|
||||
await gitbox.stop();
|
||||
// Stop the local NPM registry
|
||||
await npmRegistry.stop();
|
||||
// Stop Mock Server
|
||||
await mockServer.stop();
|
||||
await Promise.all([gitbox.stop(), npmRegistry.stop(), mockServer.stop()]);
|
||||
});
|
||||
|
||||
test.serial('Release patch, minor and major versions', async t => {
|
||||
test('Release patch, minor and major versions', async t => {
|
||||
const packageName = 'test-release';
|
||||
const owner = 'git';
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
t.log('Create git repository and package.json');
|
||||
const {repositoryUrl, authUrl} = await gitbox.createRepo(packageName);
|
||||
const {cwd, repositoryUrl, authUrl} = await gitbox.createRepo(packageName);
|
||||
// Create package.json in repository root
|
||||
await writeJson('./package.json', {
|
||||
await writeJson(path.resolve(cwd, 'package.json'), {
|
||||
name: packageName,
|
||||
version: '0.0.0-dev',
|
||||
repository: {url: repositoryUrl},
|
||||
@ -104,19 +62,18 @@ test.serial('Release patch, minor and major versions', async t => {
|
||||
release: {success: false, fail: false},
|
||||
});
|
||||
// Create a npm-shrinkwrap.json file
|
||||
await execa('npm', ['shrinkwrap'], {env: testEnv});
|
||||
await execa('npm', ['shrinkwrap'], {env: testEnv, cwd});
|
||||
|
||||
/* No release */
|
||||
|
||||
let verifyMock = await mockServer.mock(
|
||||
`/repos/${owner}/${packageName}`,
|
||||
{headers: [{name: 'Authorization', values: [`token ${env.GH_TOKEN}`]}]},
|
||||
{body: {permissions: {push: true}}, method: 'GET'}
|
||||
);
|
||||
t.log('Commit a chore');
|
||||
await gitCommits(['chore: Init repository']);
|
||||
await gitCommits(['chore: Init repository'], {cwd});
|
||||
t.log('$ semantic-release');
|
||||
let {stdout, code} = await execa(cli, [], {env});
|
||||
let {stdout, code} = await execa(cli, [], {env, cwd});
|
||||
t.regex(stdout, /There are no relevant changes, so no new version is released/);
|
||||
t.is(code, 0);
|
||||
|
||||
@ -137,26 +94,26 @@ test.serial('Release patch, minor and major versions', async t => {
|
||||
);
|
||||
|
||||
t.log('Commit a feature');
|
||||
await gitCommits(['feat: Initial commit']);
|
||||
await gitCommits(['feat: Initial commit'], {cwd});
|
||||
t.log('$ semantic-release');
|
||||
({stdout, code} = await execa(cli, [], {env}));
|
||||
({stdout, code} = await execa(cli, [], {env, cwd}));
|
||||
t.regex(stdout, new RegExp(`Published GitHub release: release-url/${version}`));
|
||||
t.regex(stdout, new RegExp(`Publishing version ${version} to npm registry`));
|
||||
t.is(code, 0);
|
||||
|
||||
// Verify package.json and npm-shrinkwrap.json have been updated
|
||||
t.is((await readJson('./package.json')).version, version);
|
||||
t.is((await readJson('./npm-shrinkwrap.json')).version, version);
|
||||
t.is((await readJson(path.resolve(cwd, 'package.json'))).version, version);
|
||||
t.is((await readJson(path.resolve(cwd, 'npm-shrinkwrap.json'))).version, version);
|
||||
|
||||
// Retrieve the published package from the registry and check version and gitHead
|
||||
let [, releasedVersion, releasedGitHead] = /^version = '(.+)'\s+gitHead = '(.+)'$/.exec(
|
||||
(await execa('npm', ['show', packageName, 'version', 'gitHead'], {env: testEnv})).stdout
|
||||
(await execa('npm', ['show', packageName, 'version', 'gitHead'], {env: testEnv, cwd})).stdout
|
||||
);
|
||||
let gitHead = await getGitHead();
|
||||
let gitHead = await getGitHead({cwd});
|
||||
t.is(releasedVersion, version);
|
||||
t.is(releasedGitHead, gitHead);
|
||||
t.is(await gitTagHead(`v${version}`), gitHead);
|
||||
t.is(await gitRemoteTagHead(authUrl, `v${version}`), gitHead);
|
||||
t.is(await gitTagHead(`v${version}`, {cwd}), gitHead);
|
||||
t.is(await gitRemoteTagHead(authUrl, `v${version}`, {cwd}), gitHead);
|
||||
t.log(`+ released ${releasedVersion} with gitHead ${releasedGitHead}`);
|
||||
|
||||
await mockServer.verify(verifyMock);
|
||||
@ -179,26 +136,26 @@ test.serial('Release patch, minor and major versions', async t => {
|
||||
);
|
||||
|
||||
t.log('Commit a fix');
|
||||
await gitCommits(['fix: bar']);
|
||||
await gitCommits(['fix: bar'], {cwd});
|
||||
t.log('$ semantic-release');
|
||||
({stdout, code} = await execa(cli, [], {env}));
|
||||
({stdout, code} = await execa(cli, [], {env, cwd}));
|
||||
t.regex(stdout, new RegExp(`Published GitHub release: release-url/${version}`));
|
||||
t.regex(stdout, new RegExp(`Publishing version ${version} to npm registry`));
|
||||
t.is(code, 0);
|
||||
|
||||
// Verify package.json and npm-shrinkwrap.json have been updated
|
||||
t.is((await readJson('./package.json')).version, version);
|
||||
t.is((await readJson('./npm-shrinkwrap.json')).version, version);
|
||||
t.is((await readJson(path.resolve(cwd, 'package.json'))).version, version);
|
||||
t.is((await readJson(path.resolve(cwd, 'npm-shrinkwrap.json'))).version, version);
|
||||
|
||||
// Retrieve the published package from the registry and check version and gitHead
|
||||
[, releasedVersion, releasedGitHead] = /^version = '(.+)'\s+gitHead = '(.+)'$/.exec(
|
||||
(await execa('npm', ['show', packageName, 'version', 'gitHead'], {env: testEnv})).stdout
|
||||
(await execa('npm', ['show', packageName, 'version', 'gitHead'], {env: testEnv, cwd})).stdout
|
||||
);
|
||||
gitHead = await getGitHead();
|
||||
gitHead = await getGitHead({cwd});
|
||||
t.is(releasedVersion, version);
|
||||
t.is(releasedGitHead, gitHead);
|
||||
t.is(await gitTagHead(`v${version}`), gitHead);
|
||||
t.is(await gitRemoteTagHead(authUrl, `v${version}`), gitHead);
|
||||
t.is(await gitTagHead(`v${version}`, {cwd}), gitHead);
|
||||
t.is(await gitRemoteTagHead(authUrl, `v${version}`, {cwd}), gitHead);
|
||||
t.log(`+ released ${releasedVersion} with gitHead ${releasedGitHead}`);
|
||||
|
||||
await mockServer.verify(verifyMock);
|
||||
@ -221,26 +178,26 @@ test.serial('Release patch, minor and major versions', async t => {
|
||||
);
|
||||
|
||||
t.log('Commit a feature');
|
||||
await gitCommits(['feat: baz']);
|
||||
await gitCommits(['feat: baz'], {cwd});
|
||||
t.log('$ semantic-release');
|
||||
({stdout, code} = await execa(cli, [], {env}));
|
||||
({stdout, code} = await execa(cli, [], {env, cwd}));
|
||||
t.regex(stdout, new RegExp(`Published GitHub release: release-url/${version}`));
|
||||
t.regex(stdout, new RegExp(`Publishing version ${version} to npm registry`));
|
||||
t.is(code, 0);
|
||||
|
||||
// Verify package.json and npm-shrinkwrap.json have been updated
|
||||
t.is((await readJson('./package.json')).version, version);
|
||||
t.is((await readJson('./npm-shrinkwrap.json')).version, version);
|
||||
t.is((await readJson(path.resolve(cwd, 'package.json'))).version, version);
|
||||
t.is((await readJson(path.resolve(cwd, 'npm-shrinkwrap.json'))).version, version);
|
||||
|
||||
// Retrieve the published package from the registry and check version and gitHead
|
||||
[, releasedVersion, releasedGitHead] = /^version = '(.+)'\s+gitHead = '(.+)'$/.exec(
|
||||
(await execa('npm', ['show', packageName, 'version', 'gitHead'], {env: testEnv})).stdout
|
||||
(await execa('npm', ['show', packageName, 'version', 'gitHead'], {env: testEnv, cwd})).stdout
|
||||
);
|
||||
gitHead = await getGitHead();
|
||||
gitHead = await getGitHead({cwd});
|
||||
t.is(releasedVersion, version);
|
||||
t.is(releasedGitHead, gitHead);
|
||||
t.is(await gitTagHead(`v${version}`), gitHead);
|
||||
t.is(await gitRemoteTagHead(authUrl, `v${version}`), gitHead);
|
||||
t.is(await gitTagHead(`v${version}`, {cwd}), gitHead);
|
||||
t.is(await gitRemoteTagHead(authUrl, `v${version}`, {cwd}), gitHead);
|
||||
t.log(`+ released ${releasedVersion} with gitHead ${releasedGitHead}`);
|
||||
|
||||
await mockServer.verify(verifyMock);
|
||||
@ -263,97 +220,97 @@ test.serial('Release patch, minor and major versions', async t => {
|
||||
);
|
||||
|
||||
t.log('Commit a breaking change');
|
||||
await gitCommits(['feat: foo\n\n BREAKING CHANGE: bar']);
|
||||
await gitCommits(['feat: foo\n\n BREAKING CHANGE: bar'], {cwd});
|
||||
t.log('$ semantic-release');
|
||||
({stdout, code} = await execa(cli, [], {env}));
|
||||
({stdout, code} = await execa(cli, [], {env, cwd}));
|
||||
t.regex(stdout, new RegExp(`Published GitHub release: release-url/${version}`));
|
||||
t.regex(stdout, new RegExp(`Publishing version ${version} to npm registry`));
|
||||
t.is(code, 0);
|
||||
|
||||
// Verify package.json and npm-shrinkwrap.json have been updated
|
||||
t.is((await readJson('./package.json')).version, version);
|
||||
t.is((await readJson('./npm-shrinkwrap.json')).version, version);
|
||||
t.is((await readJson(path.resolve(cwd, 'package.json'))).version, version);
|
||||
t.is((await readJson(path.resolve(cwd, 'npm-shrinkwrap.json'))).version, version);
|
||||
|
||||
// Retrieve the published package from the registry and check version and gitHead
|
||||
[, releasedVersion, releasedGitHead] = /^version = '(.+)'\s+gitHead = '(.+)'$/.exec(
|
||||
(await execa('npm', ['show', packageName, 'version', 'gitHead'], {env: testEnv})).stdout
|
||||
(await execa('npm', ['show', packageName, 'version', 'gitHead'], {env: testEnv, cwd})).stdout
|
||||
);
|
||||
gitHead = await getGitHead();
|
||||
gitHead = await getGitHead({cwd});
|
||||
t.is(releasedVersion, version);
|
||||
t.is(releasedGitHead, gitHead);
|
||||
t.is(await gitTagHead(`v${version}`), gitHead);
|
||||
t.is(await gitRemoteTagHead(authUrl, `v${version}`), gitHead);
|
||||
t.is(await gitTagHead(`v${version}`, {cwd}), gitHead);
|
||||
t.is(await gitRemoteTagHead(authUrl, `v${version}`, {cwd}), gitHead);
|
||||
t.log(`+ released ${releasedVersion} with gitHead ${releasedGitHead}`);
|
||||
|
||||
await mockServer.verify(verifyMock);
|
||||
await mockServer.verify(createReleaseMock);
|
||||
});
|
||||
|
||||
test.serial('Exit with 1 if a plugin is not found', async t => {
|
||||
test('Exit with 1 if a plugin is not found', async t => {
|
||||
const packageName = 'test-plugin-not-found';
|
||||
const owner = 'test-repo';
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
t.log('Create git repository');
|
||||
await gitRepo();
|
||||
await writeJson('./package.json', {
|
||||
const {cwd} = await gitRepo();
|
||||
await writeJson(path.resolve(cwd, 'package.json'), {
|
||||
name: packageName,
|
||||
version: '0.0.0-dev',
|
||||
repository: {url: `git+https://github.com/${owner}/${packageName}`},
|
||||
release: {analyzeCommits: 'non-existing-path', success: false, fail: false},
|
||||
});
|
||||
|
||||
const {code, stderr} = await t.throws(execa(cli, [], {env}));
|
||||
const {code, stderr} = await t.throws(execa(cli, [], {env, cwd}));
|
||||
t.is(code, 1);
|
||||
t.regex(stderr, /Cannot find module/);
|
||||
});
|
||||
|
||||
test.serial('Exit with 1 if a shareable config is not found', async t => {
|
||||
test('Exit with 1 if a shareable config is not found', async t => {
|
||||
const packageName = 'test-config-not-found';
|
||||
const owner = 'test-repo';
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
t.log('Create git repository');
|
||||
await gitRepo();
|
||||
await writeJson('./package.json', {
|
||||
const {cwd} = await gitRepo();
|
||||
await writeJson(path.resolve(cwd, 'package.json'), {
|
||||
name: packageName,
|
||||
version: '0.0.0-dev',
|
||||
repository: {url: `git+https://github.com/${owner}/${packageName}`},
|
||||
release: {extends: 'non-existing-path', success: false, fail: false},
|
||||
});
|
||||
|
||||
const {code, stderr} = await t.throws(execa(cli, [], {env}));
|
||||
const {code, stderr} = await t.throws(execa(cli, [], {env, cwd}));
|
||||
t.is(code, 1);
|
||||
t.regex(stderr, /Cannot find module/);
|
||||
});
|
||||
|
||||
test.serial('Exit with 1 if a shareable config reference a not found plugin', async t => {
|
||||
test('Exit with 1 if a shareable config reference a not found plugin', async t => {
|
||||
const packageName = 'test-config-ref-not-found';
|
||||
const owner = 'test-repo';
|
||||
const shareable = {analyzeCommits: 'non-existing-path'};
|
||||
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
t.log('Create git repository');
|
||||
await gitRepo();
|
||||
await writeJson('./package.json', {
|
||||
const {cwd} = await gitRepo();
|
||||
await writeJson(path.resolve(cwd, 'package.json'), {
|
||||
name: packageName,
|
||||
version: '0.0.0-dev',
|
||||
repository: {url: `git+https://github.com/${owner}/${packageName}`},
|
||||
release: {extends: './shareable.json', success: false, fail: false},
|
||||
});
|
||||
await writeJson('./shareable.json', shareable);
|
||||
await writeJson(path.resolve(cwd, 'shareable.json'), shareable);
|
||||
|
||||
const {code, stderr} = await t.throws(execa(cli, [], {env}));
|
||||
const {code, stderr} = await t.throws(execa(cli, [], {env, cwd}));
|
||||
t.is(code, 1);
|
||||
t.regex(stderr, /Cannot find module/);
|
||||
});
|
||||
|
||||
test.serial('Dry-run', async t => {
|
||||
test('Dry-run', async t => {
|
||||
const packageName = 'test-dry-run';
|
||||
const owner = 'git';
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
t.log('Create git repository and package.json');
|
||||
const {repositoryUrl} = await gitbox.createRepo(packageName);
|
||||
const {cwd, repositoryUrl} = await gitbox.createRepo(packageName);
|
||||
// Create package.json in repository root
|
||||
await writeJson('./package.json', {
|
||||
await writeJson(path.resolve(cwd, 'package.json'), {
|
||||
name: packageName,
|
||||
version: '0.0.0-dev',
|
||||
repository: {url: repositoryUrl},
|
||||
@ -369,29 +326,30 @@ test.serial('Dry-run', async t => {
|
||||
);
|
||||
const version = '1.0.0';
|
||||
t.log('Commit a feature');
|
||||
await gitCommits(['feat: Initial commit']);
|
||||
await gitCommits(['feat: Initial commit'], {cwd});
|
||||
t.log('$ semantic-release -d');
|
||||
const {stdout, code} = await execa(cli, ['-d'], {env});
|
||||
const {stdout, code} = await execa(cli, ['-d'], {env, cwd});
|
||||
t.regex(stdout, new RegExp(`There is no previous release, the next release version is ${version}`));
|
||||
t.regex(stdout, new RegExp(`Release note for version ${version}`));
|
||||
t.regex(stdout, /Initial commit/);
|
||||
t.is(code, 0);
|
||||
|
||||
// Verify package.json and has not been modified
|
||||
t.is((await readJson('./package.json')).version, '0.0.0-dev');
|
||||
t.is((await readJson(path.resolve(cwd, 'package.json'))).version, '0.0.0-dev');
|
||||
await mockServer.verify(verifyMock);
|
||||
});
|
||||
|
||||
test.serial('Allow local releases with "noCi" option', async t => {
|
||||
delete process.env.TRAVIS;
|
||||
delete process.env.CI;
|
||||
test('Allow local releases with "noCi" option', async t => {
|
||||
const envNoCi = {...env};
|
||||
delete envNoCi.TRAVIS;
|
||||
delete envNoCi.CI;
|
||||
const packageName = 'test-no-ci';
|
||||
const owner = 'git';
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
t.log('Create git repository and package.json');
|
||||
const {repositoryUrl, authUrl} = await gitbox.createRepo(packageName);
|
||||
const {cwd, repositoryUrl, authUrl} = await gitbox.createRepo(packageName);
|
||||
// Create package.json in repository root
|
||||
await writeJson('./package.json', {
|
||||
await writeJson(path.resolve(cwd, 'package.json'), {
|
||||
name: packageName,
|
||||
version: '0.0.0-dev',
|
||||
repository: {url: repositoryUrl},
|
||||
@ -416,39 +374,39 @@ test.serial('Allow local releases with "noCi" option', async t => {
|
||||
);
|
||||
|
||||
t.log('Commit a feature');
|
||||
await gitCommits(['feat: Initial commit']);
|
||||
await gitCommits(['feat: Initial commit'], {cwd});
|
||||
t.log('$ semantic-release --no-ci');
|
||||
const {stdout, code} = await execa(cli, ['--no-ci'], {env});
|
||||
const {stdout, code} = await execa(cli, ['--no-ci'], {env: envNoCi, cwd});
|
||||
t.regex(stdout, new RegExp(`Published GitHub release: release-url/${version}`));
|
||||
t.regex(stdout, new RegExp(`Publishing version ${version} to npm registry`));
|
||||
t.is(code, 0);
|
||||
|
||||
// Verify package.json and has been updated
|
||||
t.is((await readJson('./package.json')).version, version);
|
||||
t.is((await readJson(path.resolve(cwd, 'package.json'))).version, version);
|
||||
|
||||
// Retrieve the published package from the registry and check version and gitHead
|
||||
const [, releasedVersion, releasedGitHead] = /^version = '(.+)'\s+gitHead = '(.+)'$/.exec(
|
||||
(await execa('npm', ['show', packageName, 'version', 'gitHead'], {env: testEnv})).stdout
|
||||
(await execa('npm', ['show', packageName, 'version', 'gitHead'], {env: testEnv, cwd})).stdout
|
||||
);
|
||||
|
||||
const gitHead = await getGitHead();
|
||||
const gitHead = await getGitHead({cwd});
|
||||
t.is(releasedVersion, version);
|
||||
t.is(releasedGitHead, gitHead);
|
||||
t.is(await gitTagHead(`v${version}`), gitHead);
|
||||
t.is(await gitRemoteTagHead(authUrl, `v${version}`), gitHead);
|
||||
t.is(await gitTagHead(`v${version}`, {cwd}), gitHead);
|
||||
t.is(await gitRemoteTagHead(authUrl, `v${version}`, {cwd}), gitHead);
|
||||
t.log(`+ released ${releasedVersion} with gitHead ${releasedGitHead}`);
|
||||
|
||||
await mockServer.verify(verifyMock);
|
||||
await mockServer.verify(createReleaseMock);
|
||||
});
|
||||
|
||||
test.serial('Pass options via CLI arguments', async t => {
|
||||
test('Pass options via CLI arguments', async t => {
|
||||
const packageName = 'test-cli';
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
t.log('Create git repository and package.json');
|
||||
const {repositoryUrl, authUrl} = await gitbox.createRepo(packageName);
|
||||
const {cwd, repositoryUrl, authUrl} = await gitbox.createRepo(packageName);
|
||||
// Create package.json in repository root
|
||||
await writeJson('./package.json', {
|
||||
await writeJson(path.resolve(cwd, 'package.json'), {
|
||||
name: packageName,
|
||||
version: '0.0.0-dev',
|
||||
repository: {url: repositoryUrl},
|
||||
@ -458,7 +416,7 @@ test.serial('Pass options via CLI arguments', async t => {
|
||||
/* Initial release */
|
||||
const version = '1.0.0';
|
||||
t.log('Commit a feature');
|
||||
await gitCommits(['feat: Initial commit']);
|
||||
await gitCommits(['feat: Initial commit'], {cwd});
|
||||
t.log('$ semantic-release');
|
||||
const {stdout, code} = await execa(
|
||||
cli,
|
||||
@ -473,34 +431,38 @@ test.serial('Pass options via CLI arguments', async t => {
|
||||
false,
|
||||
'--debug',
|
||||
],
|
||||
{env}
|
||||
{env, cwd}
|
||||
);
|
||||
t.regex(stdout, new RegExp(`Publishing version ${version} to npm registry`));
|
||||
t.is(code, 0);
|
||||
|
||||
// Verify package.json and has been updated
|
||||
t.is((await readJson('./package.json')).version, version);
|
||||
t.is((await readJson(path.resolve(cwd, 'package.json'))).version, version);
|
||||
|
||||
// Retrieve the published package from the registry and check version and gitHead
|
||||
const [, releasedVersion, releasedGitHead] = /^version = '(.+)'\s+gitHead = '(.+)'$/.exec(
|
||||
(await execa('npm', ['show', packageName, 'version', 'gitHead'], {env: testEnv})).stdout
|
||||
(await execa('npm', ['show', packageName, 'version', 'gitHead'], {env: testEnv, cwd})).stdout
|
||||
);
|
||||
const gitHead = await getGitHead();
|
||||
const gitHead = await getGitHead({cwd});
|
||||
t.is(releasedVersion, version);
|
||||
t.is(releasedGitHead, gitHead);
|
||||
t.is(await gitTagHead(`v${version}`), gitHead);
|
||||
t.is(await gitRemoteTagHead(authUrl, `v${version}`), gitHead);
|
||||
t.is(await gitTagHead(`v${version}`, {cwd}), gitHead);
|
||||
t.is(await gitRemoteTagHead(authUrl, `v${version}`, {cwd}), gitHead);
|
||||
t.log(`+ released ${releasedVersion} with gitHead ${releasedGitHead}`);
|
||||
});
|
||||
|
||||
test.serial('Run via JS API', async t => {
|
||||
test('Run via JS API', async t => {
|
||||
const semanticRelease = requireNoCache('..', {
|
||||
'./lib/logger': {log: () => {}, error: () => {}, stdout: () => {}},
|
||||
'env-ci': () => ({isCi: true, branch: 'master', isPr: false}),
|
||||
});
|
||||
const packageName = 'test-js-api';
|
||||
const owner = 'git';
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
t.log('Create git repository and package.json');
|
||||
const {repositoryUrl, authUrl} = await gitbox.createRepo(packageName);
|
||||
const {cwd, repositoryUrl, authUrl} = await gitbox.createRepo(packageName);
|
||||
// Create package.json in repository root
|
||||
await writeJson('./package.json', {
|
||||
await writeJson(path.resolve(cwd, 'package.json'), {
|
||||
name: packageName,
|
||||
version: '0.0.0-dev',
|
||||
repository: {url: repositoryUrl},
|
||||
@ -523,38 +485,36 @@ test.serial('Run via JS API', async t => {
|
||||
{body: {html_url: `release-url/${version}`}}
|
||||
);
|
||||
|
||||
process.env = Object.assign(process.env, env);
|
||||
|
||||
t.log('Commit a feature');
|
||||
await gitCommits(['feat: Initial commit']);
|
||||
await gitCommits(['feat: Initial commit'], {cwd});
|
||||
t.log('$ Call semantic-release via API');
|
||||
await semanticRelease({fail: false, success: false});
|
||||
await semanticRelease({fail: false, success: false}, {cwd, env});
|
||||
|
||||
// Verify package.json and has been updated
|
||||
t.is((await readJson('./package.json')).version, version);
|
||||
t.is((await readJson(path.resolve(cwd, 'package.json'))).version, version);
|
||||
|
||||
// Retrieve the published package from the registry and check version and gitHead
|
||||
const [, releasedVersion, releasedGitHead] = /^version = '(.+)'\s+gitHead = '(.+)'$/.exec(
|
||||
(await execa('npm', ['show', packageName, 'version', 'gitHead'], {env: testEnv})).stdout
|
||||
(await execa('npm', ['show', packageName, 'version', 'gitHead'], {env: testEnv, cwd})).stdout
|
||||
);
|
||||
const gitHead = await getGitHead();
|
||||
const gitHead = await getGitHead({cwd});
|
||||
t.is(releasedVersion, version);
|
||||
t.is(releasedGitHead, gitHead);
|
||||
t.is(await gitTagHead(`v${version}`), gitHead);
|
||||
t.is(await gitRemoteTagHead(authUrl, `v${version}`), gitHead);
|
||||
t.is(await gitTagHead(`v${version}`, {cwd}), gitHead);
|
||||
t.is(await gitRemoteTagHead(authUrl, `v${version}`, {cwd}), gitHead);
|
||||
t.log(`+ released ${releasedVersion} with gitHead ${releasedGitHead}`);
|
||||
|
||||
await mockServer.verify(verifyMock);
|
||||
await mockServer.verify(createReleaseMock);
|
||||
});
|
||||
|
||||
test.serial('Log unexpected errors from plugins and exit with 1', async t => {
|
||||
test('Log unexpected errors from plugins and exit with 1', async t => {
|
||||
const packageName = 'test-unexpected-error';
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
t.log('Create git repository and package.json');
|
||||
const {repositoryUrl} = await gitbox.createRepo(packageName);
|
||||
const {cwd, repositoryUrl} = await gitbox.createRepo(packageName);
|
||||
// Create package.json in repository root
|
||||
await writeJson('./package.json', {
|
||||
await writeJson(path.resolve(cwd, 'package.json'), {
|
||||
name: packageName,
|
||||
version: '0.0.0-dev',
|
||||
repository: {url: repositoryUrl},
|
||||
@ -563,9 +523,9 @@ test.serial('Log unexpected errors from plugins and exit with 1', async t => {
|
||||
|
||||
/* Initial release */
|
||||
t.log('Commit a feature');
|
||||
await gitCommits(['feat: Initial commit']);
|
||||
await gitCommits(['feat: Initial commit'], {cwd});
|
||||
t.log('$ semantic-release');
|
||||
const {stderr, code} = await execa(cli, [], {env, reject: false});
|
||||
const {stderr, code} = await execa(cli, [], {env, cwd, reject: false});
|
||||
// Verify the type and message are logged
|
||||
t.regex(stderr, /Error: a/);
|
||||
// Verify the the stacktrace is logged
|
||||
@ -575,13 +535,13 @@ test.serial('Log unexpected errors from plugins and exit with 1', async t => {
|
||||
t.is(code, 1);
|
||||
});
|
||||
|
||||
test.serial('Log errors inheriting SemanticReleaseError and exit with 1', async t => {
|
||||
test('Log errors inheriting SemanticReleaseError and exit with 1', async t => {
|
||||
const packageName = 'test-inherited-error';
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
t.log('Create git repository and package.json');
|
||||
const {repositoryUrl} = await gitbox.createRepo(packageName);
|
||||
const {cwd, repositoryUrl} = await gitbox.createRepo(packageName);
|
||||
// Create package.json in repository root
|
||||
await writeJson('./package.json', {
|
||||
await writeJson(path.resolve(cwd, 'package.json'), {
|
||||
name: packageName,
|
||||
version: '0.0.0-dev',
|
||||
repository: {url: repositoryUrl},
|
||||
@ -590,33 +550,53 @@ test.serial('Log errors inheriting SemanticReleaseError and exit with 1', async
|
||||
|
||||
/* Initial release */
|
||||
t.log('Commit a feature');
|
||||
await gitCommits(['feat: Initial commit']);
|
||||
await gitCommits(['feat: Initial commit'], {cwd});
|
||||
t.log('$ semantic-release');
|
||||
const {stdout, code} = await execa(cli, [], {env, reject: false});
|
||||
const {stdout, code} = await execa(cli, [], {env, cwd, reject: false});
|
||||
// Verify the type and message are logged
|
||||
t.regex(stdout, /EINHERITED Inherited error/);
|
||||
t.is(code, 1);
|
||||
});
|
||||
|
||||
test.serial('Exit with 1 if missing permission to push to the remote repository', async t => {
|
||||
test('Exit with 1 if missing permission to push to the remote repository', async t => {
|
||||
const packageName = 'unauthorized';
|
||||
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
t.log('Create git repository');
|
||||
await gitbox.createRepo(packageName);
|
||||
await writeJson('./package.json', {name: packageName, version: '0.0.0-dev'});
|
||||
const {cwd} = await gitbox.createRepo(packageName);
|
||||
await writeJson(path.resolve(cwd, 'package.json'), {name: packageName, version: '0.0.0-dev'});
|
||||
|
||||
/* Initial release */
|
||||
t.log('Commit a feature');
|
||||
await gitCommits(['feat: Initial commit']);
|
||||
await gitPush();
|
||||
await gitCommits(['feat: Initial commit'], {cwd});
|
||||
await gitPush('origin', 'master', {cwd});
|
||||
t.log('$ semantic-release');
|
||||
const {stdout, code} = await execa(
|
||||
cli,
|
||||
['--repository-url', 'http://user:wrong_pass@localhost:2080/git/unauthorized.git'],
|
||||
{env: {...env, GH_TOKEN: 'user:wrong_pass'}, reject: false}
|
||||
{env: {...env, GH_TOKEN: 'user:wrong_pass'}, cwd, reject: false}
|
||||
);
|
||||
// Verify the type and message are logged
|
||||
t.regex(stdout, /EGITNOPERMISSION/);
|
||||
t.is(code, 1);
|
||||
});
|
||||
|
||||
test('Hide sensitive environment variable values from the logs', async t => {
|
||||
const packageName = 'log-secret';
|
||||
// Create a git repository, set the current working directory at the root of the repo
|
||||
t.log('Create git repository');
|
||||
const {cwd, repositoryUrl} = await gitbox.createRepo(packageName);
|
||||
await writeJson(path.resolve(cwd, 'package.json'), {
|
||||
name: packageName,
|
||||
version: '0.0.0-dev',
|
||||
repository: {url: repositoryUrl},
|
||||
release: {verifyConditions: [pluginLogEnv], fail: false, success: false},
|
||||
});
|
||||
|
||||
t.log('$ semantic-release');
|
||||
const {stdout, stderr} = await execa(cli, [], {env: {...env, MY_TOKEN: 'secret token'}, cwd, reject: false});
|
||||
|
||||
t.regex(stdout, new RegExp(`Console: Exposing token ${escapeRegExp(SECRET_REPLACEMENT)}`));
|
||||
t.regex(stdout, new RegExp(`Log: Exposing token ${escapeRegExp(SECRET_REPLACEMENT)}`));
|
||||
t.regex(stderr, new RegExp(`Error: Console token ${escapeRegExp(SECRET_REPLACEMENT)}`));
|
||||
t.regex(stderr, new RegExp(`Throw error: Exposing ${escapeRegExp(SECRET_REPLACEMENT)}`));
|
||||
});
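For reference, a minimal sketch of a plugin compatible with this change, assuming plugins are now called with the shared context (cwd, env, logger, …) as their second argument instead of reading process.env themselves; the plugin shape and the MY_TOKEN variable are illustrative, not the actual plugin-log-env fixture used above.

// Hypothetical verifyConditions plugin: a sketch only, assuming a (pluginConfig, context) call signature.
module.exports = async (pluginConfig, context) => {
  const {cwd, env, logger} = context;
  if (!env.MY_TOKEN) {
    // Illustrative check: real plugins would throw a SemanticReleaseError here.
    throw new Error('MY_TOKEN is missing from the environment passed by semantic-release');
  }
  logger.log(`Verifying conditions in ${cwd}`);
};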
@ -3,6 +3,8 @@ import {noop} from 'lodash';
|
||||
import {stub} from 'sinon';
|
||||
import normalize from '../../lib/plugins/normalize';
|
||||
|
||||
const cwd = process.cwd();
|
||||
|
||||
test.beforeEach(t => {
|
||||
// Stub the logger functions
|
||||
t.context.log = stub();
|
||||
@ -10,7 +12,12 @@ test.beforeEach(t => {
|
||||
});
|
||||
|
||||
test('Normalize and load plugin from string', t => {
|
||||
const plugin = normalize('verifyConditions', {}, {}, './test/fixtures/plugin-noop', t.context.logger);
|
||||
const plugin = normalize(
|
||||
{cwd, options: {}, logger: t.context.logger},
|
||||
'verifyConditions',
|
||||
'./test/fixtures/plugin-noop',
|
||||
{}
|
||||
);
|
||||
|
||||
t.is(plugin.pluginName, './test/fixtures/plugin-noop');
|
||||
t.is(typeof plugin, 'function');
|
||||
@ -18,7 +25,12 @@ test('Normalize and load plugin from string', t => {
|
||||
});
|
||||
|
||||
test('Normalize and load plugin from object', t => {
|
||||
const plugin = normalize('publish', {}, {}, {path: './test/fixtures/plugin-noop'}, t.context.logger);
|
||||
const plugin = normalize(
|
||||
{cwd, options: {}, logger: t.context.logger},
|
||||
'publish',
|
||||
{path: './test/fixtures/plugin-noop'},
|
||||
{}
|
||||
);
|
||||
|
||||
t.is(plugin.pluginName, './test/fixtures/plugin-noop');
|
||||
t.is(typeof plugin, 'function');
|
||||
@ -26,13 +38,9 @@ test('Normalize and load plugin from object', t => {
|
||||
});
|
||||
|
||||
test('Normalize and load plugin from a base file path', t => {
|
||||
const plugin = normalize(
|
||||
'verifyConditions',
|
||||
{'./plugin-noop': './test/fixtures'},
|
||||
{},
|
||||
'./plugin-noop',
|
||||
t.context.logger
|
||||
);
|
||||
const plugin = normalize({cwd, options: {}, logger: t.context.logger}, 'verifyConditions', './plugin-noop', {
|
||||
'./plugin-noop': './test/fixtures',
|
||||
});
|
||||
|
||||
t.is(plugin.pluginName, './plugin-noop');
|
||||
t.is(typeof plugin, 'function');
|
||||
@ -45,13 +53,9 @@ test('Normalize and load plugin from a base file path', t => {
|
||||
});
|
||||
|
||||
test('Wrap plugin in a function that add the "pluginName" to the error"', async t => {
|
||||
const plugin = normalize(
|
||||
'verifyConditions',
|
||||
{'./plugin-error': './test/fixtures'},
|
||||
{},
|
||||
'./plugin-error',
|
||||
t.context.logger
|
||||
);
|
||||
const plugin = normalize({cwd, options: {}, logger: t.context.logger}, 'verifyConditions', './plugin-error', {
|
||||
'./plugin-error': './test/fixtures',
|
||||
});
|
||||
|
||||
const error = await t.throws(plugin());
|
||||
|
||||
@ -59,13 +63,9 @@ test('Wrap plugin in a function that add the "pluginName" to the error"', async
|
||||
});
|
||||
|
||||
test('Wrap plugin in a function that add the "pluginName" to multiple errors"', async t => {
|
||||
const plugin = normalize(
|
||||
'verifyConditions',
|
||||
{'./plugin-errors': './test/fixtures'},
|
||||
{},
|
||||
'./plugin-errors',
|
||||
t.context.logger
|
||||
);
|
||||
const plugin = normalize({cwd, options: {}, logger: t.context.logger}, 'verifyConditions', './plugin-errors', {
|
||||
'./plugin-errors': './test/fixtures',
|
||||
});
|
||||
|
||||
const errors = [...(await t.throws(plugin()))];
|
||||
for (const error of errors) {
|
||||
@ -75,14 +75,19 @@ test('Wrap plugin in a function that add the "pluginName" to multiple errors"',
|
||||
|
||||
test('Normalize and load plugin from function', t => {
|
||||
const pluginFunction = () => {};
|
||||
const plugin = normalize('', {}, {}, pluginFunction, t.context.logger);
|
||||
const plugin = normalize({cwd, options: {}, logger: t.context.logger}, '', pluginFunction, {});
|
||||
|
||||
t.is(plugin.pluginName, '[Function: pluginFunction]');
|
||||
t.is(typeof plugin, 'function');
|
||||
});
|
||||
|
||||
test('Normalize and load plugin that retuns multiple functions', t => {
|
||||
const plugin = normalize('verifyConditions', {}, {}, './test/fixtures/multi-plugin', t.context.logger);
|
||||
const plugin = normalize(
|
||||
{cwd, options: {}, logger: t.context.logger},
|
||||
'verifyConditions',
|
||||
'./test/fixtures/multi-plugin',
|
||||
{}
|
||||
);
|
||||
|
||||
t.is(typeof plugin, 'function');
|
||||
t.deepEqual(t.context.log.args[0], ['Load plugin "%s" from %s', 'verifyConditions', './test/fixtures/multi-plugin']);
|
||||
@ -90,7 +95,7 @@ test('Normalize and load plugin that retuns multiple functions', t => {
|
||||
|
||||
test('Wrap "analyzeCommits" plugin in a function that validate the output of the plugin', async t => {
|
||||
const analyzeCommits = stub().resolves(2);
|
||||
const plugin = normalize('analyzeCommits', {}, {}, analyzeCommits, t.context.logger);
|
||||
const plugin = normalize({cwd, options: {}, logger: t.context.logger}, 'analyzeCommits', analyzeCommits, {});
|
||||
|
||||
const error = await t.throws(plugin());
|
||||
|
||||
@ -103,7 +108,7 @@ test('Wrap "analyzeCommits" plugin in a function that validate the output of the
|
||||
|
||||
test('Wrap "generateNotes" plugin in a function that validate the output of the plugin', async t => {
|
||||
const generateNotes = stub().resolves(2);
|
||||
const plugin = normalize('generateNotes', {}, {}, generateNotes, t.context.logger);
|
||||
const plugin = normalize({cwd, options: {}, logger: t.context.logger}, 'generateNotes', generateNotes, {});
|
||||
|
||||
const error = await t.throws(plugin());
|
||||
|
||||
@ -115,13 +120,9 @@ test('Wrap "generateNotes" plugin in a function that validate the output of the
|
||||
});
|
||||
|
||||
test('Wrap "publish" plugin in a function that validate the output of the plugin', async t => {
|
||||
const plugin = normalize(
|
||||
'publish',
|
||||
{'./plugin-identity': './test/fixtures'},
|
||||
{},
|
||||
'./plugin-identity',
|
||||
t.context.logger
|
||||
);
|
||||
const plugin = normalize({cwd, options: {}, logger: t.context.logger}, 'publish', './plugin-identity', {
|
||||
'./plugin-identity': './test/fixtures',
|
||||
});
|
||||
|
||||
const error = await t.throws(plugin(2));
|
||||
|
||||
@ -134,9 +135,9 @@ test('Wrap "publish" plugin in a function that validate the output of the plugin
|
||||
|
||||
test('Plugin is called with "pluginConfig" (omitting "path", adding global config) and input', async t => {
|
||||
const pluginFunction = stub().resolves();
|
||||
const conf = {path: pluginFunction, conf: 'confValue'};
|
||||
const globalConf = {global: 'globalValue'};
|
||||
const plugin = normalize('', {}, globalConf, conf, t.context.logger);
|
||||
const pluginConf = {path: pluginFunction, conf: 'confValue'};
|
||||
const options = {global: 'globalValue'};
|
||||
const plugin = normalize({cwd, options, logger: t.context.logger}, '', pluginConf, {});
|
||||
await plugin('param');
|
||||
|
||||
t.true(pluginFunction.calledWith({conf: 'confValue', global: 'globalValue'}, 'param'));
|
||||
@ -146,13 +147,13 @@ test('Prevent plugins to modify "pluginConfig"', async t => {
|
||||
const pluginFunction = stub().callsFake(pluginConfig => {
|
||||
pluginConfig.conf.subConf = 'otherConf';
|
||||
});
|
||||
const conf = {path: pluginFunction, conf: {subConf: 'originalConf'}};
|
||||
const globalConf = {globalConf: {globalSubConf: 'originalGlobalConf'}};
|
||||
const plugin = normalize('', {}, globalConf, conf, t.context.logger);
|
||||
const pluginConf = {path: pluginFunction, conf: {subConf: 'originalConf'}};
|
||||
const options = {globalConf: {globalSubConf: 'originalGlobalConf'}};
|
||||
const plugin = normalize({cwd, options, logger: t.context.logger}, '', pluginConf, {});
|
||||
await plugin();
|
||||
|
||||
t.is(conf.conf.subConf, 'originalConf');
|
||||
t.is(globalConf.globalConf.globalSubConf, 'originalGlobalConf');
|
||||
t.is(pluginConf.conf.subConf, 'originalConf');
|
||||
t.is(options.globalConf.globalSubConf, 'originalGlobalConf');
|
||||
});
|
||||
|
||||
test('Prevent plugins to modify its input', async t => {
|
||||
@ -160,21 +161,26 @@ test('Prevent plugins to modify its input', async t => {
|
||||
options.param.subParam = 'otherParam';
|
||||
});
|
||||
const input = {param: {subParam: 'originalSubParam'}};
|
||||
const plugin = normalize('', {}, {}, pluginFunction, t.context.logger);
|
||||
const plugin = normalize({cwd, options: {}, logger: t.context.logger}, '', pluginFunction, {});
|
||||
await plugin(input);
|
||||
|
||||
t.is(input.param.subParam, 'originalSubParam');
|
||||
});
|
||||
|
||||
test('Return noop if the plugin is not defined', t => {
|
||||
const plugin = normalize();
|
||||
const plugin = normalize({cwd, options: {}, logger: t.context.logger});
|
||||
|
||||
t.is(plugin, noop);
|
||||
});
|
||||
|
||||
test('Always pass a defined "pluginConfig" for plugin defined with string', async t => {
|
||||
// Call the normalize function with the path of a plugin that returns its config
|
||||
const plugin = normalize('', {}, {}, './test/fixtures/plugin-result-config', t.context.logger);
|
||||
const plugin = normalize(
|
||||
{cwd, options: {}, logger: t.context.logger},
|
||||
'',
|
||||
'./test/fixtures/plugin-result-config',
|
||||
{}
|
||||
);
|
||||
const pluginResult = await plugin();
|
||||
|
||||
t.deepEqual(pluginResult.pluginConfig, {});
|
||||
@ -182,14 +188,21 @@ test('Always pass a defined "pluginConfig" for plugin defined with string', asyn
|
||||
|
||||
test('Always pass a defined "pluginConfig" for plugin defined with path', async t => {
|
||||
// Call the normalize function with the path of a plugin that returns its config
|
||||
const plugin = normalize('', {}, {}, {path: './test/fixtures/plugin-result-config'}, t.context.logger);
|
||||
const plugin = normalize(
|
||||
{cwd, options: {}, logger: t.context.logger},
|
||||
'',
|
||||
{path: './test/fixtures/plugin-result-config'},
|
||||
{}
|
||||
);
|
||||
const pluginResult = await plugin();
|
||||
|
||||
t.deepEqual(pluginResult.pluginConfig, {});
|
||||
});
|
||||
|
||||
test('Throws an error if the plugin return an object without the expected plugin function', t => {
|
||||
const error = t.throws(() => normalize('inexistantPlugin', {}, {}, './test/fixtures/multi-plugin', t.context.logger));
|
||||
const error = t.throws(() =>
|
||||
normalize({cwd, options: {}, logger: t.context.logger}, 'inexistantPlugin', './test/fixtures/multi-plugin', {})
|
||||
);
|
||||
|
||||
t.is(error.code, 'EPLUGIN');
|
||||
t.is(error.name, 'SemanticReleaseError');
|
||||
@ -198,7 +211,10 @@ test('Throws an error if the plugin return an object without the expected plugin
|
||||
});
|
||||
|
||||
test('Throws an error if the plugin is not found', t => {
|
||||
const error = t.throws(() => normalize('inexistantPlugin', {}, {}, 'non-existing-path', t.context.logger), Error);
|
||||
const error = t.throws(
|
||||
() => normalize({cwd, options: {}, logger: t.context.logger}, 'inexistantPlugin', 'non-existing-path', {}),
|
||||
Error
|
||||
);
|
||||
|
||||
t.is(error.message, "Cannot find module 'non-existing-path'");
|
||||
t.is(error.code, 'MODULE_NOT_FOUND');
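A minimal sketch of the normalize signature exercised by the updated tests above, with the shared context passed first; the argument names and the use of console as a logger are assumptions made for illustration.

const normalize = require('../../lib/plugins/normalize');

const context = {cwd: process.cwd(), options: {}, logger: console};
// Returns a wrapped plugin function; the last argument maps a plugin name to the base path it is resolved from.
const verifyConditions = normalize(context, 'verifyConditions', './test/fixtures/plugin-noop', {});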
@ -14,13 +14,8 @@ test.beforeEach(t => {
|
||||
t.context.logger = {log: t.context.log};
|
||||
});
|
||||
|
||||
test.afterEach.always(() => {
|
||||
// Restore the current working directory
|
||||
process.chdir(cwd);
|
||||
});
|
||||
|
||||
test('Export default plugins', t => {
|
||||
const plugins = getPlugins({}, {}, t.context.logger);
|
||||
const plugins = getPlugins({cwd, options: {}, logger: t.context.logger}, {});
|
||||
|
||||
// Verify the module returns a function for each plugin
|
||||
t.is(typeof plugins.verifyConditions, 'function');
|
||||
@ -36,13 +31,16 @@ test('Export default plugins', t => {
|
||||
test('Export plugins based on config', t => {
|
||||
const plugins = getPlugins(
|
||||
{
|
||||
cwd,
|
||||
logger: t.context.logger,
|
||||
options: {
|
||||
verifyConditions: ['./test/fixtures/plugin-noop', {path: './test/fixtures/plugin-noop'}],
|
||||
generateNotes: './test/fixtures/plugin-noop',
|
||||
analyzeCommits: {path: './test/fixtures/plugin-noop'},
|
||||
verifyRelease: () => {},
|
||||
},
|
||||
{},
|
||||
t.context.logger
|
||||
},
|
||||
{}
|
||||
);
|
||||
|
||||
// Verify the module returns a function for each plugin
|
||||
@ -56,24 +54,26 @@ test('Export plugins based on config', t => {
|
||||
t.is(typeof plugins.fail, 'function');
|
||||
});
|
||||
|
||||
test.serial('Export plugins loaded from the dependency of a shareable config module', async t => {
|
||||
const temp = tempy.directory();
|
||||
test('Export plugins loaded from the dependency of a shareable config module', async t => {
|
||||
const cwd = tempy.directory();
|
||||
await copy(
|
||||
'./test/fixtures/plugin-noop.js',
|
||||
path.join(temp, 'node_modules/shareable-config/node_modules/custom-plugin/index.js')
|
||||
path.resolve(cwd, 'node_modules/shareable-config/node_modules/custom-plugin/index.js')
|
||||
);
|
||||
await outputFile(path.join(temp, 'node_modules/shareable-config/index.js'), '');
|
||||
process.chdir(temp);
|
||||
await outputFile(path.resolve(cwd, 'node_modules/shareable-config/index.js'), '');
|
||||
|
||||
const plugins = getPlugins(
|
||||
{
|
||||
cwd,
|
||||
logger: t.context.logger,
|
||||
options: {
|
||||
verifyConditions: ['custom-plugin', {path: 'custom-plugin'}],
|
||||
generateNotes: 'custom-plugin',
|
||||
analyzeCommits: {path: 'custom-plugin'},
|
||||
verifyRelease: () => {},
|
||||
},
|
||||
{'custom-plugin': 'shareable-config'},
|
||||
t.context.logger
|
||||
},
|
||||
{'custom-plugin': 'shareable-config'}
|
||||
);
|
||||
|
||||
// Verify the module returns a function for each plugin
|
||||
@ -87,21 +87,23 @@ test.serial('Export plugins loaded from the dependency of a shareable config mod
|
||||
t.is(typeof plugins.fail, 'function');
|
||||
});
|
||||
|
||||
test.serial('Export plugins loaded from the dependency of a shareable config file', async t => {
|
||||
const temp = tempy.directory();
|
||||
await copy('./test/fixtures/plugin-noop.js', path.join(temp, 'plugin/plugin-noop.js'));
|
||||
await outputFile(path.join(temp, 'shareable-config.js'), '');
|
||||
process.chdir(temp);
|
||||
test('Export plugins loaded from the dependency of a shareable config file', async t => {
|
||||
const cwd = tempy.directory();
|
||||
await copy('./test/fixtures/plugin-noop.js', path.resolve(cwd, 'plugin/plugin-noop.js'));
|
||||
await outputFile(path.resolve(cwd, 'shareable-config.js'), '');
|
||||
|
||||
const plugins = getPlugins(
|
||||
{
|
||||
cwd,
|
||||
logger: t.context.logger,
|
||||
options: {
|
||||
verifyConditions: ['./plugin/plugin-noop', {path: './plugin/plugin-noop'}],
|
||||
generateNotes: './plugin/plugin-noop',
|
||||
analyzeCommits: {path: './plugin/plugin-noop'},
|
||||
verifyRelease: () => {},
|
||||
},
|
||||
{'./plugin/plugin-noop': './shareable-config.js'},
|
||||
t.context.logger
|
||||
},
|
||||
{'./plugin/plugin-noop': './shareable-config.js'}
|
||||
);
|
||||
|
||||
// Verify the module returns a function for each plugin
|
||||
@ -121,7 +123,10 @@ test('Use default when only options are passed for a single plugin', t => {
|
||||
const success = () => {};
|
||||
const fail = [() => {}];
|
||||
|
||||
const plugins = getPlugins({analyzeCommits, generateNotes, success, fail}, {}, t.context.logger);
|
||||
const plugins = getPlugins(
|
||||
{cwd, logger: t.context.logger, options: {analyzeCommits, generateNotes, success, fail}},
|
||||
{}
|
||||
);
|
||||
|
||||
// Verify the module returns a function for each plugin
|
||||
t.is(typeof plugins.analyzeCommits, 'function');
|
||||
@ -137,12 +142,15 @@ test('Use default when only options are passed for a single plugin', t => {
|
||||
test('Merge global options with plugin options', async t => {
|
||||
const plugins = getPlugins(
|
||||
{
|
||||
cwd,
|
||||
logger: t.context.logger,
|
||||
options: {
|
||||
globalOpt: 'global',
|
||||
otherOpt: 'globally-defined',
|
||||
verifyRelease: {path: './test/fixtures/plugin-result-config', localOpt: 'local', otherOpt: 'locally-defined'},
|
||||
},
|
||||
{},
|
||||
t.context.logger
|
||||
},
|
||||
{}
|
||||
);
|
||||
|
||||
const [result] = await plugins.verifyRelease();
|
||||
@ -151,7 +159,7 @@ test('Merge global options with plugin options', async t => {
|
||||
});
|
||||
|
||||
test('Throw an error if plugins configuration are missing a path for plugin pipeline', t => {
|
||||
const errors = [...t.throws(() => getPlugins({verifyConditions: {}}, {}, t.context.logger))];
|
||||
const errors = [...t.throws(() => getPlugins({cwd, logger: t.context.logger, options: {verifyConditions: {}}}, {}))];
|
||||
|
||||
t.is(errors[0].name, 'SemanticReleaseError');
|
||||
t.is(errors[0].code, 'EPLUGINCONF');
|
||||
@ -159,7 +167,12 @@ test('Throw an error if plugins configuration are missing a path for plugin pipe
|
||||
|
||||
test('Throw an error if an array of plugin configuration is missing a path for plugin pipeline', t => {
|
||||
const errors = [
|
||||
...t.throws(() => getPlugins({verifyConditions: [{path: '@semantic-release/npm'}, {}]}, {}, t.context.logger)),
|
||||
...t.throws(() =>
|
||||
getPlugins(
|
||||
{cwd, logger: t.context.logger, options: {verifyConditions: [{path: '@semantic-release/npm'}, {}]}},
|
||||
{}
|
||||
)
|
||||
),
|
||||
];
|
||||
|
||||
t.is(errors[0].name, 'SemanticReleaseError');
|
||||
|
@ -3,31 +3,11 @@ import tempy from 'tempy';
|
||||
import verify from '../lib/verify';
|
||||
import {gitRepo} from './helpers/git-utils';
|
||||
|
||||
// Save the current process.env
|
||||
const envBackup = Object.assign({}, process.env);
|
||||
// Save the current working diretory
|
||||
const cwd = process.cwd();
|
||||
test('Throw a AggregateError', async t => {
|
||||
const {cwd} = await gitRepo();
|
||||
const options = {};
|
||||
|
||||
test.beforeEach(() => {
|
||||
// Delete environment variables that could have been set on the machine running the tests
|
||||
delete process.env.GIT_CREDENTIALS;
|
||||
delete process.env.GH_TOKEN;
|
||||
delete process.env.GITHUB_TOKEN;
|
||||
delete process.env.GL_TOKEN;
|
||||
delete process.env.GITLAB_TOKEN;
|
||||
});
|
||||
|
||||
test.afterEach.always(() => {
|
||||
// Restore process.env
|
||||
process.env = envBackup;
|
||||
// Restore the current working directory
|
||||
process.chdir(cwd);
|
||||
});
|
||||
|
||||
test.serial('Throw a AggregateError', async t => {
|
||||
await gitRepo();
|
||||
|
||||
const errors = [...(await t.throws(verify({})))];
|
||||
const errors = [...(await t.throws(verify({cwd, options})))];
|
||||
|
||||
t.is(errors[0].name, 'SemanticReleaseError');
|
||||
t.is(errors[0].code, 'ENOREPOURL');
|
||||
@ -37,49 +17,49 @@ test.serial('Throw a AggregateError', async t => {
|
||||
t.is(errors[2].code, 'ETAGNOVERSION');
|
||||
});
|
||||
|
||||
test.serial('Throw a SemanticReleaseError if does not run on a git repository', async t => {
|
||||
const dir = tempy.directory();
|
||||
process.chdir(dir);
|
||||
test('Throw a SemanticReleaseError if does not run on a git repository', async t => {
|
||||
const cwd = tempy.directory();
|
||||
const options = {};
|
||||
|
||||
const errors = [...(await t.throws(verify({})))];
|
||||
const errors = [...(await t.throws(verify({cwd, options})))];
|
||||
|
||||
t.is(errors[0].name, 'SemanticReleaseError');
|
||||
t.is(errors[0].code, 'ENOGITREPO');
|
||||
});
|
||||
|
||||
test.serial('Throw a SemanticReleaseError if the "tagFormat" is not valid', async t => {
|
||||
const repositoryUrl = await gitRepo(true);
|
||||
test('Throw a SemanticReleaseError if the "tagFormat" is not valid', async t => {
|
||||
const {cwd, repositoryUrl} = await gitRepo(true);
|
||||
const options = {repositoryUrl, tagFormat: `?\${version}`};
|
||||
|
||||
const errors = [...(await t.throws(verify(options, 'master', t.context.logger)))];
|
||||
const errors = [...(await t.throws(verify({cwd, options})))];
|
||||
|
||||
t.is(errors[0].name, 'SemanticReleaseError');
|
||||
t.is(errors[0].code, 'EINVALIDTAGFORMAT');
|
||||
});
|
||||
|
||||
test.serial('Throw a SemanticReleaseError if the "tagFormat" does not contains the "version" variable', async t => {
|
||||
const repositoryUrl = await gitRepo(true);
|
||||
test('Throw a SemanticReleaseError if the "tagFormat" does not contains the "version" variable', async t => {
|
||||
const {cwd, repositoryUrl} = await gitRepo(true);
|
||||
const options = {repositoryUrl, tagFormat: 'test'};
|
||||
|
||||
const errors = [...(await t.throws(verify(options, 'master', t.context.logger)))];
|
||||
const errors = [...(await t.throws(verify({cwd, options})))];
|
||||
|
||||
t.is(errors[0].name, 'SemanticReleaseError');
|
||||
t.is(errors[0].code, 'ETAGNOVERSION');
|
||||
});
|
||||
|
||||
test.serial('Throw a SemanticReleaseError if the "tagFormat" contains multiple "version" variables', async t => {
|
||||
const repositoryUrl = await gitRepo(true);
|
||||
test('Throw a SemanticReleaseError if the "tagFormat" contains multiple "version" variables', async t => {
|
||||
const {cwd, repositoryUrl} = await gitRepo(true);
|
||||
const options = {repositoryUrl, tagFormat: `\${version}v\${version}`};
|
||||
|
||||
const errors = [...(await t.throws(verify(options)))];
|
||||
const errors = [...(await t.throws(verify({cwd, options})))];
|
||||
|
||||
t.is(errors[0].name, 'SemanticReleaseError');
|
||||
t.is(errors[0].code, 'ETAGNOVERSION');
|
||||
});
|
||||
|
||||
test.serial('Return "true" if all verification pass', async t => {
|
||||
const repositoryUrl = await gitRepo(true);
|
||||
const options = {repositoryUrl, tagFormat: `v\${version}`, branch: 'master'};
|
||||
test('Return "true" if all verification pass', async t => {
|
||||
const {cwd, repositoryUrl} = await gitRepo(true);
|
||||
const options = {repositoryUrl, tagFormat: `v\${version}`};
|
||||
|
||||
await t.notThrows(verify(options));
|
||||
await t.notThrows(verify({cwd, options}));
|
||||
});
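Taken together, the updated tests show the new entry-point contract: options first, then an explicit {cwd, env} context, which is why the tests above could drop test.serial. A minimal usage sketch, with illustrative paths and options:

const semanticRelease = require('semantic-release');

(async () => {
  // cwd and env are passed explicitly instead of being read from the current process.
  await semanticRelease({dryRun: true}, {cwd: '/path/to/clone', env: process.env});
})();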