refactor: Use ES6, Test with AVA

- Use async/await instead of callbacks (see the sketch after this list)
- Use execa to run command-line tools
- Use AVA for tests
- Add several assertions in the unit tests
- Add documentation (comments) in the tests
- Run tests with a real git repo instead of mocking child_process, and add test helpers to create repos, commits and checkouts
- Simplify test directory structure
- Improve code readability (mostly with async/await)
- Use eslint for linting, prettier for formatting
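The conversion pattern applied throughout is roughly the following (a minimal sketch, not code from this commit; `legacyPlugin` is a hypothetical callback-style plugin, and the snippet assumes execa is installed and the working directory is a git repository):

const {promisify} = require('util');
const execa = require('execa');

// Hypothetical callback-style plugin, standing in for the old callback-based APIs
const legacyPlugin = (config, cb) => cb(null, `analyzed ${config.branch}`);

const run = async config => {
  // promisify() wraps the callback API so it can be awaited
  const result = await promisify(legacyPlugin)(config);
  // execa replaces child_process for shelling out to git
  const {stdout: head} = await execa('git', ['rev-parse', 'HEAD']);
  return {result, head};
};

run({branch: 'master'}).then(console.log).catch(console.error);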
Pierre-Denis Vanduynslager 2017-09-29 16:39:20 -04:00 committed by Pierre Vanduynslager
parent 7fe0890350
commit abf92ad03d
51 changed files with 1362 additions and 1132 deletions


@ -3,7 +3,7 @@
// Bad news: We have to write plain ES5 in this file
// Good news: It's the only file of the entire project
var semver = require('semver')
var semver = require('semver');
if (semver.lt(process.version, '8.0.0')) {
console.error(
@ -17,9 +17,10 @@ compatibility with minimal overhead:
$ npx -p node@8 npm run semantic-release
npx is bundled with npm >= 5.4, or available via npm. More info: npm.im/npx`)
process.exit(1)
npx is bundled with npm >= 5.4, or available via npm. More info: npm.im/npx`
);
process.exit(1);
}
// node 8+ from this point on
require('../src')
require('../src')();


@ -1,6 +1,6 @@
{
"name": "semantic-release",
"description": "automated semver compliant package publishing",
"description": "Automated semver compliant package publishing",
"version": "0.0.0-placeholder",
"author": "Stephan Bönnemann <stephan@boennemann.me> (http://boennemann.me)",
"bin": {
@ -20,38 +20,60 @@
"@semantic-release/error": "^2.0.0",
"@semantic-release/last-release-npm": "^2.0.0",
"@semantic-release/release-notes-generator": "^4.0.0",
"execa": "^0.8.0",
"fs-extra": "^4.0.2",
"git-head": "^1.2.1",
"github": "^8.0.0",
"github": "^11.0.0",
"lodash": "^4.0.0",
"nerf-dart": "^1.0.0",
"nopt": "^4.0.0",
"normalize-package-data": "^2.3.4",
"npmconf": "^2.1.2",
"npmlog": "^4.0.0",
"p-series": "^1.0.0",
"parse-github-repo-url": "^1.3.0",
"require-relative": "^0.8.7",
"run-auto": "^2.0.0",
"run-series": "^1.1.3",
"semver": "^5.2.0"
"semver": "^5.4.1"
},
"devDependencies": {
"coveralls": "^3.0.0",
"ava": "^0.22.0",
"commitizen": "^2.9.6",
"cz-conventional-changelog": "^2.0.0",
"mkdirp": "^0.5.1",
"mock-spawn": "^0.2.6",
"nixt": "^0.5.0",
"eslint": "^4.7.0",
"eslint-config-prettier": "^2.5.0",
"eslint-config-standard": "^10.2.1",
"eslint-plugin-import": "^2.7.0",
"eslint-plugin-node": "^5.2.0",
"eslint-plugin-prettier": "^2.3.0",
"eslint-plugin-promise": "^3.5.0",
"eslint-plugin-standard": "^3.0.1",
"nock": "^9.0.2",
"npm-registry-couchapp": "^2.6.12",
"nyc": "^10.0.0",
"nyc": "^11.2.1",
"p-map-series": "^1.0.0",
"prettier": "^1.7.0",
"proxyquire": "^1.7.3",
"rimraf": "^2.5.0",
"standard": "^9.0.0",
"tap": "^10.0.1"
"sinon": "^4.0.0",
"tempy": "^0.2.1"
},
"engines": {
"node": ">=4",
"npm": ">=2"
},
"eslintConfig": {
"extends": [
"standard",
"prettier"
],
"plugins": [
"prettier"
],
"rules": {
"prettier/prettier": 2
}
},
"files": [
"bin",
"src"
@ -70,6 +92,23 @@
],
"license": "MIT",
"main": "bin/semantic-release.js",
"nyc": {
"include": [
"src/**/*.js"
],
"reporter": [
"json",
"text",
"html"
],
"all": true
},
"prettier": {
"printWidth": 120,
"singleQuote": true,
"bracketSpacing": false,
"trailingComma": "es5"
},
"publishConfig": {
"tag": "next"
},
@ -83,10 +122,11 @@
"scripts": {
"coverage": "nyc report",
"coverage:upload": "npm run coverage -s -- --reporter=text-lcov | coveralls",
"pretest": "standard",
"clean": "rimraf coverage && rimraf .nyc_output",
"cm": "git-cz",
"lint": "eslint .",
"pretest": "npm run clean && npm run lint",
"semantic-release": "./bin/semantic-release.js pre && npm publish && ./bin/semantic-release.js post",
"test": "npm run test:unit && npm run test:integration",
"test:integration": "tap --no-cov test/scenarios/*.js",
"test:unit": "nyc tap --no-cov test/specs/*.js"
"test": "nyc ava -v"
}
}


@ -1,19 +1,20 @@
var fs = require('fs')
var path = require('path')
var url = require('url')
const path = require('path');
const {promisify} = require('util');
const url = require('url');
const {readJson, writeJson} = require('fs-extra');
const {cloneDeep, defaults, mapKeys, camelCase, assign} = require('lodash');
const log = require('npmlog');
const nopt = require('nopt');
const npmconf = require('npmconf');
const normalizeData = require('normalize-package-data');
var _ = require('lodash')
var log = require('npmlog')
var nopt = require('nopt')
var npmconf = require('npmconf')
var normalizeData = require('normalize-package-data')
log.heading = 'semantic-release'
var env = process.env
var pkg = JSON.parse(fs.readFileSync('./package.json'))
var originalPkg = _.cloneDeep(pkg)
normalizeData(pkg)
var knownOptions = {
module.exports = async () => {
log.heading = 'semantic-release';
const env = process.env;
const pkg = await readJson('./package.json');
const originalPkg = cloneDeep(pkg);
normalizeData(pkg);
const knownOptions = {
branch: String,
debug: Boolean,
'github-token': String,
@ -21,143 +22,132 @@ var knownOptions = {
'analyze-commits': [path, String],
'generate-notes': [path, String],
'verify-conditions': [path, String],
'verify-release': [path, String]
}
var options = _.defaults(
_.mapKeys(nopt(knownOptions), function (value, key) {
return _.camelCase(key)
'verify-release': [path, String],
};
const options = defaults(
mapKeys(nopt(knownOptions), (value, key) => {
return camelCase(key);
}),
pkg.release,
{
branch: 'master',
fallbackTags: {
next: 'latest'
},
fallbackTags: {next: 'latest'},
debug: !env.CI,
githubToken: env.GH_TOKEN || env.GITHUB_TOKEN,
githubUrl: env.GH_URL
githubUrl: env.GH_URL,
}
)
var plugins = require('../src/lib/plugins')(options)
npmconf.load({}, function (err, conf) {
if (err) {
log.error('init', 'Failed to load npm config.', err)
process.exit(1)
);
const plugins = require('../src/lib/plugins')(options);
let conf;
try {
conf = await promisify(npmconf.load)({});
} catch (err) {
log.error('init', 'Failed to load npm config.', err);
process.exit(1);
}
var npm = {
auth: {
token: env.NPM_TOKEN
},
const npm = {
auth: {token: env.NPM_TOKEN},
cafile: conf.get('cafile'),
loglevel: conf.get('loglevel'),
registry: require('../src/lib/get-registry')(pkg, conf),
tag: (pkg.publishConfig || {}).tag || conf.get('tag') || 'latest'
}
tag: (pkg.publishConfig || {}).tag || conf.get('tag') || 'latest',
};
// normalize trailing slash
npm.registry = url.format(url.parse(npm.registry))
npm.registry = url.format(url.parse(npm.registry));
log.level = npm.loglevel;
log.level = npm.loglevel
const config = {env: env, pkg: pkg, options: options, plugins: plugins, npm: npm};
const hide = {};
if (options.githubToken) hide.githubToken = '***';
var config = {
env: env,
pkg: pkg,
options: options,
plugins: plugins,
npm: npm
}
log.verbose('init', 'options:', assign({}, options, hide));
log.verbose('init', 'Verifying config.');
var hide = {}
if (options.githubToken) hide.githubToken = '***'
log.verbose('init', 'options:', _.assign({}, options, hide))
log.verbose('init', 'Verifying config.')
var errors = require('../src/lib/verify')(config)
errors.forEach(function (err) {
log.error('init', err.message + ' ' + err.code)
})
if (errors.length) process.exit(1)
const errors = require('../src/lib/verify')(config);
errors.forEach(err => {
log.error('init', err.message + ' ' + err.code);
});
if (errors.length) process.exit(1);
if (options.argv.remain[0] === 'pre') {
log.verbose('pre', 'Running pre-script.')
log.verbose('pre', 'Veriying conditions.')
plugins.verifyConditions(config, function (err) {
if (err) {
log[options.debug ? 'warn' : 'error']('pre', err.message)
if (!options.debug) process.exit(1)
log.verbose('pre', 'Running pre-script.');
log.verbose('pre', 'Verifying conditions.');
try {
await promisify(plugins.verifyConditions)(config);
} catch (err) {
log[options.debug ? 'warn' : 'error']('pre', err.message);
if (!options.debug) process.exit(1);
}
var nerfDart = require('nerf-dart')(npm.registry)
var wroteNpmRc = false
const nerfDart = require('nerf-dart')(npm.registry);
let wroteNpmRc = false;
if (env.NPM_OLD_TOKEN && env.NPM_EMAIL) {
// Using the old auth token format is not considered part of the public API
// This might go away anytime (i.e. once we have a better testing strategy)
conf.set('_auth', '${NPM_OLD_TOKEN}', 'project') // eslint-disable-line no-template-curly-in-string
conf.set('email', '${NPM_EMAIL}', 'project') // eslint-disable-line no-template-curly-in-string
wroteNpmRc = true
conf.set('_auth', '${NPM_OLD_TOKEN}', 'project'); // eslint-disable-line no-template-curly-in-string
conf.set('email', '${NPM_EMAIL}', 'project'); // eslint-disable-line no-template-curly-in-string
wroteNpmRc = true;
} else if (env.NPM_TOKEN) {
conf.set(nerfDart + ':_authToken', '${NPM_TOKEN}', 'project') // eslint-disable-line no-template-curly-in-string
wroteNpmRc = true
}
conf.save('project', function (err) {
if (err) return log.error('pre', 'Failed to save npm config.', err)
if (wroteNpmRc) log.verbose('pre', 'Wrote authToken to .npmrc.')
require('../src/pre')(config, function (err, release) {
if (err) {
log.error('pre', 'Failed to determine new version.')
var args = ['pre', (err.code ? err.code + ' ' : '') + err.message]
if (err.stack) args.push(err.stack)
log.error.apply(log, args)
process.exit(1)
}
var message = 'Determined version ' + release.version + ' as "' + npm.tag + '".'
log.verbose('pre', message)
if (options.debug) {
log.error('pre', message + ' Not publishing in debug mode.', release)
process.exit(1)
conf.set(nerfDart + ':_authToken', '${NPM_TOKEN}', 'project'); // eslint-disable-line no-template-curly-in-string
wroteNpmRc = true;
}
try {
var shrinkwrap = JSON.parse(fs.readFileSync('./npm-shrinkwrap.json'))
shrinkwrap.version = release.version
fs.writeFileSync('./npm-shrinkwrap.json', JSON.stringify(shrinkwrap, null, 2))
log.verbose('pre', 'Wrote version ' + release.version + 'to npm-shrinkwrap.json.')
await promisify(conf.save.bind(conf))('project');
} catch (err) {
return log.error('pre', 'Failed to save npm config.', err);
}
if (wroteNpmRc) log.verbose('pre', 'Wrote authToken to .npmrc.');
let release;
try {
release = await require('../src/pre')(config);
} catch (err) {
log.error('pre', 'Failed to determine new version.');
const args = ['pre', (err.code ? err.code + ' ' : '') + err.message];
if (err.stack) args.push(err.stack);
log.error.apply(log, args);
process.exit(1);
}
const message = 'Determined version ' + release.version + ' as "' + npm.tag + '".';
log.verbose('pre', message);
if (options.debug) {
log.error('pre', message + ' Not publishing in debug mode.', release);
process.exit(1);
}
try {
const shrinkwrap = await readJson('./npm-shrinkwrap.json');
shrinkwrap.version = release.version;
await writeJson('./npm-shrinkwrap.json', shrinkwrap);
log.verbose('pre', 'Wrote version ' + release.version + ' to npm-shrinkwrap.json.');
} catch (e) {
log.silly('pre', 'Couldn\'t find npm-shrinkwrap.json.')
log.silly('pre', "Couldn't find npm-shrinkwrap.json.");
}
fs.writeFileSync('./package.json', JSON.stringify(_.assign(originalPkg, {
version: release.version
}), null, 2))
await writeJson('./package.json', assign(originalPkg, {version: release.version}));
log.verbose('pre', 'Wrote version ' + release.version + ' to package.json.')
})
})
})
log.verbose('pre', 'Wrote version ' + release.version + ' to package.json.');
} else if (options.argv.remain[0] === 'post') {
log.verbose('post', 'Running post-script.')
log.verbose('post', 'Running post-script.');
require('../src/post')(config, function (err, published, release) {
if (err) {
log.error('post', 'Failed to publish release notes.', err)
process.exit(1)
let published, release;
try {
({published, release} = await require('../src/post')(config));
log.verbose('post', (published ? 'Published' : 'Generated') + ' release notes.', release);
} catch (err) {
log.error('post', 'Failed to publish release notes.', err);
process.exit(1);
}
log.verbose('post', (published ? 'Published' : 'Generated') + ' release notes.', release)
})
} else {
log.error('post', 'Command "' + options.argv.remain[0] + '" not recognized. Use either "pre" or "post"')
log.error('post', 'Command "' + options.argv.remain[0] + '" not recognized. Use either "pre" or "post"');
}
})
};


@ -1,79 +0,0 @@
var childProcess = require('child_process')
var log = require('npmlog')
var SemanticReleaseError = require('@semantic-release/error')
module.exports = function (config, cb) {
var lastRelease = config.lastRelease
var options = config.options
var branch = options.branch
var from = lastRelease.gitHead
var range = (from ? from + '..' : '') + 'HEAD'
if (!from) return extract()
childProcess.exec('git branch --no-color --contains ' + from, function (err, stdout) {
var inHistory = false
var branches
if (!err && stdout) {
branches = stdout.split('\n')
.map(function (result) {
if (branch === result.replace('*', '').trim()) {
inHistory = true
return null
}
return result.trim()
})
.filter(function (branch) {
return !!branch
})
}
if (!inHistory) {
log.error('commits',
'The commit the last release of this package was derived from is not in the direct history of the "' + branch + '" branch.\n' +
'This means semantic-release can not extract the commits between now and then.\n' +
'This is usually caused by force pushing, releasing from an unrelated branch, or using an already existing package name.\n' +
'You can recover from this error by publishing manually or restoring the commit "' + from + '".' + (branches && branches.length
? '\nHere is a list of branches that still contain the commit in question: \n * ' + branches.join('\n * ')
: ''
))
return cb(new SemanticReleaseError('Commit not in history', 'ENOTINHISTORY'))
}
extract()
})
function extract () {
var child = childProcess.spawn('git', ['log', '-E', '--format=%H==SPLIT==%B==END==', range])
var stdout = ''
var err = ''
child.stdout.on('data', function (data) {
stdout += data
})
child.stderr.on('data', function (data) {
err += data
})
child.on('close', function (code) {
if (err || code) return cb(err)
cb(null, String(stdout).split('==END==\n')
.filter(function (raw) {
return !!raw.trim()
})
.map(function (raw) {
var data = raw.split('==SPLIT==')
return {
hash: data[0],
message: data[1]
}
})
)
})
}
}

56
src/lib/get-commits.js Normal file

@ -0,0 +1,56 @@
const execa = require('execa');
const log = require('npmlog');
const SemanticReleaseError = require('@semantic-release/error');
module.exports = async ({lastRelease, options}) => {
let stdout;
if (lastRelease.gitHead) {
try {
({stdout} = await execa('git', ['branch', '--no-color', '--contains', lastRelease.gitHead]));
} catch (err) {
throw notInHistoryError(lastRelease.gitHead, options.branch);
}
const branches = stdout
.split('\n')
.map(branch => branch.replace('*', '').trim())
.filter(branch => !!branch);
if (!branches.includes(options.branch)) {
throw notInHistoryError(lastRelease.gitHead, options.branch, branches);
}
}
try {
({stdout} = await execa('git', [
'log',
'--format=%H==SPLIT==%B==END==',
`${lastRelease.gitHead ? lastRelease.gitHead + '..' : ''}HEAD`,
]));
} catch (err) {
return [];
}
return String(stdout)
.split('==END==')
.filter(raw => !!raw.trim())
.map(raw => {
const [hash, message] = raw.trim().split('==SPLIT==');
return {hash, message};
});
};
function notInHistoryError(gitHead, branch, branches) {
log.error(
'commits',
`
The commit the last release of this package was derived from is not in the direct history of the "${branch}" branch.
This means semantic-release can not extract the commits between now and then.
This is usually caused by force pushing, releasing from an unrelated branch, or using an already existing package name.
You can recover from this error by publishing manually or restoring the commit "${gitHead}".
${branches && branches.length
? `\nHere is a list of branches that still contain the commit in question: \n * ${branches.join('\n * ')}`
: ''}
`
);
return new SemanticReleaseError('Commit not in history', 'ENOTINHISTORY');
}


@ -1,12 +1,11 @@
module.exports = function (pkg, conf) {
if (pkg.publishConfig && pkg.publishConfig.registry) return pkg.publishConfig.registry
if (pkg.name[0] !== '@') return conf.get('registry') || 'https://registry.npmjs.org/'
var scope = pkg.name.split('/')[0]
var scopedRegistry = conf.get(scope + '/registry')
if (scopedRegistry) return scopedRegistry
return conf.get('registry') || 'https://registry.npmjs.org/'
module.exports = ({publishConfig, name}, conf) => {
if (publishConfig && publishConfig.registry) {
return publishConfig.registry;
}
if (name[0] !== '@') {
return conf.get('registry') || 'https://registry.npmjs.org/';
}
return conf.get(`${name.split('/')[0]}/registry`) || conf.get('registry') || 'https://registry.npmjs.org/';
};


@ -0,0 +1,14 @@
const {promisify} = require('util');
const SemanticReleaseError = require('@semantic-release/error');
module.exports = async config => {
const {plugins, lastRelease} = config;
const type = await promisify(plugins.analyzeCommits)(config);
if (!type) {
throw new SemanticReleaseError('There are no relevant changes, so no new version is released.', 'ENOCHANGE');
}
if (!lastRelease.version) return 'initial';
return type;
};


@ -1,4 +1,3 @@
/* istanbul ignore next */
module.exports = function (config, options, cb) {
cb(null)
}
module.exports = (config, options, cb) => {
cb(null);
};


@ -1,42 +1,42 @@
var relative = require('require-relative')
var series = require('run-series')
const {promisify} = require('util');
const relative = require('require-relative');
const pSeries = require('p-series');
var exports = module.exports = function (options) {
var plugins = {
analyzeCommits: exports.normalize(options.analyzeCommits, '@semantic-release/commit-analyzer'),
generateNotes: exports.normalize(options.generateNotes, '@semantic-release/release-notes-generator'),
getLastRelease: exports.normalize(options.getLastRelease, '@semantic-release/last-release-npm')
}
;['verifyConditions', 'verifyRelease'].forEach(function (plugin) {
module.exports = options => {
const plugins = {
analyzeCommits: normalize(options.analyzeCommits, '@semantic-release/commit-analyzer'),
generateNotes: normalize(options.generateNotes, '@semantic-release/release-notes-generator'),
getLastRelease: normalize(options.getLastRelease, '@semantic-release/last-release-npm'),
};
['verifyConditions', 'verifyRelease'].forEach(plugin => {
if (!Array.isArray(options[plugin])) {
plugins[plugin] = exports.normalize(
plugins[plugin] = normalize(
options[plugin],
plugin === 'verifyConditions'
? '@semantic-release/condition-travis'
: './plugin-noop'
)
return
plugin === 'verifyConditions' ? '@semantic-release/condition-travis' : './plugin-noop'
);
return;
}
plugins[plugin] = function (pluginOptions, cb) {
var tasks = options[plugin].map(function (step) {
return exports.normalize(step, './plugin-noop').bind(null, pluginOptions)
plugins[plugin] = async pluginOptions => {
return pSeries(
options[plugin].map(step => {
return () => promisify(normalize(step, './plugin-noop'))(pluginOptions);
})
);
};
});
series(tasks, cb)
}
})
return plugins;
};
return plugins
const normalize = (pluginConfig, fallback) => {
if (typeof pluginConfig === 'string') return relative(pluginConfig).bind(null, {});
if (pluginConfig && typeof pluginConfig.path === 'string') {
return relative(pluginConfig.path).bind(null, pluginConfig);
}
exports.normalize = function (pluginConfig, fallback) {
if (typeof pluginConfig === 'string') return relative(pluginConfig).bind(null, {})
return require(fallback).bind(null, pluginConfig);
};
if (pluginConfig && (typeof pluginConfig.path === 'string')) {
return relative(pluginConfig.path).bind(null, pluginConfig)
}
return require(fallback).bind(null, pluginConfig)
}
module.exports.normalize = normalize;


@ -1,21 +0,0 @@
var SemanticReleaseError = require('@semantic-release/error')
module.exports = function (config, cb) {
var plugins = config.plugins
var lastRelease = config.lastRelease
plugins.analyzeCommits(config, function (err, type) {
if (err) return cb(err)
if (!type) {
return cb(new SemanticReleaseError(
'There are no relevant changes, so no new version is released.',
'ENOCHANGE'
))
}
if (!lastRelease.version) return cb(null, 'initial')
cb(null, type)
})
}


@ -1,40 +1,25 @@
var SemanticReleaseError = require('@semantic-release/error')
const SemanticReleaseError = require('@semantic-release/error');
module.exports = function (config) {
var pkg = config.pkg
var options = config.options
var env = config.env
var errors = []
module.exports = ({pkg, options, env}) => {
const errors = [];
if (!pkg.name) {
errors.push(new SemanticReleaseError(
'No "name" found in package.json.',
'ENOPKGNAME'
))
errors.push(new SemanticReleaseError('No "name" found in package.json.', 'ENOPKGNAME'));
}
if (!pkg.repository || !pkg.repository.url) {
errors.push(new SemanticReleaseError(
'No "repository" found in package.json.',
'ENOPKGREPO'
))
errors.push(new SemanticReleaseError('No "repository" found in package.json.', 'ENOPKGREPO'));
}
if (options.debug) return errors
if (!options.debug) {
if (!options.githubToken) {
errors.push(new SemanticReleaseError(
'No github token specified.',
'ENOGHTOKEN'
))
errors.push(new SemanticReleaseError('No github token specified.', 'ENOGHTOKEN'));
}
if (!(env.NPM_TOKEN || (env.NPM_OLD_TOKEN && env.NPM_EMAIL))) {
errors.push(new SemanticReleaseError(
'No npm token specified.',
'ENONPMTOKEN'
))
errors.push(new SemanticReleaseError('No npm token specified.', 'ENONPMTOKEN'));
}
}
return errors
}
return errors;
};


@ -1,71 +1,38 @@
var url = require('url')
const {promisify} = require('util');
const url = require('url');
const gitHead = require('git-head');
const GitHubApi = require('github');
const parseSlug = require('parse-github-repo-url');
var gitHead = require('git-head')
var GitHubApi = require('github')
var parseSlug = require('parse-github-repo-url')
module.exports = async config => {
const {pkg, options: {branch, debug, githubUrl, githubToken, githubApiPathPrefix}, plugins} = config;
const [owner, repo] = parseSlug(pkg.repository.url);
const name = `v${pkg.version}`;
const tag = {owner, repo, ref: `refs/tags/${name}`, sha: await promisify(gitHead)()};
const body = await promisify(plugins.generateNotes)(config);
const release = {owner, repo, tag_name: name, name, target_commitish: branch, draft: !!debug, body};
module.exports = function (config, cb) {
var pkg = config.pkg
var options = config.options
var plugins = config.plugins
var ghConfig = options.githubUrl ? url.parse(options.githubUrl) : {}
var github = new GitHubApi({
port: ghConfig.port,
protocol: (ghConfig.protocol || '').split(':')[0] || null,
host: ghConfig.hostname,
pathPrefix: options.githubApiPathPrefix || null
})
plugins.generateNotes(config, function (err, log) {
if (err) return cb(err)
gitHead(function (err, hash) {
if (err) return cb(err)
var ghRepo = parseSlug(pkg.repository.url)
var tag = {
owner: ghRepo[0],
repo: ghRepo[1],
ref: 'refs/tags/v' + pkg.version,
sha: hash
}
var release = {
owner: ghRepo[0],
repo: ghRepo[1],
tag_name: 'v' + pkg.version,
name: 'v' + pkg.version,
target_commitish: options.branch,
draft: !!options.debug,
body: log
if (debug && !githubToken) {
return {published: false, release};
}
if (options.debug && !options.githubToken) {
return cb(null, false, release)
const {port, protocol, hostname} = githubUrl ? url.parse(githubUrl) : {};
const github = new GitHubApi({
port,
protocol: (protocol || '').split(':')[0] || null,
host: hostname,
pathPrefix: githubApiPathPrefix || null,
});
github.authenticate({type: 'token', token: githubToken});
if (debug) {
await github.repos.createRelease(release);
return {published: true, release};
}
github.authenticate({
type: 'token',
token: options.githubToken
})
await github.gitdata.createReference(tag);
await github.repos.createRelease(release);
if (options.debug) {
return github.repos.createRelease(release, function (err) {
if (err) return cb(err)
cb(null, true, release)
})
}
github.gitdata.createReference(tag, function (err) {
if (err) return cb(err)
github.repos.createRelease(release, function (err) {
if (err) return cb(err)
cb(null, true, release)
})
})
})
})
}
return {published: true, release};
};


@ -1,45 +1,23 @@
var _ = require('lodash')
var auto = require('run-auto')
var semver = require('semver')
const {promisify} = require('util');
const {assign} = require('lodash');
const semver = require('semver');
var getCommits = require('./lib/commits')
var getType = require('./lib/type')
const getCommits = require('./lib/get-commits');
const getReleaseType = require('./lib/get-release-type');
module.exports = function (config, cb) {
var plugins = config.plugins
module.exports = async config => {
const {getLastRelease, verifyRelease} = config.plugins;
auto({
lastRelease: plugins.getLastRelease.bind(null, config),
commits: ['lastRelease', function (results, cb) {
getCommits(_.assign({
lastRelease: results.lastRelease
}, config),
cb)
}],
type: ['commits', 'lastRelease', function (results, cb) {
getType(_.assign({
commits: results.commits,
lastRelease: results.lastRelease
}, config),
cb)
}]
}, function (err, results) {
if (err) return cb(err)
const lastRelease = await promisify(getLastRelease)(config);
const commits = await getCommits(assign({lastRelease}, config));
const type = await getReleaseType(assign({commits, lastRelease}, config));
var nextRelease = {
type: results.type,
version: results.type === 'initial'
? '1.0.0'
: semver.inc(results.lastRelease.version, results.type)
}
const nextRelease = {
type: type,
version: type === 'initial' ? '1.0.0' : semver.inc(lastRelease.version, type),
};
plugins.verifyRelease(_.assign({
commits: results.commits,
lastRelease: results.lastRelease,
nextRelease: nextRelease
}, config), function (err) {
if (err) return cb(err)
cb(null, nextRelease)
})
})
}
await promisify(verifyRelease)(assign({commits, lastRelease, nextRelease}, config));
return nextRelease;
};

3
test/fixtures/plugin-error-a.js vendored Normal file

@ -0,0 +1,3 @@
module.exports = function(config, options, cb) {
cb(new Error('a'));
};

3
test/fixtures/plugin-error-b.js vendored Normal file

@ -0,0 +1,3 @@
module.exports = function(config, options, cb) {
cb(new Error('b'));
};

3
test/fixtures/plugin-result-a.js vendored Normal file

@ -0,0 +1,3 @@
module.exports = function(config, options, cb) {
cb(null, 'a');
};

3
test/fixtures/plugin-result-b.js vendored Normal file

@ -0,0 +1,3 @@
module.exports = function(config, options, cb) {
cb(null, 'b');
};

3
test/fixtures/plugin-result-config.js vendored Normal file

@ -0,0 +1,3 @@
module.exports = function(pluginConfig, options, cb) {
cb(null, {pluginConfig, options});
};

130
test/get-commits.test.js Normal file

@ -0,0 +1,130 @@
import test from 'ava';
import {gitRepo, gitCommits, gitCheckout} from './helpers/git-utils';
import proxyquire from 'proxyquire';
import {stub} from 'sinon';
import SemanticReleaseError from '@semantic-release/error';
// Stub to capture the log messages
const errorLog = stub();
// Module to test
const getCommits = proxyquire('../src/lib/get-commits', {npmlog: {error: errorLog}});
test.beforeEach(t => {
// Save the current working directory
t.context.cwd = process.cwd();
// Reset the stub call history
errorLog.resetHistory();
});
test.afterEach.always(t => {
// Restore the current working directory
process.chdir(t.context.cwd);
});
test.serial('Get all commits when there is no last release', async t => {
// Create a git repository, set the current working directory at the root of the repo
await gitRepo();
// Add commits to the master branch
const commits = await gitCommits(['fix: First fix', 'feat: Second feature']);
// Retrieve the commits with the commits module
const result = await getCommits({lastRelease: {}, options: {branch: 'master'}});
// The commits created and retrieved by the module are identical
t.is(result.length, 2);
t.is(result[0].hash.substring(0, 7), commits[0].hash);
t.is(result[0].message, commits[0].message);
t.is(result[1].hash.substring(0, 7), commits[1].hash);
t.is(result[1].message, commits[1].message);
});
test.serial('Get all commits since lastRelease gitHead', async t => {
// Create a git repository, set the current working directory at the root of the repo
await gitRepo();
// Add commits to the master branch
const commits = await gitCommits(['fix: First fix', 'feat: Second feature', 'feat: Third feature']);
// Retrieve the commits with the commits module
const result = await getCommits({
lastRelease: {gitHead: commits[commits.length - 1].hash},
options: {branch: 'master'},
});
// The commits created and retrieved by the module are identical
t.is(result.length, 2);
t.is(result[0].hash.substring(0, 7), commits[0].hash);
t.is(result[0].message, commits[0].message);
t.is(result[1].hash.substring(0, 7), commits[1].hash);
t.is(result[1].message, commits[1].message);
});
test.serial('Return empty array if there is no commits', async t => {
// Create a git repository, set the current working directory at the root of the repo
await gitRepo();
// Add commits to the master branch
const commits = await gitCommits(['fix: First fix', 'feat: Second feature']);
// Retrieve the commits with the commits module
const result = await getCommits({lastRelease: {gitHead: commits[0].hash}, options: {branch: 'master'}});
// Verify no commit is retrieved
t.deepEqual(result, []);
});
test.serial('Return empty array if lastRelease.gitHead is the last commit', async t => {
// Create a git repository, set the current working directory at the root of the repo
await gitRepo();
// Retrieve the commits with the commits module
const result = await getCommits({lastRelease: {}, options: {branch: 'master'}});
// Verify no commit is retrieved
t.deepEqual(result, []);
});
test.serial('Throws ENOTINHISTORY error if gitHead is not in history', async t => {
// Create a git repository, set the current working directory at the root of the repo
await gitRepo();
// Add commits to the master branch
await gitCommits(['fix: First fix', 'feat: Second feature']);
// Retrieve the commits with the commits module
const error = await t.throws(getCommits({lastRelease: {gitHead: 'notinhistory'}, options: {branch: 'master'}}));
// Verify error code and message
t.is(error.code, 'ENOTINHISTORY');
t.true(error instanceof SemanticReleaseError);
// Verify the log function has been called with a message mentioning the branch
t.regex(errorLog.firstCall.args[1], /history of the "master" branch/);
// Verify the log function has been called with a message mentioning the missing gitHead
t.regex(errorLog.firstCall.args[1], /restoring the commit "notinhistory"/);
});
test.serial('Throws ENOTINHISTORY error if gitHead is not in branch history but present in others', async t => {
// Create a git repository, set the current working directory at the root of the repo
await gitRepo();
// Add commits to the master branch
await gitCommits(['First', 'Second']);
// Create the new branch 'other-branch' from master
await gitCheckout('other-branch', true);
// Add commits to the 'other-branch' branch
const commitsBranch = await gitCommits(['Third', 'Fourth']);
// Create the new branch 'another-branch' from 'other-branch'
await gitCheckout('another-branch', true);
// Retrieve the commits with the commits module
const error = await t.throws(
getCommits({lastRelease: {version: '1.0.1', gitHead: commitsBranch[0].hash}, options: {branch: 'master'}})
);
// Verify error code and message
t.is(error.code, 'ENOTINHISTORY');
t.true(error instanceof SemanticReleaseError);
// Verify the log function has been called with a message mentioning the branch
t.regex(errorLog.firstCall.args[1], /history of the "master" branch/);
// Verify the log function has been called with a message mentioning the missing gitHead
t.regex(errorLog.firstCall.args[1], new RegExp(`restoring the commit "${commitsBranch[0].hash}"`));
// Verify the log function has been called with a message mentioning the branches that contain the gitHead
t.regex(errorLog.firstCall.args[1], /\* another-branch\s+\* other-branch/);
});

82
test/get-registry.test.js Normal file

@ -0,0 +1,82 @@
import test from 'ava';
import {stub} from 'sinon';
const getRegistry = require('../src/lib/get-registry');
test('Get registry from package.json', t => {
// Retrieve the registry with the get-registry module and verify it returns the one from the package.json passed as a parameter
t.is(getRegistry({name: 'publish-config', publishConfig: {registry: 'a'}}, {}), 'a');
});
test('Prioritize the package.json registry config', t => {
// Stub the npmconf object
const get = stub();
// Retrieve the registry with the get-registry module and verify it returns the one from the package.json passed as a parameter
t.is(getRegistry({name: 'publish-config', publishConfig: {registry: 'b'}}, {get}), 'b');
// Verify the registry has been retrieved from the package.json without trying the stubbed npmconf
t.true(get.notCalled);
});
test('Get registry for regular package name', t => {
// Stub the npmconf object, returns 'b' for 'registry' property
const get = stub()
.withArgs('registry')
.returns('b');
// Retrieve the registry with the get-registry module and verify it returns the one configured in the stubbed npmconf
t.is(getRegistry({name: 'normal'}, {get}), 'b');
// Verify the registry has been retrieved by calling the stubbed npmconf
t.true(get.calledWithExactly('registry'));
});
test('Get default registry', t => {
// Stub the npmconf object, returns 'null'
const get = stub().returns(null);
// Retrieve the registry with the get-registry module and verify it returns the default one
t.is(getRegistry({name: 'normal'}, {get}), 'https://registry.npmjs.org/');
// Verify the module tried first to retrieve the registry by calling the stubbed npmconf
t.true(get.calledWithExactly('registry'));
});
test('Get registry for scoped package name', t => {
// Stub the npmconf object, returns 'c' for '@scoped/registry' property
const get = stub()
.withArgs('@scoped/registry')
.returns('c');
// Retrieve the registry with the get-registry module and verify it returns the one configured in the stubbed npmconf
t.is(getRegistry({name: '@scoped/foo'}, {get}), 'c');
// Verify the registry for the scope '@scoped' has been retrieved by calling the stubbed npmconf
t.true(get.calledWithExactly('@scoped/registry'));
});
test('Get regular registry for scoped package name', t => {
// Stub the npmconf object, returns 'd' for 'registry' property
const get = stub()
.withArgs('registry')
.returns('d');
// Retrieve the registry with the get-registry module and verify it returns the regular registry for `@scoped` packages
t.is(getRegistry({name: '@scoped/baz'}, {get}), 'd');
// Verify the module tried to retrieve the @scoped registry by calling the stubbed npmconf
t.true(get.calledWithExactly('@scoped/registry'));
});
test('Get default registry for scoped package name', t => {
// Stub the npmconf object, returns 'null'
const get = stub().returns(null);
// Retrieve the registry with the get-registry module and verify it returns the default one for `@scoped` packages
t.is(getRegistry({name: '@scoped/baz'}, {get}), 'https://registry.npmjs.org/');
// Verify the module tried to retrieve the @scoped registry by calling the stubbed npmconf
t.true(get.calledWithExactly('@scoped/registry'));
// Verify the module tried to retrieve the regular registry by calling the stubbed npmconf
t.true(get.calledWithExactly('registry'));
});


@ -0,0 +1,87 @@
import {callbackify} from 'util';
import test from 'ava';
import {stub} from 'sinon';
import SemanticReleaseError from '@semantic-release/error';
import getReleaseType from '../src/lib/get-release-type';
test('Get commit types from commits', async t => {
// Stub the commitAnalyzer plugin, returns 'major' release type
const analyzeCommits = stub().resolves('major');
const commits = [{hash: '0', message: 'a'}];
// Call the get-release-type module
const releaseType = await getReleaseType({
commits,
lastRelease: {version: '1.0.0'},
plugins: {analyzeCommits: callbackify(analyzeCommits)},
});
// Verify the module returns the release type obtained from the commitAnalyzer plugin
t.is(releaseType, 'major');
// Verify the commitAnalyzer plugin was called with the commits
t.true(analyzeCommits.calledOnce);
t.deepEqual(analyzeCommits.firstCall.args[0].commits, commits);
});
test('Throws error when no changes', async t => {
// Stub the commitAnalyzer plugin, returns 'null' release type
const analyzeCommits = stub().resolves(null);
const commits = [{hash: '0', message: 'a'}];
// Call the get-release-type module and verify it returns an error
const error = await t.throws(
getReleaseType({
commits,
lastRelease: {version: '1.0.0'},
plugins: {analyzeCommits: callbackify(analyzeCommits)},
})
);
// Verify the error code and type
t.is(error.code, 'ENOCHANGE');
t.true(error instanceof SemanticReleaseError);
// Verify the commitAnalyzer plugin was called with the commits
t.true(analyzeCommits.calledOnce);
t.deepEqual(analyzeCommits.firstCall.args[0].commits, commits);
});
test('Return initial if there is no lastRelease', async t => {
// Stub the commitAnalyzer plugin, returns 'major' release type
const analyzeCommits = stub().resolves('major');
const commits = [{hash: '0', message: 'a'}];
// Call the get-release-type module
const releaseType = await getReleaseType({
commits,
lastRelease: {},
plugins: {analyzeCommits: callbackify(analyzeCommits)},
});
// Verify the module returns an initial release type
t.is(releaseType, 'initial');
// Verify the commitAnalyzer plugin was called with the commits
t.true(analyzeCommits.calledOnce);
t.deepEqual(analyzeCommits.firstCall.args[0].commits, commits);
});
test('Throws error when no changes even if there is no lastRelease', async t => {
// Stub the commitAnalyzer plugin, returns 'null' release type
const analyzeCommits = stub().resolves(null);
const commits = [{hash: '0', message: 'a'}];
// Call the get-release-type module and verify it returns an error
const error = await t.throws(
getReleaseType({commits, lastRelease: {}, plugins: {analyzeCommits: callbackify(analyzeCommits)}})
);
// Verify the error code and type
t.is(error.code, 'ENOCHANGE');
t.true(error instanceof SemanticReleaseError);
// Verify the commitAnalyzer plugin was called with the commits
t.true(analyzeCommits.calledOnce);
t.deepEqual(analyzeCommits.firstCall.args[0].commits, commits);
});

61
test/helpers/git-utils.js Normal file

@ -0,0 +1,61 @@
import {mkdir} from 'fs-extra';
import tempy from 'tempy';
import execa from 'execa';
import pMapSeries from 'p-map-series';
/**
* Commit message information.
*
* @typedef {Object} Commit
* @property {string} branch The commit branch
* @property {string} hash The commit hash
* @property {string} message The commit message
*/
/**
* Create a temporary git repository and set the current working directory to the root of the repo.
*
* @method gitRepo
*/
export async function gitRepo() {
const dir = tempy.directory();
process.chdir(dir);
await mkdir('git-templates');
await execa('git', ['init', '--template=./git-templates']);
}
/**
* Create commits on the current git repository.
*
* @method gitCommits
* @param {Array<String>} messages commit messages
* @returns {Array<Commit>} commits the created commits, in reverse order (to match `git log` order)
*/
export async function gitCommits(messages) {
return (await pMapSeries(messages, async msg => {
const {stdout} = await execa('git', ['commit', '-m', msg, '--allow-empty', '--no-gpg-sign']);
const [, branch, hash, message] = /^\[(\w+)\(?.*?\)?(\w+)\] (.+)$/.exec(stdout);
return {branch, hash, message};
})).reverse();
}
/**
* Checkout a branch on the current git repository.
*
* @param {String} branch Branch name
* @param {Boolean} create `true` to create the branch and switch to it, `false` to only switch
*/
export async function gitCheckout(branch, create) {
await execa('git', ['checkout', create ? '-b' : null, branch]);
}
/**
* Get the sha of the head commit in the current git repository.
*
* @return {String} The sha of the head commit in the current git repository.
*/
export async function gitHead() {
return (await execa('git', ['rev-parse', 'HEAD'])).stdout;
}


@ -0,0 +1,7 @@
import nock from 'nock';
export function authenticate(
{githubToken = 'GH_TOKEN', githubUrl = 'https://api.github.com', githubApiPathPrefix = ''} = {}
) {
return nock(`${githubUrl}/${githubApiPathPrefix}`, {reqheaders: {Authorization: `token ${githubToken}`}});
}


@ -0,0 +1,14 @@
import execa from 'execa';
const opts = {cwd: __dirname};
export const uri =
'http://localhost:' + (process.env.TRAVIS === 'true' ? 5984 : 15986) + '/registry/_design/app/_rewrite/';
export function start() {
return execa('./start.sh', opts);
}
export function stop() {
return execa('./stop.sh', opts);
}


@ -2,6 +2,7 @@
database_dir = data
view_index_dir = data
delayed_commits = false
uuid = bf4ecd84a7c89d60b5b2540fdf8c322c
[couch_httpd_auth]
public_fields = appdotnet, avatar, avatarMedium, avatarLarge, date, email, fields, freenode, fullname, github, homepage, name, roles, twitter, type, _id, _rev

140
test/integration.test.js Normal file

@ -0,0 +1,140 @@
import test from 'ava';
import {writeJson, readJson} from 'fs-extra';
import {start, stop, uri} from './helpers/registry';
import {gitRepo, gitCommits, gitHead} from './helpers/git-utils';
import execa from 'execa';
test.beforeEach(async t => {
// Save the current working directory
t.context.cwd = process.cwd();
// Start the local NPM registry
await start();
});
test.afterEach.always(async t => {
// Restore the current working directory
process.chdir(t.context.cwd);
// Stop the local NPM registry
await stop();
});
test.serial('Release patch, minor and major versions', async t => {
// Environment variables used with the CLI
const env = {
CI: true,
npm_config_registry: uri,
GH_TOKEN: 'github_token',
NPM_OLD_TOKEN: 'aW50ZWdyYXRpb246c3VjaHNlY3VyZQ==',
NPM_EMAIL: 'integration@test.com',
};
// Create a git repository, set the current working directory at the root of the repo
t.log('Create git repository');
await gitRepo();
// Create package.json in repository root
await writeJson('./package.json', {
name: 'test-module',
version: '0.0.0-dev',
repository: {url: 'git+https://github.com/semantic-release/test-module'},
release: {verifyConditions: require.resolve('../src/lib/plugin-noop')},
});
// Create a npm-shrinkwrap.json file
await execa('npm', ['shrinkwrap'], {env});
/** No release **/
t.log('Commit a chore');
await gitCommits(['chore: Init repository']);
t.log('$ semantic-release pre');
let {stdout, stderr, code} = await t.throws(execa(require.resolve('../bin/semantic-release'), ['pre'], {env}));
t.regex(stderr, /ENOCHANGE There are no relevant changes, so no new version is released/);
t.is(code, 1);
/** Minor release **/
t.log('Commit a feature');
await gitCommits(['feat: Initial commit']);
t.log('$ semantic-release pre');
({stdout, stderr, code} = await execa(require.resolve('../bin/semantic-release'), ['pre'], {env}));
// Verify package.json and npm-shrinkwrap.json have been updated
t.is((await readJson('./package.json')).version, '1.0.0');
t.is((await readJson('./npm-shrinkwrap.json')).version, '1.0.0');
t.log('$ npm publish');
({stdout, stderr, code} = await execa('npm', ['publish'], {env}));
// Verify output of npm publish
t.regex(stdout, /test-module@1.0.0/);
t.is(code, 0);
// Retrieve the published package from the registry and check version and gitHead
let [, version, releaseGitHead] = /^version = '(.+)'\s+gitHead = '(.+)'$/.exec(
(await execa('npm', ['show', 'test-module', 'version', 'gitHead'], {env})).stdout
);
t.is(version, '1.0.0');
t.is(releaseGitHead, await gitHead());
t.log(`+ released ${version} with gitHead ${releaseGitHead}`);
/** Patch release **/
t.log('Commit a fix');
await gitCommits(['fix: bar']);
t.log('$ semantic-release pre');
({stdout, stderr, code} = await execa(require.resolve('../bin/semantic-release'), ['pre'], {env}));
// Verify package.json and npm-shrinkwrap.json have been updated
t.is((await readJson('./package.json')).version, '1.0.1');
t.is((await readJson('./npm-shrinkwrap.json')).version, '1.0.1');
t.log('$ npm publish');
({stdout, stderr, code} = await execa('npm', ['publish'], {env}));
// Verify output of npm publish
t.regex(stdout, /test-module@1.0.1/);
t.is(code, 0);
// Retrieve the published package from the registry and check version and gitHead
[, version, releaseGitHead] = /^version = '(.+)'\s+gitHead = '(.+)'$/.exec(
(await execa('npm', ['show', 'test-module', 'version', 'gitHead'], {env})).stdout
);
t.is(version, '1.0.1');
t.is(releaseGitHead, await gitHead());
t.log(`+ released ${version} with gitHead ${releaseGitHead}`);
/** Minor release **/
t.log('Commit a feature');
await gitCommits(['feat: baz']);
t.log('$ semantic-release pre');
({stdout, stderr, code} = await execa(require.resolve('../bin/semantic-release'), ['pre'], {env}));
// Verify package.json and npm-shrinkwrap.json have been updated
t.is((await readJson('./package.json')).version, '1.1.0');
t.is((await readJson('./npm-shrinkwrap.json')).version, '1.1.0');
t.log('$ npm publish');
({stdout, stderr, code} = await execa('npm', ['publish'], {env}));
// Verify output of npm publish
t.regex(stdout, /test-module@1.1.0/);
t.is(code, 0);
// Retrieve the published package from the registry and check version and gitHead
[, version, releaseGitHead] = /^version = '(.+)'\s+gitHead = '(.+)'$/.exec(
(await execa('npm', ['show', 'test-module', 'version', 'gitHead'], {env})).stdout
);
t.is(version, '1.1.0');
t.is(releaseGitHead, await gitHead());
t.log(`+ released ${version} with gitHead ${releaseGitHead}`);
/** Major release **/
t.log('Commit a breaking change');
await gitCommits(['feat: foo\n\n BREAKING CHANGE: bar']);
t.log('$ semantic-release pre');
({stdout, stderr, code} = await execa(require.resolve('../bin/semantic-release'), ['pre'], {env}));
// Verify package.json and npm-shrinkwrap.json have been updated
t.is((await readJson('./package.json')).version, '2.0.0');
t.is((await readJson('./npm-shrinkwrap.json')).version, '2.0.0');
t.log('$ npm publish');
({stdout, stderr, code} = await execa('npm', ['publish'], {env}));
// Verify output of npm publish
t.regex(stdout, /test-module@2.0.0/);
t.is(code, 0);
// Retrieve the published package from the registry and check version and gitHead
[, version, releaseGitHead] = /^version = '(.+)'\s+gitHead = '(.+)'$/.exec(
(await execa('npm', ['show', 'test-module', 'version', 'gitHead'], {env})).stdout
);
t.is(version, '2.0.0');
t.is(releaseGitHead, await gitHead());
t.log(`+ released ${version} with gitHead ${releaseGitHead}`);
});


@ -1,12 +0,0 @@
var nixt = require('nixt')
module.exports = function (cwd, uri) {
return nixt()
.cwd(cwd)
.env('NPM_OLD_TOKEN', 'aW50ZWdyYXRpb246c3VjaHNlY3VyZQ==')
.env('NPM_EMAIL', 'integration@test.com')
.env('GH_TOKEN', 'ghtoken')
.env('CI', 'true')
.env('npm_config_registry', uri)
.clone()
}


@ -1,36 +0,0 @@
var exec = require('child_process').exec
var join = require('path').join
var writeFileSync = require('fs').writeFileSync
var mkdirp = require('mkdirp')
module.exports = function (name, registry, cb) {
var cwd = join(__dirname, '../tmp', name)
mkdirp.sync(cwd)
writeFileSync(join(cwd, 'package.json'), JSON.stringify({
name: name,
repository: {
url: 'git+https://github.com/semantic-release/test'
},
release: {
verifyConditions: '../../../src/lib/plugin-noop'
}
}, null, 2))
exec(
'git init && ' +
'git config user.email "integration@test" && ' +
'git config user.name "Integration Test" && ' +
'git add . && ' +
'git commit -m "chore: root"'
, {cwd: cwd}, function (err, stdout, stderr) {
if (err) {
console.log(stdout, stderr)
return cb(err)
}
cb(null, cwd)
})
}


@ -1,30 +0,0 @@
var mockSpawn = require('mock-spawn')()
mockSpawn.setStrategy(function (command, args, opts) {
return function (cb) {
this.stdout.write(
/\.\.HEAD/.test(args.join(' '))
? rawCommits[0]
: rawCommits.join()
)
cb(0)
}
})
const rawCommits = [
'hash-one==SPLIT==commit-one==END==\n',
'hash-two==SPLIT==commit-two==END==\n'
]
module.exports = {
exec: function (command, options, cb) {
if (typeof cb === 'undefined' && typeof options === 'function') {
cb = options
}
if (/contains/.test(command)) {
if (/notinhistory/.test(command)) return cb(new Error())
return cb(null, 'whatever\nmaster\n')
}
},
spawn: mockSpawn,
'@noCallThru': true
}


@ -1,3 +0,0 @@
module.exports = function (cb) {
cb(null, 'bar')
}


@ -1,17 +0,0 @@
module.exports = function () {
return {
authenticate: function () {
return true
},
gitdata: {
createReference: function (release, cb) {
cb(null)
}
},
repos: {
createRelease: function (release, cb) {
cb(null)
}
}
}
}


@ -1,3 +0,0 @@
module.exports = function (config, options, cb) {
cb(new Error('a'))
}


@ -1,3 +0,0 @@
module.exports = function (config, options, cb) {
cb(new Error('b'))
}


@ -1,3 +0,0 @@
module.exports = function (config, options, cb) {
cb(null, 'a')
}


@ -1,3 +0,0 @@
module.exports = function (config, options, cb) {
cb(null, 'b')
}


@ -1,28 +0,0 @@
const nock = require('nock')
const availableModule = {
'dist-tags': {
latest: '1.33.7',
foo: '0.8.15'
},
versions: {
'0.8.15': {
gitHead: 'bar'
},
'1.33.7': {
gitHead: 'HEAD'
}
}
}
module.exports = nock('http://registry.npmjs.org')
.get('/available')
.reply(200, availableModule)
.get('/tagged')
.reply(200, availableModule)
.get('/untagged')
.reply(200, availableModule)
.get('/@scoped%2Favailable')
.reply(200, availableModule)
.get('/unavailable')
.reply(404, {})

78
test/plugins.test.js Normal file

@ -0,0 +1,78 @@
import test from 'ava';
import plugins from '../src/lib/plugins';
test('Export plugins', t => {
// Call the plugin module
const defaultPlugins = plugins({});
// Verify the module returns a function for each plugin
t.is(typeof defaultPlugins.analyzeCommits, 'function');
t.is(typeof defaultPlugins.generateNotes, 'function');
t.is(typeof defaultPlugins.verifyConditions, 'function');
t.is(typeof defaultPlugins.verifyRelease, 'function');
t.is(typeof defaultPlugins.getLastRelease, 'function');
});
test('Pipeline - Get all results', async t => {
// Call the plugin module with a verifyRelease plugin pipeline
const pipelinePlugins = plugins({
verifyRelease: ['./src/lib/plugin-noop', './test/fixtures/plugin-result-a', './test/fixtures/plugin-result-b'],
});
// Call the verifyRelease pipeline
const results = await pipelinePlugins.verifyRelease({});
// Verify the pipeline returns the expected result for each plugin, in order
t.deepEqual(results, [undefined, 'a', 'b']);
});
test('Pipeline - Pass pluginConfig and options to each plugins', async t => {
// Plugin configuration with options (plugin-result-config is a mock plugin returning its pluginConfig and options parameters)
const pluginConfig = {path: './test/fixtures/plugin-result-config', pluginParam: 'param1'};
// Semantic-release global options
const options = {semanticReleaseParam: 'param2'};
// Call the plugin module with a verifyRelease plugin pipeline
const pipelinePlugins = plugins({
verifyRelease: [pluginConfig, './test/fixtures/plugin-result-config'],
});
// Call the verifyRelease pipeline
const results = await pipelinePlugins.verifyRelease(options);
// Verify the pipeline first result is the pluginConfig and options parameters (to verify the plugin was called with the defined pluginConfig and options parameters)
t.deepEqual(results, [{pluginConfig, options}, {pluginConfig: {}, options}]);
});
test('Pipeline - Get first error', async t => {
// Call the plugin module with a verifyRelease plugin pipeline
const pipelinePlugins = plugins({
verifyRelease: ['./src/lib/plugin-noop', './test/fixtures/plugin-error-a', './test/fixtures/plugin-error-b'],
});
// Call the verifyRelease pipeline and verify it returns the error thrown by './test/fixtures/plugin-error-a'
await t.throws(pipelinePlugins.verifyRelease({}), 'a');
});
test('Normalize and load plugin from string', t => {
// Call the normalize function with a path
const plugin = plugins.normalize('./src/lib/plugin-noop');
// Verify the plugin is loaded
t.is(typeof plugin, 'function');
});
test('Normalize and load plugin from object', t => {
// Call the normalize function with an object (with path property)
const plugin = plugins.normalize({path: './src/lib/plugin-noop'});
// Verify the plugin is loaded
t.is(typeof plugin, 'function');
});
test('load from fallback', t => {
// Call the normalize function with a fallback
const plugin = plugins.normalize(null, '../lib/plugin-noop');
// Verify the fallback plugin is loaded
t.is(typeof plugin, 'function');
});

151
test/post.test.js Normal file

@ -0,0 +1,151 @@
import {callbackify} from 'util';
import test from 'ava';
import {gitRepo, gitCommits, gitHead} from './helpers/git-utils';
import {stub} from 'sinon';
import nock from 'nock';
import {authenticate} from './helpers/mock-github';
import post from '../src/post';
test.beforeEach(t => {
// Save the current working directory
t.context.cwd = process.cwd();
});
test.afterEach.always(t => {
// Restore the current working directory
process.chdir(t.context.cwd);
// Reset nock
nock.cleanAll();
});
test.serial('Post run with github token', async t => {
// Create a git repository, set the current working directory at the root of the repo
await gitRepo();
// Add commits to the master branch
await gitCommits(['fix: First fix', 'feat: Second feature']);
const sha = await gitHead();
const owner = 'test_user';
const repo = 'test_repo';
const githubUrl = 'https://testurl.com:443';
const githubToken = 'github_token';
const githubApiPathPrefix = 'prefix';
const releaseLog = 'Test release note body';
// Stub the generateNotes plugin
const generateNotes = stub().resolves(releaseLog);
const version = '1.0.0';
const branch = 'master';
const debug = false;
const tagName = `v${version}`;
const options = {branch, debug, githubUrl, githubToken, githubApiPathPrefix};
const pkg = {version, repository: {url: `git+https://othertesturl.com/${owner}/${repo}.git`}};
// Mock github API for releases and git/refs endpoints
const github = authenticate({githubUrl, githubToken, githubApiPathPrefix})
.post(`/repos/${owner}/${repo}/releases`, {
tag_name: tagName,
target_commitish: branch,
name: tagName,
body: releaseLog,
draft: debug,
})
.reply({})
.post(`/repos/${owner}/${repo}/git/refs`, {ref: `refs/tags/${tagName}`, sha})
.reply({});
// Call the post module
const result = await post({pkg, options, plugins: {generateNotes: callbackify(generateNotes)}});
// Verify the generateNotes plugin has been called with 'options' and 'pkg'
t.true(generateNotes.calledOnce);
t.deepEqual(generateNotes.firstCall.args[0].options, options);
t.deepEqual(generateNotes.firstCall.args[0].pkg, pkg);
// Verify the published release note
t.deepEqual(result, {
published: true,
release: {owner, repo, tag_name: tagName, name: tagName, target_commitish: branch, draft: debug, body: releaseLog},
});
// Verify the releases and git/refs endpoints have been called with the expected requests
t.true(github.isDone());
});
test.serial('Post dry run with github token', async t => {
// Create a git repository, set the current working directory at the root of the repo
await gitRepo();
// Add commits to the master branch
await gitCommits(['fix: First fix', 'feat: Second feature']);
const owner = 'test_user';
const repo = 'test_repo';
const githubToken = 'github_token';
const releaseLog = 'Test release note body';
// Stub the generateNotes plugin
const generateNotes = stub().resolves(releaseLog);
const version = '1.0.0';
const branch = 'master';
const debug = true;
const tagName = `v${version}`;
const options = {branch, debug, githubToken};
const pkg = {version, repository: {url: `git+https://othertesturl.com/${owner}/${repo}.git`}};
// Mock github API for releases endpoint
const github = authenticate({githubToken})
.post(`/repos/${owner}/${repo}/releases`, {
tag_name: tagName,
target_commitish: branch,
name: tagName,
body: releaseLog,
draft: debug,
})
.reply({});
// Call the post module
const result = await post({pkg, options, plugins: {generateNotes: callbackify(generateNotes)}});
// Verify the generateNotes plugin has been called with 'options' and 'pkg'
t.true(generateNotes.calledOnce);
t.deepEqual(generateNotes.firstCall.args[0].options, options);
t.deepEqual(generateNotes.firstCall.args[0].pkg, pkg);
// Verify the published release note
t.deepEqual(result, {
published: true,
release: {owner, repo, tag_name: tagName, name: tagName, target_commitish: branch, draft: debug, body: releaseLog},
});
// Verify the releases endpoint has been called with the expected request
t.true(github.isDone());
});
test.serial('Post dry run without github token', async t => {
// Create a git repository, set the current working directory at the root of the repo
await gitRepo();
// Add commits to the master branch
await gitCommits(['fix: First fix', 'feat: Second feature']);
const owner = 'test_user';
const repo = 'test_repo';
const releaseLog = 'Test release note body';
// Stub the generateNotes plugin
const generateNotes = stub().resolves(releaseLog);
const version = '1.0.0';
const branch = 'master';
const debug = true;
const tagName = `v${version}`;
const options = {branch, debug};
const pkg = {version, repository: {url: `git+https://othertesturl.com/${owner}/${repo}.git`}};
// Call the post module
const result = await post({pkg, options, plugins: {generateNotes: callbackify(generateNotes)}});
// Verify the generateNotes plugin has been called with 'options' and 'pkg'
t.true(generateNotes.calledOnce);
t.deepEqual(generateNotes.firstCall.args[0].options, options);
t.deepEqual(generateNotes.firstCall.args[0].pkg, pkg);
// Verify the release is not published and the generated release data is returned
t.deepEqual(result, {
published: false,
release: {owner, repo, tag_name: tagName, name: tagName, target_commitish: branch, draft: debug, body: releaseLog},
});
});
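The post tests above rely on two small pieces of glue: an `authenticate` helper (imported from a test helper module earlier in the file, not shown in this excerpt) that returns a nock scope bound to the GitHub API URL, path prefix and token, and Node's `util.callbackify`, which wraps the async sinon stubs so they satisfy the callback-based plugin signature that `post` still expects. The following is a minimal sketch of such a helper, not the exact file from this commit; the default URL and the Authorization header format are assumptions.

// test/helpers/mock-github.js (sketch only - the real helper in this commit may differ)
import nock from 'nock';

// Return a nock scope for the GitHub API coordinates used by the tests.
// An empty `githubApiPathPrefix` is simply dropped from the base URL.
export function authenticate({githubToken, githubUrl = 'https://api.github.com', githubApiPathPrefix = ''} = {}) {
  return nock([githubUrl, githubApiPathPrefix].filter(Boolean).join('/'), {
    reqheaders: {Authorization: `token ${githubToken}`},
  });
}

// In the tests, `callbackify(generateNotes)` turns the async stub
// `async config => releaseLog` into `(config, cb) => cb(null, releaseLog)`,
// matching the callback-style plugin API that post/pre still consume.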

163
test/pre.test.js Normal file
View File

@ -0,0 +1,163 @@
import {callbackify} from 'util';
import test from 'ava';
import {gitRepo, gitCommits} from './helpers/git-utils';
import proxyquire from 'proxyquire';
import {stub} from 'sinon';
// Stub to capture the log messages
const errorLog = stub();
// Module to test
const pre = proxyquire('../src/pre', {
'./lib/get-commits': proxyquire('../src/lib/get-commits', {npmlog: {error: errorLog}}),
});
test.beforeEach(t => {
// Save the current working directory
t.context.cwd = process.cwd();
// Reset the stub call history
errorLog.resetHistory();
});
test.afterEach.always(t => {
// Restore the current working directory
process.chdir(t.context.cwd);
});
test.serial('Increase version', async t => {
// Create a git repository, set the current working directory at the root of the repo
await gitRepo();
// Add commits to the master branch
const cmts = await gitCommits(['fix: First fix', 'feat: Second feature']);
const options = {branch: 'master'};
const pkg = {name: 'available'};
const lastRelease = {version: '1.0.0', gitHead: cmts[cmts.length - 1].hash};
// Stub the getLastRelease, analyzeCommits and verifyRelease plugins
const getLastRelease = stub().resolves(lastRelease);
const analyzeCommits = stub().resolves('major');
const verifyRelease = stub().resolves();
// Call the pre module
const nextRelease = await pre({
options,
pkg,
plugins: {
getLastRelease: callbackify(getLastRelease),
analyzeCommits: callbackify(analyzeCommits),
verifyRelease: callbackify(verifyRelease),
},
});
// Verify the pre module returns the 'type' determined by analyzeCommits and the 'version' returned by getLastRelease incremented according to that type (current version 1.0.0 => major release => version 2.0.0)
t.deepEqual(nextRelease, {type: 'major', version: '2.0.0'});
// Verify the getLastRelease plugin has been called with 'options' and 'pkg'
t.true(getLastRelease.calledOnce);
t.deepEqual(getLastRelease.firstCall.args[0].options, options);
t.deepEqual(getLastRelease.firstCall.args[0].pkg, pkg);
// Verify the analyzeCommits plugin has been called with only the commits created since the last release gitHead
t.true(analyzeCommits.calledOnce);
t.is(analyzeCommits.firstCall.args[0].commits.length, 1);
t.is(analyzeCommits.firstCall.args[0].commits[0].hash.substring(0, 7), cmts[0].hash);
t.is(analyzeCommits.firstCall.args[0].commits[0].message, cmts[0].message);
// Verify the verifyRelease plugin has been called with 'lastRelease' and 'nextRelease'
t.true(verifyRelease.calledOnce);
t.deepEqual(verifyRelease.firstCall.args[0].lastRelease, lastRelease);
t.deepEqual(verifyRelease.firstCall.args[0].nextRelease, nextRelease);
});
test.serial('Initial version', async t => {
// Create a git repository, set the current working directory at the root of the repo
await gitRepo();
// Add commits to the master branch
const cmts = await gitCommits(['fix(scope1): First fix', 'feat(scope2): Second feature']);
const options = {branch: 'master'};
const pkg = {name: 'available'};
const lastRelease = {version: null, gitHead: undefined};
// Stub the getLastRelease, analyzeCommits and verifyRelease plugins
const getLastRelease = stub().resolves({version: null, gitHead: undefined});
const analyzeCommits = stub().resolves('major');
const verifyRelease = stub().resolves();
// Call the pre module
const nextRelease = await pre({
options,
pkg,
plugins: {
getLastRelease: callbackify(getLastRelease),
analyzeCommits: callbackify(analyzeCommits),
verifyRelease: callbackify(verifyRelease),
},
});
// Verify the pre module returns the 'initial' type and the initial version (no previous version => initial release => version 1.0.0)
t.deepEqual(nextRelease, {type: 'initial', version: '1.0.0'});
// Verify the getLastRelease plugin has been called with 'options' and 'pkg'
t.true(getLastRelease.calledOnce);
t.deepEqual(getLastRelease.firstCall.args[0].options, options);
t.deepEqual(getLastRelease.firstCall.args[0].pkg, pkg);
// Verify the analyzeCommits plugin has been called with all the repo 'commits'
t.true(analyzeCommits.calledOnce);
t.is(analyzeCommits.firstCall.args[0].commits.length, 2);
t.is(analyzeCommits.firstCall.args[0].commits[0].hash.substring(0, 7), cmts[0].hash);
t.is(analyzeCommits.firstCall.args[0].commits[0].message, cmts[0].message);
t.is(analyzeCommits.firstCall.args[0].commits[1].hash.substring(0, 7), cmts[1].hash);
t.is(analyzeCommits.firstCall.args[0].commits[1].message, cmts[1].message);
// Verify the verifyRelease plugin has been called with 'lastRelease' and 'nextRelease'
t.true(verifyRelease.calledOnce);
t.deepEqual(verifyRelease.firstCall.args[0].lastRelease, lastRelease);
t.deepEqual(verifyRelease.firstCall.args[0].nextRelease, nextRelease);
});
test.serial('Throws error if verifyRelease fails', async t => {
// Create a git repository, set the current working directory at the root of the repo
await gitRepo();
// Add commits to the master branch
const cmts = await gitCommits(['fix: First fix', 'feat: Second feature']);
const options = {branch: 'master'};
const pkg = {name: 'available'};
const lastRelease = {version: '1.0.0', gitHead: cmts[cmts.length - 1].hash};
// Stub the getLastRelease, analyzeCommits and verifyRelease plugins
const getLastRelease = stub().resolves(lastRelease);
const analyzeCommits = stub().resolves('major');
const verifyRelease = stub().rejects(new Error('verifyRelease failed'));
// Call the pre module and verify it rejects with the error returned by verifyRelease
const error = await t.throws(
pre({
options,
pkg,
plugins: {
getLastRelease: callbackify(getLastRelease),
analyzeCommits: callbackify(analyzeCommits),
verifyRelease: callbackify(verifyRelease),
},
})
);
// Verify the error message is the one returned by verifyRelease
t.is(error.message, 'verifyRelease failed');
// Verify the getLastRelease plugin has been called with 'options' and 'pkg'
t.true(getLastRelease.calledOnce);
t.deepEqual(getLastRelease.firstCall.args[0].options, options);
t.deepEqual(getLastRelease.firstCall.args[0].pkg, pkg);
// Verify the analyzeCommits plugin has been called with all the repo 'commits'
t.true(analyzeCommits.calledOnce);
t.is(analyzeCommits.firstCall.args[0].commits.length, 1);
t.is(analyzeCommits.firstCall.args[0].commits[0].hash.substring(0, 7), cmts[0].hash);
t.is(analyzeCommits.firstCall.args[0].commits[0].message, cmts[0].message);
// Verify the verifyRelease plugin has been called with 'lastRelease' and 'nextRelease'
t.true(verifyRelease.calledOnce);
t.deepEqual(verifyRelease.firstCall.args[0].lastRelease, lastRelease);
t.deepEqual(verifyRelease.firstCall.args[0].nextRelease, {type: 'major', version: '2.0.0'});
});
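Both new test files import `gitRepo`, `gitCommits` and, in post.test.js, `gitHead` from './helpers/git-utils'. From the way they are used above, `gitRepo` creates a temporary repository and makes it the current working directory, `gitCommits` creates one empty commit per message and returns {hash, message} objects newest first with abbreviated hashes, and `gitHead` returns the current HEAD sha. A plausible sketch built on execa and tempy (both present in the dependency lists) follows; the exact implementation in the commit may differ.

// test/helpers/git-utils.js (sketch only - illustrative, not the exact helper from this commit)
import tempy from 'tempy';
import execa from 'execa';

// Create an empty git repository in a temp directory and cd into it.
export async function gitRepo() {
  const dir = tempy.directory();
  process.chdir(dir);
  await execa('git', ['init']);
  // Ensure commits can be created without relying on a global git config.
  await execa('git', ['config', 'user.email', 'test@example.com']);
  await execa('git', ['config', 'user.name', 'Test']);
  return dir;
}

// Create one empty commit per message; return the repo history newest first,
// as {hash, message} objects with abbreviated hashes (the tests compare them
// against `fullHash.substring(0, 7)`).
export async function gitCommits(messages) {
  for (const message of messages) {
    await execa('git', ['commit', '-m', message, '--allow-empty', '--no-gpg-sign']);
  }
  const {stdout} = await execa('git', ['log', '--format=%h==SPLIT==%s']);
  return stdout
    .split('\n')
    .filter(Boolean)
    .map(line => {
      const [hash, message] = line.split('==SPLIT==');
      return {hash, message};
    });
}

// Return the full sha of the current HEAD.
export async function gitHead() {
  return (await execa('git', ['rev-parse', 'HEAD'])).stdout;
}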

View File

@ -1,11 +0,0 @@
var exec = require('child_process').exec
var opts = {
cwd: __dirname
}
module.exports = {
start: exec.bind(null, './start.sh', opts),
stop: exec.bind(null, './stop.sh', opts),
uri: 'http://localhost:' + (process.env.TRAVIS === 'true' ? 5984 : 15986) + '/registry/_design/app/_rewrite/'
}

View File

@ -1,96 +0,0 @@
var join = require('path').join
var tap = require('tap')
var rimraf = require('rimraf')
var registry = require('../registry')
var testModule = require('../lib/test-module')
var baseScenario = require('../lib/base-scenario')
var tearDown = tap.tearDown
var test = tap.test
test('change version', {bail: process.env.TRAVIS === 'true'}, function (t) {
t.plan(7)
registry.start(function (err, stdout, stderr) {
t.error(err, 'registry started')
if (err) return t.end()
testModule('change-version', registry.uri, function (err, cwd) {
t.error(err, 'test-module created')
if (err) return t.end()
t.test('no version', function (tt) {
tt.plan(1)
baseScenario(cwd, registry.uri)
.env('npm_config_loglevel', 'info')
.run('node ../../../bin/semantic-release.js pre')
.stderr(/ENOCHANGE/)
.code(1)
.end(tt.error)
})
t.test('initial version', function (tt) {
tt.plan(1)
baseScenario(cwd, registry.uri)
.exec('git commit -m "feat: initial" --allow-empty')
.exec('node ../../../bin/semantic-release.js pre')
.run('npm publish')
.stdout(/1\.0\.0/)
.code(0)
.end(tt.error)
})
t.test('patch version', function (tt) {
tt.plan(1)
baseScenario(cwd, registry.uri)
.exec('git commit -m "fix: foo" --allow-empty')
.exec('node ../../../bin/semantic-release.js pre')
.run('npm publish')
.stdout(/1\.0\.1/)
.code(0)
.end(tt.error)
})
t.test('feature version', function (tt) {
tt.plan(1)
baseScenario(cwd, registry.uri)
.exec('git commit -m "feat: foo" --allow-empty')
.exec('node ../../../bin/semantic-release.js pre')
.run('npm publish')
.code(0)
.stdout(/1\.1\.0/)
.end(tt.error)
})
t.test('breaking version', function (tt) {
tt.plan(1)
baseScenario(cwd, registry.uri)
.exec('git commit -m "feat: foo\n\n BREAKING CHANGE: bar" --allow-empty')
.exec('node ../../../bin/semantic-release.js pre')
.run('npm publish')
.code(0)
.stdout(/2\.0\.0/)
.end(tt.error)
})
})
})
})
tearDown(function () {
if (process.env.TRAVIS === 'true') return
function cb (err, stdout, stderr) {
if (err) console.log(err)
if (stderr) console.log(stderr)
}
rimraf(join(__dirname, '../tmp'), cb)
registry.stop(cb)
})

View File

@ -1,43 +0,0 @@
var test = require('tap').test
var proxyquire = require('proxyquire')
var commits = proxyquire('../../src/lib/commits', {
'npmlog': {
error: function () {}
},
'child_process': require('../mocks/child-process')
})
test('commits since last release', function (t) {
t.test('get all commits', function (tt) {
commits({lastRelease: {}, options: {branch: 'master'}}, function (err, commits) {
tt.error(err)
tt.is(commits.length, 2, 'all commits')
tt.is(commits[0].hash, 'hash-one', 'parsed hash')
tt.is(commits[1].message, 'commit-two', 'parsed message')
tt.end()
})
})
t.test('get commits since hash', function (tt) {
commits({lastRelease: {gitHead: 'hash'}, options: {branch: 'master'}}, function (err, commits) {
tt.error(err)
tt.is(commits.length, 1, 'specified commits')
tt.is(commits[0].hash, 'hash-one', 'parsed hash')
tt.is(commits[0].message, 'commit-one', 'parsed message')
tt.end()
})
})
t.test('get commits since hash', function (tt) {
commits({lastRelease: {gitHead: 'notinhistory'}, options: {branch: 'notmaster'}}, function (err, commits) {
tt.ok(err)
tt.is(err.code, 'ENOTINHISTORY')
tt.end()
})
})
t.end()
})

View File

@ -1,44 +0,0 @@
const test = require('tap').test
const getRegistry = require('../../src/lib/get-registry')
test('get correct registry', function (t) {
t.is(getRegistry({
name: 'publish-config',
publishConfig: {
registry: 'a'
}},
{}), 'a')
t.is(getRegistry({name: 'normal'}, {
get: function () {
return 'b'
}
}), 'b')
t.is(getRegistry({name: 'normal'}, {
get: function () {
return null
}
}), 'https://registry.npmjs.org/')
t.is(getRegistry({name: '@scoped/foo'}, {
get: function (input) {
return input === '@scoped/registry' ? 'c' : 'd'
}
}), 'c')
t.is(getRegistry({name: '@scoped/bar'}, {
get: function () {
return 'e'
}
}), 'e')
t.is(getRegistry({name: '@scoped/baz'}, {
get: function () {
return null
}
}), 'https://registry.npmjs.org/')
t.end()
})

View File

@ -1,97 +0,0 @@
var test = require('tap').test
var plugins = require('../../src/lib/plugins')
test('export plugins', function (t) {
t.plan(5)
var defaultPlugins = plugins({})
t.is(typeof defaultPlugins.analyzeCommits, 'function')
t.is(typeof defaultPlugins.generateNotes, 'function')
t.is(typeof defaultPlugins.verifyConditions, 'function')
t.is(typeof defaultPlugins.verifyRelease, 'function')
t.is(typeof defaultPlugins.getLastRelease, 'function')
})
test('plugin pipelines', function (t) {
t.plan(3)
t.test('get all results', function (tt) {
var pipelinePlugins = plugins({
verifyRelease: [
'./src/lib/plugin-noop',
'./test/mocks/plugin-result-a',
'./test/mocks/plugin-result-b'
]
})
pipelinePlugins.verifyRelease({}, function (err, results) {
tt.error(err)
tt.same(results, [undefined, 'a', 'b'])
tt.end()
})
})
t.test('get first error', function (tt) {
var pipelinePlugins = plugins({
verifyConditions: [
'./src/lib/plugin-noop',
'./test/mocks/plugin-error-a',
'./test/mocks/plugin-error-b'
]
})
pipelinePlugins.verifyConditions({}, function (err) {
tt.is(err.message, 'a')
tt.end()
})
})
t.test('get error and only results before', function (tt) {
var pipelinePlugins = plugins({
verifyRelease: [
'./src/lib/plugin-noop',
'./test/mocks/plugin-result-a',
'./test/mocks/plugin-error-b',
'./test/mocks/plugin-result-b'
]
})
pipelinePlugins.verifyRelease({}, function (err, results) {
tt.is(err.message, 'b')
tt.same(results, [undefined, 'a', undefined])
tt.end()
})
})
})
test('normalize and load plugin', function (t) {
t.test('load from string', function (tt) {
var plugin = plugins.normalize('./src/lib/plugin-noop')
tt.is(typeof plugin, 'function')
tt.end()
})
t.test('load from object', function (tt) {
var plugin = plugins.normalize({
path: './src/lib/plugin-noop'
})
tt.is(typeof plugin, 'function')
tt.end()
})
t.test('load from fallback', function (tt) {
var plugin = plugins.normalize(null, '../../src/lib/plugin-noop')
tt.is(typeof plugin, 'function')
tt.end()
})
t.end()
})

View File

@ -1,74 +0,0 @@
var defaults = require('lodash').defaults
var test = require('tap').test
var proxyquire = require('proxyquire')
var post = proxyquire('../../src/post', {
'git-head': require('../mocks/git-head'),
github: require('../mocks/github')
})
var pkg = {
version: '1.0.0',
repository: {url: 'http://github.com/whats/up.git'}
}
var plugins = {
generateNotes: function (pkg, cb) {
cb(null, 'the log')
}
}
var defaultRelease = {
owner: 'whats',
repo: 'up',
name: 'v1.0.0',
tag_name: 'v1.0.0',
target_commitish: 'master',
body: 'the log'
}
test('full post run', function (t) {
t.test('in debug mode w/o token', function (tt) {
post({
options: {debug: true, branch: 'master'},
pkg: pkg,
plugins: plugins
}, function (err, published, release) {
tt.error(err)
tt.is(published, false)
tt.match(release, defaults({draft: true}, defaultRelease))
tt.end()
})
})
t.test('in debug mode w/token', function (tt) {
post({
options: {debug: true, githubToken: 'yo', branch: 'master'},
pkg: pkg,
plugins: plugins
}, function (err, published, release) {
tt.error(err)
tt.is(published, true)
tt.match(release, defaults({draft: true}, defaultRelease))
tt.end()
})
})
t.test('production', function (tt) {
post({
options: {githubToken: 'yo', branch: 'master'},
pkg: pkg,
plugins: plugins
}, function (err, published, release) {
tt.error(err)
tt.is(published, true)
tt.match(release, defaultRelease)
tt.end()
})
})
t.end()
})

View File

@ -1,64 +0,0 @@
var test = require('tap').test
var proxyquire = require('proxyquire')
require('../mocks/registry')
var pre = proxyquire('../../src/pre', {
'./lib/commits': proxyquire('../../src/lib/commits', {
'child_process': require('../mocks/child-process')
})
})
var versions = {
available: '1.0.0'
}
var plugins = {
verifyRelease: function (release, cb) {
cb(null, release)
},
analyzeCommits: function (commits, cb) {
cb(null, 'major')
},
getLastRelease: function (config, cb) {
cb(null, {version: versions[config.pkg.name] || null, gitHead: 'HEAD'})
}
}
var npm = {
registry: 'http://registry.npmjs.org/',
tag: 'latest'
}
test('full pre run', function (t) {
t.test('increase version', function (tt) {
tt.plan(3)
pre({
options: {branch: 'master'},
npm: npm,
pkg: {name: 'available'},
plugins: plugins
}, function (err, release) {
tt.error(err)
tt.is(release.type, 'major')
tt.is(release.version, '2.0.0')
})
})
t.test('increase version', function (tt) {
tt.plan(3)
pre({
options: {branch: 'master'},
npm: npm,
pkg: {name: 'unavailable'},
plugins: plugins
}, function (err, release) {
tt.error(err)
tt.is(release.type, 'initial')
tt.is(release.version, '1.0.0')
})
})
t.end()
})

View File

@ -1,60 +0,0 @@
var test = require('tap').test
var type = require('../../src/lib/type')
test('get type from commits', function (t) {
t.test('get type from plugin', function (tt) {
tt.plan(2)
type({
commits: [{
hash: '0',
message: 'a'
}],
lastRelease: {version: '1.0.0'},
plugins: {
analyzeCommits: function (config, cb) {
cb(null, 'major')
}
}
}, function (err, type) {
tt.error(err)
tt.is(type, 'major')
})
})
t.test('error when no changes', function (tt) {
tt.plan(1)
type({
commits: [],
lastRelease: {},
plugins: {
analyzeCommits: function (config, cb) {
cb(null, null)
}
}
}, function (err) {
tt.is(err.code, 'ENOCHANGE')
})
})
t.test('initial version', function (tt) {
tt.plan(2)
type({
commits: [],
lastRelease: {},
plugins: {
analyzeCommits: function (config, cb) {
cb(null, 'major')
}
}
}, function (err, type) {
tt.error(err)
tt.is(type, 'initial')
})
})
t.end()
})

View File

@ -1,72 +0,0 @@
var test = require('tap').test
var verify = require('../../src/lib/verify')
test('verify pkg, options and env', function (t) {
t.test('dry run verification', function (tt) {
var noErrors = verify({
options: {debug: true},
pkg: {
name: 'package',
repository: {
url: 'http://github.com/whats/up.git'
}
}
})
tt.is(noErrors.length, 0)
var errors = verify({
options: {debug: true},
pkg: {}
})
tt.is(errors.length, 2)
tt.is(errors[0].code, 'ENOPKGNAME')
tt.is(errors[1].code, 'ENOPKGREPO')
tt.end()
})
t.test('dry run verification for gitlab repo', function (tt) {
var noErrors = verify({
options: {debug: true},
pkg: {
name: 'package',
repository: {
url: 'http://gitlab.corp.com/whats/up.git'
}
}
})
tt.is(noErrors.length, 0)
tt.end()
})
t.test('publish verification', function (tt) {
var noErrors = verify({
env: {NPM_TOKEN: 'yo'},
options: {githubToken: 'sup'},
pkg: {
name: 'package',
repository: {
url: 'http://github.com/whats/up.git'
}
}
})
tt.is(noErrors.length, 0)
var errors = verify({env: {}, options: {}, pkg: {}})
tt.is(errors.length, 4)
tt.is(errors[0].code, 'ENOPKGNAME')
tt.is(errors[1].code, 'ENOPKGREPO')
tt.is(errors[2].code, 'ENOGHTOKEN')
tt.is(errors[3].code, 'ENONPMTOKEN')
tt.end()
})
t.end()
})

71
test/verify.test.js Normal file
View File

@ -0,0 +1,71 @@
import test from 'ava';
import verify from '../src/lib/verify';
test('Dry run - Verify pkg, options and env', t => {
// Call the verify module with debug (Dry run), package name and repo URL
const errors = verify({
options: {debug: true},
pkg: {name: 'package', repository: {url: 'http://github.com/whats/up.git'}},
});
// Verify no error has been returned
t.is(errors.length, 0);
});
test('Dry run - Returns errors for missing package name and repo', t => {
// Call the verify module with debug (Dry run), no package name and no repo URL
const errors = verify({options: {debug: true}, pkg: {}});
// Verify the module returns an error for each missing configuration
t.is(errors.length, 2);
t.is(errors[0].code, 'ENOPKGNAME');
t.is(errors[1].code, 'ENOPKGREPO');
});
test('Dry run - Verify pkg, options and env for gitlab repo', t => {
// Call the verify module with debug (Dry run), a package name and a GitLab repo URL
const errors = verify({
options: {debug: true},
pkg: {name: 'package', repository: {url: 'http://gitlab.corp.com/whats/up.git'}},
});
// Verify no error has been returned
t.is(errors.length, 0);
});
test('Publish - Verify pkg, options and env', t => {
// Call the verify module with package name, repo URL, npm token and github token
const errors = verify({
env: {NPM_TOKEN: 'yo'},
options: {githubToken: 'sup'},
pkg: {name: 'package', repository: {url: 'http://github.com/whats/up.git'}},
});
// Verify no error has been returned
t.is(errors.length, 0);
});
test('Publish - Returns errors for missing package name, repo, github token and npm token', t => {
// Call the verify module with no package name, no repo URL, no NPM token and no github token
const errors = verify({env: {}, options: {}, pkg: {}});
// Verify the module returns an error for each missing configuration
t.is(errors.length, 4);
t.is(errors[0].code, 'ENOPKGNAME');
t.is(errors[1].code, 'ENOPKGREPO');
t.is(errors[2].code, 'ENOGHTOKEN');
t.is(errors[3].code, 'ENONPMTOKEN');
});
test('Publish - Returns errors for missing email when using legacy npm token', t => {
// Call the verify module with a package name, a repo URL, a github token and a legacy npm token (NPM_OLD_TOKEN) but no npm email
const errors = verify({
env: {NPM_OLD_TOKEN: 'yo'},
options: {githubToken: 'sup'},
pkg: {name: 'package', repository: {url: 'http://github.com/whats/up.git'}},
});
// Verify the module returns a single error, since a legacy npm token requires NPM_EMAIL to be set
t.is(errors.length, 1);
t.is(errors[0].code, 'ENONPMTOKEN');
});
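These assertions only look at the `code` property of the returned errors and at the order in which they are produced. For reference, here is a rough sketch of the contract `src/lib/verify.js` has to satisfy, assuming it builds its errors with @semantic-release/error (which is in the dependency list); the messages and the exact checks are assumptions inferred from the tests, not the actual implementation.

// Sketch of the contract exercised above (not the actual src/lib/verify.js).
const SemanticReleaseError = require('@semantic-release/error');

module.exports = function verify({env = {}, options = {}, pkg = {}}) {
  const errors = [];
  if (!pkg.name) errors.push(new SemanticReleaseError('No "name" found in package.json.', 'ENOPKGNAME'));
  if (!pkg.repository || !pkg.repository.url) {
    errors.push(new SemanticReleaseError('No "repository" found in package.json.', 'ENOPKGREPO'));
  }
  // In dry run (debug) mode only the package itself is verified.
  if (options.debug) return errors;
  if (!options.githubToken) errors.push(new SemanticReleaseError('No github token specified.', 'ENOGHTOKEN'));
  // A legacy token (NPM_OLD_TOKEN) is only valid together with NPM_EMAIL.
  if (!(env.NPM_TOKEN || (env.NPM_OLD_TOKEN && env.NPM_EMAIL))) {
    errors.push(new SemanticReleaseError('No npm token specified.', 'ENONPMTOKEN'));
  }
  return errors;
};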