Compare commits


No commits in common. "master" and "v21.1.2" have entirely different histories.

27 changed files with 2328 additions and 8931 deletions

View File

@ -18,8 +18,8 @@ jobs:
name: release name: release
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4
- uses: actions/setup-node@b39b52d1213e96004bfcb1c61a8a6fa8ab84f3e8 # v4.0.1 - uses: actions/setup-node@5e21ff4d9bc1a8cf6de233a3057d20ec6b3fb69d # v3
with: with:
cache: npm cache: npm
node-version: lts/* node-version: lts/*

View File

@ -15,22 +15,22 @@ jobs:
id-token: write id-token: write
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
with: with:
persist-credentials: false persist-credentials: false
- name: Run analysis - name: Run analysis
uses: ossf/scorecard-action@0864cf19026789058feabb7e87baa5f140aac736 # v2.3.1 uses: ossf/scorecard-action@08b4669551908b1024bb425080c797723083c031 # v2.2.0
with: with:
results_file: results.sarif results_file: results.sarif
results_format: sarif results_format: sarif
publish_results: true publish_results: true
- name: Upload artifact - name: Upload artifact
uses: actions/upload-artifact@26f96dfa697d77e81fd5907df203aa23a56210a8 # v4.3.0 uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 # v3.1.3
with: with:
name: SARIF file name: SARIF file
path: results.sarif path: results.sarif
retention-days: 5 retention-days: 5
- name: Upload to code-scanning - name: Upload to code-scanning
uses: github/codeql-action/upload-sarif@e8893c57a1f3a2b659b6b55564fdfdbbd2982911 # v3.24.0 uses: github/codeql-action/upload-sarif@04daf014b50eaf774287bf3f0f1869d4b4c4b913 # v2.21.7
with: with:
sarif_file: results.sarif sarif_file: results.sarif

View File

@ -15,66 +15,42 @@ on:
permissions: permissions:
contents: read # to fetch code (actions/checkout) contents: read # to fetch code (actions/checkout)
env:
FORCE_COLOR: 1
NPM_CONFIG_COLOR: always
jobs: jobs:
# verify against ranges defined as supported in engines.node
test_matrix: test_matrix:
strategy: strategy:
matrix: matrix:
node-version: node-version:
- 20.8.1 - 18.0.0
- 20 - 19
- 21
runs-on: ubuntu-latest runs-on: ubuntu-latest
timeout-minutes: 5 timeout-minutes: 10
steps: steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4
- run: git config --global user.name github-actions - run: git config --global user.name github-actions
- run: git config --global user.email github-actions@github.com - run: git config --global user.email github-actions@github.com
- name: Use Node.js ${{ matrix.node-version }} - name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@b39b52d1213e96004bfcb1c61a8a6fa8ab84f3e8 # v4.0.1 uses: actions/setup-node@5e21ff4d9bc1a8cf6de233a3057d20ec6b3fb69d # v3
with: with:
node-version: ${{ matrix.node-version }} node-version: ${{ matrix.node-version }}
cache: npm cache: npm
- run: npm clean-install - run: npm clean-install
- run: npm audit signatures - name: Ensure dependencies are compatible with the version of node
- run: npm test run: npx ls-engines
- run: npm run test:ci
# verify against the node version defined for development in the .nvmrc
test_dev:
runs-on: ubuntu-latest
timeout-minutes: 5
steps:
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- run: git config --global user.name github-actions
- run: git config --global user.email github-actions@github.com
- name: Use Node.js from .nvmrc
uses: actions/setup-node@b39b52d1213e96004bfcb1c61a8a6fa8ab84f3e8 # v4.0.1
with:
node-version-file: .nvmrc
cache: npm
- run: npm clean-install
- run: npm audit signatures
- run: npm test
# separate job to set as required in branch protection, # separate job to set as required in branch protection,
# as the build names above change each time Node versions change # as the build names above change each time Node versions change
test: test:
runs-on: ubuntu-latest runs-on: ubuntu-latest
needs: needs: test_matrix
- test_dev
- test_matrix
if: ${{ !cancelled() }}
steps: steps:
- name: All matrix versions passed - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4
if: ${{ !(contains(needs.*.result, 'failure')) }} - uses: actions/setup-node@5e21ff4d9bc1a8cf6de233a3057d20ec6b3fb69d # v3
run: exit 0 with:
- name: Some matrix version failed node-version: lts/*
if: ${{ contains(needs.*.result, 'failure') }} cache: npm
run: exit 1 - run: npm clean-install
- run: npm audit signatures
- run: npm run lint

.nvmrc (1 line changed)
View File

@ -1 +0,0 @@
20

View File

@ -146,7 +146,7 @@ If possible, make [atomic commits](https://en.wikipedia.org/wiki/Atomic_commit),
- a commit should contain exactly one self-contained functional change - a commit should contain exactly one self-contained functional change
- a functional change should be contained in exactly one commit - a functional change should be contained in exactly one commit
- a commit should not create an inconsistent state (such as test errors, linting errors, partial fix, feature without documentation, etc...) - a commit should not create an inconsistent state (such as test errors, linting errors, partial fix, feature with documentation etc...)
A complex feature can be broken down into multiple commits as long as each one maintains a consistent state and consists of a self-contained change. A complex feature can be broken down into multiple commits as long as each one maintains a consistent state and consists of a self-contained change.
@ -241,82 +241,42 @@ $ git clone https://github.com/semantic-release/<repo-name>
$ cd <repo-name> $ cd <repo-name>
# Assign the original repo to a remote called "upstream" # Assign the original repo to a remote called "upstream"
$ git remote add upstream https://github.com/semantic-release/<repo-name> $ git remote add upstream https://github.com/semantic-release/<repo-name>
# Switch your node version to the version defined by the project as the development version
# This step assumes you have already installed and configured https://github.com/nvm-sh/nvm
# You may need to run `nvm install` if you have not already installed the development node version
$ nvm use
# Install the dependencies # Install the dependencies
$ npm install $ npm install
``` ```
### Verification ### Lint
The `test` script is structured to execute as much of the verification for the project as possible. All the [semantic-release](https://github.com/semantic-release) repositories use [XO](https://github.com/sindresorhus/xo) for linting and [Prettier](https://prettier.io) for formatting.
Ensuring that the `test` script fully passes in the node version defined as the development version in the `.nvmrc` Prettier formatting will be automatically verified and fixed by XO.
minimizes the chances of the test workflow failing after pushing your changes.
> [!IMPORTANT] Before pushing your code changes make sure there are no linting errors with `npm run lint`.
> Before pushing your code changes, be sure to run the verification for the project with `npm test`.
[npm-run-all2](https://www.npmjs.com/package/npm-run-all2) is used to enable running multiple independent lint and test **Tips**:
scripts together from the `test` script.
This enables the test script to not only run all scripts, but also parallelize some of the scripts to optimize the overall
time required for verification.
When a failure occurs with the `test`, the output can be a bit confusing because there may be output from multiple parallel - Most linting errors can be automatically fixed with `npm run lint -- --fix`.
scripts mixed together. - Install the [XO plugin](https://github.com/sindresorhus/xo#editor-plugins) for your editor to see linting errors directly in your editor and automatically fix them on save.
To investigate the failure with cleaner output, re-run the problematic script directly using the script name from the label
included on the left side of the output
```shell ### Tests
$ npm run <script-name>
```
#### Lint Running the integration test requires you to install [Docker](https://docs.docker.com/engine/installation) on your machine.
##### Prettier
All the [semantic-release](https://github.com/semantic-release) repositories use [Prettier](https://prettier.io) for formatting.
Prettier formatting will be automatically verified by the `lint:prettier` script, included in the `test` script.
> [!NOTE]
> Most linting errors can be automatically fixed with `npm run lint:prettier:fix`.
##### Other Lint Tools
Other tools are used for specific compatibility concerns, but are less likely to result in failures in common contributions.
Please follow the guidance of these tools if failures are encountered.
#### Tests
> [!NOTE]
> Before pushing your code changes make sure all **tests pass** and the unit test **coverage is 100%**:
All the [semantic-release](https://github.com/semantic-release) repositories use [AVA](https://github.com/avajs/ava) for writing and running tests. All the [semantic-release](https://github.com/semantic-release) repositories use [AVA](https://github.com/avajs/ava) for writing and running tests.
During development, you can: Before pushing your code changes make sure all **tests pass** and the **coverage is 100%**:
```bash
$ npm run test
```
**Tips:** During development you can:
- run only a subset of test files with `ava <glob>`, for example `ava test/mytestfile.test.js` - run only a subset of test files with `ava <glob>`, for example `ava test/mytestfile.test.js`
- run in watch mode with `ava -w` to automatically run a test file when you modify it - run in watch mode with `ava -w` to automatically run a test file when you modify it
- run only the test you are working on by adding [`.only` to the test definition](https://github.com/avajs/ava#running-specific-tests) - run only the test you are working on by adding [`.only` to the test definition](https://github.com/avajs/ava#running-specific-tests)
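For illustration, a throwaway AVA file (hypothetical name `test/example.test.js`) combining the glob and `.only` tips above could look like this:

```js
// test/example.test.js — hypothetical file, for illustration only
import test from "ava";

test("formats the tag from the version", (t) => {
  const version = "1.2.3";
  t.is(`v${version}`, "v1.2.3");
});

// While iterating, `.only` restricts the run to this single test
test.only("keeps the release channels in order", (t) => {
  t.deepEqual([null, "next"].slice(0, 1), [null]);
});
```

Run it alone with `npx ava test/example.test.js`, or add `-w` to re-run it on every save.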
##### Unit Tests
```bash
$ npm run test:unit
```
##### Integration Tests
> [!IMPORTANT]
> Running the integration test requires you to install [Docker](https://docs.docker.com/engine/installation) on your machine.
```bash
$ npm run test:integration
```
### Commits ### Commits
All the [semantic-release](https://github.com/semantic-release) repositories use [Commitizen](https://github.com/commitizen/cz-cli) to help you create [valid commit messages](#commit-message-guidelines). All the [semantic-release](https://github.com/semantic-release) repositories use [Commitizen](https://github.com/commitizen/cz-cli) to help you create [valid commit messages](#commit-message-guidelines).
Assuming you have [installed Commitizen](https://github.com/commitizen/cz-cli#installing-the-command-line-tool), run `git cz` to start the interactive commit message CLI rather than `git commit` when committing. After staging your changes with `git add`, run `npm run cm` to start the interactive commit message CLI.

View File

@ -69,7 +69,7 @@ The table below shows which commit message gets you which release type when `sem
### Automation with CI ### Automation with CI
**semantic-release** is meant to be executed on the CI environment after every successful build on the release branch. **semantic-release** is meant to be executed on the CI environment after every successful build on the release branch.
This way no human is directly involved in the release process and the releases are guaranteed to be [unromantic and unsentimental](https://github.com/dominictarr/sentimental-versioning#readme). This way no human is directly involved in the release process and the releases are guaranteed to be [unromantic and unsentimental](http://sentimentalversioning.org).
### Triggering a release ### Triggering a release
@ -110,9 +110,9 @@ In order to use **semantic-release** you need:
## Documentation ## Documentation
- Usage - Usage
- [Getting started](docs/usage/getting-started.md) - [Getting started](docs/usage/getting-started.md#getting-started)
- [Installation](docs/usage/installation.md) - [Installation](docs/usage/installation.md#installation)
- [CI Configuration](docs/usage/ci-configuration.md) - [CI Configuration](docs/usage/ci-configuration.md#ci-configuration)
- [Configuration](docs/usage/configuration.md#configuration) - [Configuration](docs/usage/configuration.md#configuration)
- [Plugins](docs/usage/plugins.md) - [Plugins](docs/usage/plugins.md)
- [Workflow configuration](docs/usage/workflow-configuration.md) - [Workflow configuration](docs/usage/workflow-configuration.md)

View File

@ -2,7 +2,7 @@
## Usage ## Usage
- [Getting started](docs/usage/getting-started.md) - [Getting started](docs/usage/getting-started.md#getting-started)
- [Installation](docs/usage/installation.md) - [Installation](docs/usage/installation.md)
- [CI Configuration](docs/usage/ci-configuration.md) - [CI Configuration](docs/usage/ci-configuration.md)
- [Configuration](docs/usage/configuration.md) - [Configuration](docs/usage/configuration.md)

View File

@ -127,7 +127,7 @@ It allows to configure **semantic-release** to write errors to a specific stream
Type: `Object` `Boolean`<br> Type: `Object` `Boolean`<br>
An object with [`lastRelease`](#lastrelease), [`nextRelease`](#nextrelease), [`commits`](#commits) and [`releases`](#releases) if a release is published or `false` if no release was published. And object with [`lastRelease`](#lastrelease), [`nextRelease`](#nextrelease), [`commits`](#commits) and [`releases`](#releases) if a release is published or `false` if no release was published.
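For context, a minimal sketch of consuming that return value through the JS API (the option shown is illustrative):

```js
import semanticRelease from "semantic-release";

const result = await semanticRelease({ dryRun: true });

if (result === false) {
  console.log("No release published.");
} else {
  const { lastRelease, nextRelease, commits, releases } = result;
  console.log(`Next release: ${nextRelease.version} (last: ${lastRelease.version || "none"})`);
  console.log(`${commits.length} commit(s) analysed, ${releases.length} release(s) published.`);
}
```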
#### lastRelease #### lastRelease
@ -159,7 +159,7 @@ Example:
Type: `Array<Object>` Type: `Array<Object>`
The list of commit(s) included in the new release.<br> The list of commit included in the new release.<br>
Each commit object has the following properties: Each commit object has the following properties:
| Name | Type | Description | | Name | Type | Description |

View File

@ -179,9 +179,6 @@
- [semantic-release-coralogix](https://github.com/adobe/semantic-release-coralogix) - [semantic-release-coralogix](https://github.com/adobe/semantic-release-coralogix)
- `verifyConditions` Verified that required credentials are provided and API is accessible - `verifyConditions` Verified that required credentials are provided and API is accessible
- `publish` add a release tag to Coralogix - `publish` add a release tag to Coralogix
- [semantic-release-jira-notes](https://github.com/iamludal/semantic-release-jira-notes)
- `verifyConditions`: Validate the config options.
- `generateNotes`: Generate the release notes with links to JIRA issues.
- [semantic-release-major-tag](https://github.com/doteric/semantic-release-major-tag) - [semantic-release-major-tag](https://github.com/doteric/semantic-release-major-tag)
- `success` Create major version tag, for example `v1`. - `success` Create major version tag, for example `v1`.
- [semantic-release-yarn](https://github.com/hongaar/semantic-release-yarn) - [semantic-release-yarn](https://github.com/hongaar/semantic-release-yarn)
@ -190,7 +187,3 @@
- `prepare` Update the `package.json` version and create the package tarball. - `prepare` Update the `package.json` version and create the package tarball.
- `addChannel` Add a tag for the release. - `addChannel` Add a tag for the release.
- `publish` Publish to the npm registry. - `publish` Publish to the npm registry.
- [semantic-release-pub](https://github.com/zeshuaro/semantic-release-pub)
- `verifyConditions`: Verify the presence of the `pub.dev` authentication and release configuration
- `prepare`: Update the `pubspec.yaml` version
- `publish`: Publish the package onto the `pub.dev` registry

View File

@ -195,7 +195,7 @@ If you need more control over the timing of releases, see [Triggering a release]
## Can I set the initial release version of my package to `0.0.1`? ## Can I set the initial release version of my package to `0.0.1`?
This is not supported by semantic-release. [Semantic Versioning](https://semver.org/) rules apply differently to major version zero and supporting those differences is out of scope and not one of the goals of the semantic-release project. This is not supported by **semantic-release** as it's not considered a good practice, mostly because [Semantic Versioning](https://semver.org) rules applies differently to major version zero.
If your project is under heavy development, with frequent breaking changes, and is not production ready yet we recommend [publishing pre-releases](../recipes/release-workflow/pre-releases.md#publishing-pre-releases). If your project is under heavy development, with frequent breaking changes, and is not production ready yet we recommend [publishing pre-releases](../recipes/release-workflow/pre-releases.md#publishing-pre-releases).

View File

@ -1,6 +1,6 @@
# Node version requirement # Node version requirement
**semantic-release** is written using the latest [ECMAScript 2017](https://www.ecma-international.org/publications/standards/Ecma-262.htm) features, without transpilation which **requires Node version 20.8.1 or higher**. **semantic-release** is written using the latest [ECMAScript 2017](https://www.ecma-international.org/publications/standards/Ecma-262.htm) features, without transpilation which **requires Node version 18.0.0 or higher**.
**semantic-release** is meant to be used in a CI environment as a development support tool, not as a production dependency. **semantic-release** is meant to be used in a CI environment as a development support tool, not as a production dependency.
Therefore, the only constraint is to run the `semantic-release` in a CI environment providing version of Node that meets our version requirement. Therefore, the only constraint is to run the `semantic-release` in a CI environment providing version of Node that meets our version requirement.
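A hypothetical pre-flight check a CI job could run before `semantic-release`, using the `semver` package that is already a dependency (the range mirrors the requirement stated above):

```js
// check-node.js — hypothetical helper, not part of the project
import semver from "semver";

const required = ">=20.8.1"; // ">=18.0.0" on the v21.1.2 side
if (!semver.satisfies(process.version, required)) {
  console.error(`semantic-release requires Node ${required}; current version is ${process.version}`);
  process.exit(1);
}
```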

View File

@ -40,17 +40,6 @@ The following three examples are the same.
} }
``` ```
- Via `release.config.cjs` file:
```js
/**
* @type {import('semantic-release').GlobalConfig}
*/
module.exports = {
branches: ["master", "next"],
};
```
- Via CLI argument: - Via CLI argument:
```bash ```bash

View File

@ -6,3 +6,16 @@ In order to use **semantic-release** you must follow these steps:
2. Configure your Continuous Integration service to [run **semantic-release**](./ci-configuration.md#run-semantic-release-only-after-all-tests-succeeded) 2. Configure your Continuous Integration service to [run **semantic-release**](./ci-configuration.md#run-semantic-release-only-after-all-tests-succeeded)
3. Configure your Git repository and package manager repository [authentication](ci-configuration.md#authentication) in your Continuous Integration service 3. Configure your Git repository and package manager repository [authentication](ci-configuration.md#authentication) in your Continuous Integration service
4. Configure **semantic-release** [options and plugins](./configuration.md#configuration) 4. Configure **semantic-release** [options and plugins](./configuration.md#configuration)
Alternatively those steps can be easily done with the [**semantic-release** interactive CLI](https://github.com/semantic-release/cli):
```bash
cd your-module
npx semantic-release-cli setup
```
![dialogue](../../media/semantic-release-cli.png)
See the [semantic-release-cli](https://github.com/semantic-release/cli#what-it-does) documentation for more details.
**Note**: only a limited number of options, CI services and plugins are currently supported by `semantic-release-cli`.

View File

@ -123,15 +123,12 @@ async function run(context, plugins) {
if (options.dryRun) { if (options.dryRun) {
logger.warn(`Skip ${nextRelease.gitTag} tag creation in dry-run mode`); logger.warn(`Skip ${nextRelease.gitTag} tag creation in dry-run mode`);
} else { } else {
await addNote({ channels: [...currentRelease.channels, nextRelease.channel] }, nextRelease.gitTag, { await addNote({ channels: [...currentRelease.channels, nextRelease.channel] }, nextRelease.gitHead, {
cwd, cwd,
env, env,
}); });
await push(options.repositoryUrl, { cwd, env }); await push(options.repositoryUrl, { cwd, env });
await pushNotes(options.repositoryUrl, nextRelease.gitTag, { await pushNotes(options.repositoryUrl, { cwd, env });
cwd,
env,
});
logger.success( logger.success(
`Add ${nextRelease.channel ? `channel ${nextRelease.channel}` : "default channel"} to tag ${ `Add ${nextRelease.channel ? `channel ${nextRelease.channel}` : "default channel"} to tag ${
nextRelease.gitTag nextRelease.gitTag
@ -206,9 +203,9 @@ async function run(context, plugins) {
} else { } else {
// Create the tag before calling the publish plugins as some require the tag to exists // Create the tag before calling the publish plugins as some require the tag to exists
await tag(nextRelease.gitTag, nextRelease.gitHead, { cwd, env }); await tag(nextRelease.gitTag, nextRelease.gitHead, { cwd, env });
await addNote({ channels: [nextRelease.channel] }, nextRelease.gitTag, { cwd, env }); await addNote({ channels: [nextRelease.channel] }, nextRelease.gitHead, { cwd, env });
await push(options.repositoryUrl, { cwd, env }); await push(options.repositoryUrl, { cwd, env });
await pushNotes(options.repositoryUrl, nextRelease.gitTag, { cwd, env }); await pushNotes(options.repositoryUrl, { cwd, env });
logger.success(`Created tag ${nextRelease.gitTag}`); logger.success(`Created tag ${nextRelease.gitTag}`);
} }
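Stripped of error handling, the left-hand (master) sequence above performs roughly these git operations; the constant value, URL, tag, and SHA are assumed for illustration:

```js
import { execa } from "execa";

const GIT_NOTE_REF = "semantic-release"; // assumed value of the notes namespace constant
const repositoryUrl = "https://github.com/owner/repo.git"; // illustrative
const gitTag = "v2.0.0";
const gitHead = "abc1234";

// Create the tag before the publish plugins run, since some expect it to exist
await execa("git", ["tag", gitTag, gitHead]);
// Attach the release channels to the tag as a git note, on a per-tag notes ref
await execa(
  "git",
  ["notes", "--ref", `${GIT_NOTE_REF}-${gitTag}`, "add", "-f", "-m", JSON.stringify({ channels: [null] }), gitTag]
);
// Push the tag, then push the notes ref explicitly
await execa("git", ["push", repositoryUrl, gitTag]);
await execa("git", ["push", repositoryUrl, `refs/notes/${GIT_NOTE_REF}-${gitTag}`]);
```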

View File

@ -60,7 +60,7 @@ export function release({ release }) {
return release; return release;
} }
// The initial lastVersion is the last release from the base branch of `FIRST_RELEASE` (1.0.0) // The intial lastVersion is the last release from the base branch of `FIRST_RELEASE` (1.0.0)
let lastVersion = getLatestVersion(tagsToVersions(release[0].tags)) || FIRST_RELEASE; let lastVersion = getLatestVersion(tagsToVersions(release[0].tags)) || FIRST_RELEASE;
return release.map(({ name, tags, channel, ...rest }, idx) => { return release.map(({ name, tags, channel, ...rest }, idx) => {

View File

@ -1,10 +1,11 @@
import { dirname } from "node:path"; import { dirname, resolve } from "node:path";
import { fileURLToPath } from "node:url"; import { fileURLToPath } from "node:url";
import { createRequire } from "node:module";
import { castArray, isNil, isPlainObject, isString, pickBy } from "lodash-es"; import { castArray, isNil, isPlainObject, isString, pickBy } from "lodash-es";
import { readPackageUp } from "read-pkg-up"; import { readPackageUp } from "read-pkg-up";
import { cosmiconfig } from "cosmiconfig"; import { cosmiconfig } from "cosmiconfig";
import importFrom from "import-from-esm"; import resolveFrom from "resolve-from";
import debugConfig from "debug"; import debugConfig from "debug";
import { repoUrl } from "./git.js"; import { repoUrl } from "./git.js";
import PLUGINS_DEFINITIONS from "./definitions/plugins.js"; import PLUGINS_DEFINITIONS from "./definitions/plugins.js";
@ -13,6 +14,7 @@ import { parseConfig, validatePlugin } from "./plugins/utils.js";
const debug = debugConfig("semantic-release:config"); const debug = debugConfig("semantic-release:config");
const __dirname = dirname(fileURLToPath(import.meta.url)); const __dirname = dirname(fileURLToPath(import.meta.url));
const require = createRequire(import.meta.url);
const CONFIG_NAME = "release"; const CONFIG_NAME = "release";
@ -33,7 +35,7 @@ export default async (context, cliOptions) => {
options = { options = {
...(await castArray(extendPaths).reduce(async (eventualResult, extendPath) => { ...(await castArray(extendPaths).reduce(async (eventualResult, extendPath) => {
const result = await eventualResult; const result = await eventualResult;
const extendsOptions = (await importFrom.silent(__dirname, extendPath)) || (await importFrom(cwd, extendPath)); const extendsOptions = require(resolveFrom.silent(__dirname, extendPath) || resolveFrom(cwd, extendPath));
// For each plugin defined in a shareable config, save in `pluginsPath` the extendable config path, // For each plugin defined in a shareable config, save in `pluginsPath` the extendable config path,
// so those plugin will be loaded relative to the config file // so those plugin will be loaded relative to the config file
@ -74,8 +76,8 @@ export default async (context, cliOptions) => {
plugins: [ plugins: [
"@semantic-release/commit-analyzer", "@semantic-release/commit-analyzer",
"@semantic-release/release-notes-generator", "@semantic-release/release-notes-generator",
//"@semantic-release/npm", "@semantic-release/npm",
//"@semantic-release/github", "@semantic-release/github",
], ],
// Remove `null` and `undefined` options, so they can be replaced with default ones // Remove `null` and `undefined` options, so they can be replaced with default ones
...pickBy(options, (option) => !isNil(option)), ...pickBy(options, (option) => !isNil(option)),
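The move to `import-from-esm` on the left (master) is what allows `extends` to point at ESM shareable configs; a minimal sketch of that resolution order (the config name is illustrative):

```js
import { dirname } from "node:path";
import { fileURLToPath } from "node:url";
import importFrom from "import-from-esm";

const __dirname = dirname(fileURLToPath(import.meta.url));
const cwd = process.cwd();
const extendPath = "my-shareable-config"; // illustrative module id or file path

// Resolve relative to semantic-release itself first, then relative to the project
const extendsOptions = (await importFrom.silent(__dirname, extendPath)) || (await importFrom(cwd, extendPath));
console.log(extendsOptions);
```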

View File

@ -36,7 +36,7 @@ function formatAuthUrl(protocol, repositoryUrl, gitCredentials) {
* @param {Object} context semantic-release context. * @param {Object} context semantic-release context.
* @param {String} authUrl Repository URL to verify * @param {String} authUrl Repository URL to verify
* *
* @return {String} The authUrl as is if the connection was successful, null otherwise * @return {String} The authUrl as is if the connection was successfull, null otherwise
*/ */
async function ensureValidAuthUrl({ cwd, env, branch }, authUrl) { async function ensureValidAuthUrl({ cwd, env, branch }, authUrl) {
try { try {

View File

@ -2,7 +2,6 @@ import gitLogParser from "git-log-parser";
import getStream from "get-stream"; import getStream from "get-stream";
import { execa } from "execa"; import { execa } from "execa";
import debugGit from "debug"; import debugGit from "debug";
import { merge } from "lodash-es";
import { GIT_NOTE_REF } from "./definitions/constants.js"; import { GIT_NOTE_REF } from "./definitions/constants.js";
const debug = debugGit("semantic-release:git"); const debug = debugGit("semantic-release:git");
@ -142,9 +141,13 @@ export async function fetch(repositoryUrl, branch, ciBranch, execaOptions) {
*/ */
export async function fetchNotes(repositoryUrl, execaOptions) { export async function fetchNotes(repositoryUrl, execaOptions) {
try { try {
await execa("git", ["fetch", "--unshallow", repositoryUrl, `+refs/notes/*:refs/notes/*`], execaOptions); await execa(
"git",
["fetch", "--unshallow", repositoryUrl, `+refs/notes/${GIT_NOTE_REF}:refs/notes/${GIT_NOTE_REF}`],
execaOptions
);
} catch { } catch {
await execa("git", ["fetch", repositoryUrl, `+refs/notes/*:refs/notes/*`], { await execa("git", ["fetch", repositoryUrl, `+refs/notes/${GIT_NOTE_REF}:refs/notes/${GIT_NOTE_REF}`], {
...execaOptions, ...execaOptions,
reject: false, reject: false,
}); });
@ -243,8 +246,8 @@ export async function push(repositoryUrl, execaOptions) {
* *
* @throws {Error} if the push failed. * @throws {Error} if the push failed.
*/ */
export async function pushNotes(repositoryUrl, ref, execaOptions) { export async function pushNotes(repositoryUrl, execaOptions) {
await execa("git", ["push", repositoryUrl, `refs/notes/${GIT_NOTE_REF}-${ref}`], execaOptions); await execa("git", ["push", repositoryUrl, `refs/notes/${GIT_NOTE_REF}`], execaOptions);
} }
/** /**
@ -304,26 +307,8 @@ export async function isBranchUpToDate(repositoryUrl, branch, execaOptions) {
* @return {Object} the parsed JSON note if there is one, an empty object otherwise. * @return {Object} the parsed JSON note if there is one, an empty object otherwise.
*/ */
export async function getNote(ref, execaOptions) { export async function getNote(ref, execaOptions) {
const handleError = (error) => {
if (error.exitCode === 1) {
return { stdout: "{}" };
}
debug(error);
throw error;
};
try { try {
return merge( return JSON.parse((await execa("git", ["notes", "--ref", GIT_NOTE_REF, "show", ref], execaOptions)).stdout);
JSON.parse(
// Used for retro-compatibility
(await execa("git", ["notes", "--ref", GIT_NOTE_REF, "show", ref], execaOptions).catch(handleError)).stdout
),
JSON.parse(
(await execa("git", ["notes", "--ref", `${GIT_NOTE_REF}-${ref}`, "show", ref], execaOptions).catch(handleError))
.stdout
)
);
} catch (error) { } catch (error) {
if (error.exitCode === 1) { if (error.exitCode === 1) {
return {}; return {};
@ -342,19 +327,5 @@ export async function getNote(ref, execaOptions) {
* @param {Object} [execaOpts] Options to pass to `execa`. * @param {Object} [execaOpts] Options to pass to `execa`.
*/ */
export async function addNote(note, ref, execaOptions) { export async function addNote(note, ref, execaOptions) {
await execa( await execa("git", ["notes", "--ref", GIT_NOTE_REF, "add", "-f", "-m", JSON.stringify(note), ref], execaOptions);
"git",
["notes", "--ref", `${GIT_NOTE_REF}-${ref}`, "add", "-f", "-m", JSON.stringify(note), ref],
execaOptions
);
}
/**
* Get the reference of a tag
*
* @param {String} tag The tag name to get the reference of.
* @param {Object} [execaOpts] Options to pass to `execa`.
**/
export async function getTagRef(tag, execaOptions) {
return (await execa("git", ["show-ref", tag, "--hash"], execaOptions)).stdout;
} }
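The left-hand `getNote` keeps reading notes written by older releases; condensed, that read path amounts to the following (the constant value and tag are assumed):

```js
import { execa } from "execa";
import { merge } from "lodash-es";

const GIT_NOTE_REF = "semantic-release"; // assumed value of the constant
const tag = "v2.0.0"; // illustrative

// Treat "no note found" (git exits with code 1) as an empty note
const readNote = async (notesRef) => {
  const { stdout } = await execa("git", ["notes", "--ref", notesRef, "show", tag]).catch((error) => {
    if (error.exitCode === 1) return { stdout: "{}" };
    throw error;
  });
  return JSON.parse(stdout);
};

// Merge the legacy shared ref with the per-tag ref introduced on master
const note = merge(await readNote(GIT_NOTE_REF), await readNote(`${GIT_NOTE_REF}-${tag}`));
console.log(note);
```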

Binary file not shown (image, 65 KiB).

package-lock.json (generated; 10798 lines changed)

File diff suppressed because it is too large.

View File

@ -6,8 +6,7 @@
"author": "Stephan Bönnemann <stephan@boennemann.me> (http://boennemann.me)", "author": "Stephan Bönnemann <stephan@boennemann.me> (http://boennemann.me)",
"ava": { "ava": {
"files": [ "files": [
"test/**/*.test.js", "test/**/*.test.js"
"!test/integration.test.js"
], ],
"nodeArguments": [ "nodeArguments": [
"--loader=testdouble", "--loader=testdouble",
@ -27,30 +26,29 @@
"Matt Travi <npm@travi.org> (https://matt.travi.org/)" "Matt Travi <npm@travi.org> (https://matt.travi.org/)"
], ],
"dependencies": { "dependencies": {
"@semantic-release/commit-analyzer": "^11.0.0", "@semantic-release/commit-analyzer": "^10.0.0",
"@semantic-release/error": "^4.0.0", "@semantic-release/error": "^4.0.0",
"@semantic-release/github": "^9.0.0", "@semantic-release/github": "^9.0.0",
"@semantic-release/npm": "^11.0.0", "@semantic-release/npm": "^10.0.2",
"@semantic-release/release-notes-generator": "^12.0.0", "@semantic-release/release-notes-generator": "^11.0.0",
"aggregate-error": "^5.0.0", "aggregate-error": "^5.0.0",
"cosmiconfig": "^9.0.0", "cosmiconfig": "^8.0.0",
"debug": "^4.0.0", "debug": "^4.0.0",
"env-ci": "^11.0.0", "env-ci": "^9.0.0",
"execa": "^8.0.0", "execa": "^8.0.0",
"figures": "^6.0.0", "figures": "^5.0.0",
"find-versions": "^5.1.0", "find-versions": "^5.1.0",
"get-stream": "^6.0.0", "get-stream": "^6.0.0",
"git-log-parser": "^1.2.0", "git-log-parser": "^1.2.0",
"hook-std": "^3.0.0", "hook-std": "^3.0.0",
"hosted-git-info": "^7.0.0", "hosted-git-info": "^7.0.0",
"import-from-esm": "^1.3.1",
"lodash-es": "^4.17.21", "lodash-es": "^4.17.21",
"marked": "^11.0.0", "marked": "^5.0.0",
"marked-terminal": "^7.0.0", "marked-terminal": "^5.1.1",
"micromatch": "^4.0.2", "micromatch": "^4.0.2",
"p-each-series": "^3.0.0", "p-each-series": "^3.0.0",
"p-reduce": "^3.0.0", "p-reduce": "^3.0.0",
"read-pkg-up": "^11.0.0", "read-pkg-up": "^10.0.0",
"resolve-from": "^5.0.0", "resolve-from": "^5.0.0",
"semver": "^7.3.2", "semver": "^7.3.2",
"semver-diff": "^4.0.0", "semver-diff": "^4.0.0",
@ -58,31 +56,26 @@
"yargs": "^17.5.1" "yargs": "^17.5.1"
}, },
"devDependencies": { "devDependencies": {
"ava": "6.1.1", "ava": "5.3.1",
"c8": "9.1.0", "c8": "8.0.1",
"clear-module": "4.1.2", "clear-module": "4.1.2",
"codecov": "3.8.3", "codecov": "3.8.3",
"cz-conventional-changelog": "3.3.0", "dockerode": "3.3.5",
"dockerode": "4.0.2",
"file-url": "4.0.0", "file-url": "4.0.0",
"fs-extra": "11.2.0", "fs-extra": "11.1.1",
"got": "14.2.0", "got": "13.0.0",
"js-yaml": "4.1.0", "js-yaml": "4.1.0",
"lockfile-lint": "4.12.1",
"ls-engines": "0.9.1",
"mockserver-client": "5.15.0", "mockserver-client": "5.15.0",
"nock": "13.5.1", "nock": "13.3.3",
"npm-run-all2": "6.1.2", "p-retry": "6.0.0",
"p-retry": "6.2.0", "prettier": "3.0.3",
"prettier": "3.2.5", "sinon": "16.0.0",
"publint": "0.2.7",
"sinon": "17.0.1",
"stream-buffers": "3.0.2", "stream-buffers": "3.0.2",
"tempy": "3.1.0", "tempy": "3.1.0",
"testdouble": "3.20.1" "testdouble": "3.18.0"
}, },
"engines": { "engines": {
"node": ">=20.8.1" "node": ">=18"
}, },
"files": [ "files": [
"bin", "bin",
@ -105,7 +98,7 @@
"version" "version"
], ],
"license": "MIT", "license": "MIT",
"main": "./index.js", "main": "index.js",
"types": "index.d.ts", "types": "index.d.ts",
"c8": { "c8": {
"include": [ "include": [
@ -120,14 +113,6 @@
], ],
"all": true "all": true
}, },
"lockfile-lint": {
"path": "package-lock.json",
"type": "npm",
"validate-https": true,
"allowed-hosts": [
"npm"
]
},
"prettier": { "prettier": {
"printWidth": 120, "printWidth": 120,
"trailingComma": "es5" "trailingComma": "es5"
@ -140,26 +125,18 @@
"type": "git", "type": "git",
"url": "git+https://github.com/semantic-release/semantic-release.git" "url": "git+https://github.com/semantic-release/semantic-release.git"
}, },
"config": {
"commitizen": {
"path": "./node_modules/cz-conventional-changelog"
}
},
"scripts": { "scripts": {
"codecov": "codecov -f coverage/coverage-final.json", "codecov": "codecov -f coverage/coverage-final.json",
"lint:prettier": "prettier --check \"*.{js,json,md}\" \".github/**/*.{md,yml}\" \"docs/**/*.md\" \"{bin,lib,test}/**/*.js\"", "lint": "prettier --check \"*.{js,json,md}\" \".github/**/*.{md,yml}\" \"docs/**/*.md\" \"{bin,lib,test}/**/*.js\"",
"lint:prettier:fix": "prettier --write \"*.{js,json,md}\" \".github/**/*.{md,yml}\" \"docs/**/*.md\" \"{bin,lib,test}/**/*.js\"", "lint:fix": "prettier --write \"*.{js,json,md}\" \".github/**/*.{md,yml}\" \"docs/**/*.md\" \"{bin,lib,test}/**/*.js\"",
"lint:lockfile": "lockfile-lint", "pretest": "npm run lint",
"lint:engines": "ls-engines",
"lint:publish": "publint --strict",
"semantic-release": "./bin/semantic-release.js", "semantic-release": "./bin/semantic-release.js",
"test": "npm-run-all --print-label --parallel lint:* --parallel test:*", "test": "c8 ava --verbose",
"test:unit": "c8 ava --verbose", "test:ci": "c8 ava --verbose"
"test:integration": "ava --verbose test/integration.test.js"
}, },
"renovate": { "renovate": {
"extends": [ "extends": [
"github>semantic-release/.github:renovate-config" "github>semantic-release/.github"
] ]
} }
} }

View File

@ -389,92 +389,6 @@ test.serial('Read configuration from an array of paths in "extends"', async (t)
t.deepEqual(result, { options: expectedOptions, plugins: pluginsConfig }); t.deepEqual(result, { options: expectedOptions, plugins: pluginsConfig });
}); });
test.serial('Read configuration from an array of CJS files in "extends"', async (t) => {
// Create a git repository, set the current working directory at the root of the repo
const { cwd } = await gitRepo();
const pkgOptions = { extends: ["./shareable1.cjs", "./shareable2.cjs"] };
const options1 = {
verifyRelease: "verifyRelease1",
analyzeCommits: { path: "analyzeCommits1", param: "analyzeCommits_param1" },
branches: ["test_branch"],
repositoryUrl: "https://host.null/owner/module.git",
};
const options2 = {
verifyRelease: "verifyRelease2",
generateNotes: "generateNotes2",
analyzeCommits: { path: "analyzeCommits2", param: "analyzeCommits_param2" },
branches: ["test_branch"],
tagFormat: `v\${version}`,
plugins: false,
};
// Create package.json and shareable.json in repository root
await outputJson(path.resolve(cwd, "package.json"), { release: pkgOptions });
await writeFile(path.resolve(cwd, "shareable1.cjs"), `module.exports = ${JSON.stringify(options1)}`);
await writeFile(path.resolve(cwd, "shareable2.cjs"), `module.exports = ${JSON.stringify(options2)}`);
const expectedOptions = { ...options1, ...options2, branches: ["test_branch"] };
// Verify the plugins module is called with the plugin options from shareable1.mjs and shareable2.mjs
td.when(
plugins(
{ options: expectedOptions, cwd },
{
verifyRelease1: "./shareable1.cjs",
verifyRelease2: "./shareable2.cjs",
generateNotes2: "./shareable2.cjs",
analyzeCommits1: "./shareable1.cjs",
analyzeCommits2: "./shareable2.cjs",
}
)
).thenResolve(pluginsConfig);
const result = await t.context.getConfig({ cwd });
// Verify the options contains the plugin config from shareable1.json and shareable2.json
t.deepEqual(result, { options: expectedOptions, plugins: pluginsConfig });
});
test.serial('Read configuration from an array of ESM files in "extends"', async (t) => {
// Create a git repository, set the current working directory at the root of the repo
const { cwd } = await gitRepo();
const pkgOptions = { extends: ["./shareable1.mjs", "./shareable2.mjs"] };
const options1 = {
verifyRelease: "verifyRelease1",
analyzeCommits: { path: "analyzeCommits1", param: "analyzeCommits_param1" },
branches: ["test_branch"],
repositoryUrl: "https://host.null/owner/module.git",
};
const options2 = {
verifyRelease: "verifyRelease2",
generateNotes: "generateNotes2",
analyzeCommits: { path: "analyzeCommits2", param: "analyzeCommits_param2" },
branches: ["test_branch"],
tagFormat: `v\${version}`,
plugins: false,
};
// Create package.json and shareable.json in repository root
await outputJson(path.resolve(cwd, "package.json"), { release: pkgOptions });
await writeFile(path.resolve(cwd, "shareable1.mjs"), `export default ${JSON.stringify(options1)}`);
await writeFile(path.resolve(cwd, "shareable2.mjs"), `export default ${JSON.stringify(options2)}`);
const expectedOptions = { ...options1, ...options2, branches: ["test_branch"] };
// Verify the plugins module is called with the plugin options from shareable1.mjs and shareable2.mjs
td.when(
plugins(
{ options: expectedOptions, cwd },
{
verifyRelease1: "./shareable1.mjs",
verifyRelease2: "./shareable2.mjs",
generateNotes2: "./shareable2.mjs",
analyzeCommits1: "./shareable1.mjs",
analyzeCommits2: "./shareable2.mjs",
}
)
).thenResolve(pluginsConfig);
const result = await t.context.getConfig({ cwd });
// Verify the options contains the plugin config from shareable1.json and shareable2.json
t.deepEqual(result, { options: expectedOptions, plugins: pluginsConfig });
});
test.serial('Prioritize configuration from config file over "extends"', async (t) => { test.serial('Prioritize configuration from config file over "extends"', async (t) => {
// Create a git repository, set the current working directory at the root of the repo // Create a git repository, set the current working directory at the root of the repo
const { cwd } = await gitRepo(); const { cwd } = await gitRepo();

View File

@ -43,7 +43,7 @@ export async function initGit(withRemote) {
* *
* @param {Boolean} withRemote `true` to create a shallow clone of a bare repository. * @param {Boolean} withRemote `true` to create a shallow clone of a bare repository.
* @param {String} [branch='master'] The branch to initialize. * @param {String} [branch='master'] The branch to initialize.
* @return {Promise<Object>} The path of the clone if `withRemote` is `true`, the path of the repository otherwise. * @return {String} The path of the clone if `withRemote` is `true`, the path of the repository otherwise.
*/ */
export async function gitRepo(withRemote, branch = "master") { export async function gitRepo(withRemote, branch = "master") {
let { cwd, repositoryUrl } = await initGit(withRemote); let { cwd, repositoryUrl } = await initGit(withRemote);
@ -98,7 +98,7 @@ export async function gitCommits(messages, execaOptions) {
/** /**
* Get the list of parsed commits since a git reference. * Get the list of parsed commits since a git reference.
* *
* @param {String} [from] Git reference from which to search commits. * @param {String} [from] Git reference from which to seach commits.
* @param {Object} [execaOpts] Options to pass to `execa`. * @param {Object} [execaOpts] Options to pass to `execa`.
* *
* @return {Array<Object>} The list of parsed commits. * @return {Array<Object>} The list of parsed commits.
@ -240,7 +240,7 @@ export async function gitTagHead(tagName, execaOptions) {
* Get the first commit sha referenced by the tag `tagName` in the remote repository. * Get the first commit sha referenced by the tag `tagName` in the remote repository.
* *
* @param {String} repositoryUrl The repository remote URL. * @param {String} repositoryUrl The repository remote URL.
* @param {String} tagName The tag name to search for. * @param {String} tagName The tag name to seach for.
* @param {Object} [execaOpts] Options to pass to `execa`. * @param {Object} [execaOpts] Options to pass to `execa`.
* *
* @return {String} The sha of the commit associated with `tagName` on the remote repository. * @return {String} The sha of the commit associated with `tagName` on the remote repository.
@ -315,7 +315,7 @@ export async function rebase(ref, execaOptions) {
* @param {Object} [execaOpts] Options to pass to `execa`. * @param {Object} [execaOpts] Options to pass to `execa`.
*/ */
export async function gitAddNote(note, ref, execaOptions) { export async function gitAddNote(note, ref, execaOptions) {
await execa("git", ["notes", "--ref", `${GIT_NOTE_REF}-${ref}`, "add", "-m", note, ref], execaOptions); await execa("git", ["notes", "--ref", GIT_NOTE_REF, "add", "-m", note, ref], execaOptions);
} }
/** /**
@ -325,5 +325,5 @@ export async function gitAddNote(note, ref, execaOptions) {
* @param {Object} [execaOpts] Options to pass to `execa`. * @param {Object} [execaOpts] Options to pass to `execa`.
*/ */
export async function gitGetNote(ref, execaOptions) { export async function gitGetNote(ref, execaOptions) {
return (await execa("git", ["notes", "--ref", `${GIT_NOTE_REF}-${ref}`, "show", ref], execaOptions)).stdout; return (await execa("git", ["notes", "--ref", GIT_NOTE_REF, "show", ref], execaOptions)).stdout;
} }

View File

@ -11,7 +11,7 @@ test("Replace multiple sensitive environment variable values", (t) => {
); );
}); });
test("Replace multiple occurrences of sensitive environment variable values", (t) => { test("Replace multiple occurences of sensitive environment variable values", (t) => {
const env = { secretKey: "secret" }; const env = { secretKey: "secret" };
t.is( t.is(
hideSensitive(env)(`https://user:${env.secretKey}@host.com?token=${env.secretKey}`), hideSensitive(env)(`https://user:${env.secretKey}@host.com?token=${env.secretKey}`),
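The assertion above exercises masking of repeated secret values; a simplified sketch of the underlying idea (not the project's exact implementation):

```js
// Simplified sketch: build one RegExp from the secret env values and mask every occurrence
const hideSensitive = (env) => {
  const secrets = Object.entries(env)
    .filter(([name, value]) => value && /token|password|credential|secret|private|key/i.test(name))
    .map(([, value]) => value.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")); // escape RegExp metacharacters
  if (secrets.length === 0) return (output) => output;
  const regexp = new RegExp(secrets.join("|"), "g");
  return (output) => output.replace(regexp, "[secure]");
};

const env = { secretKey: "secret" };
console.log(hideSensitive(env)(`https://user:${env.secretKey}@host.com?token=${env.secretKey}`));
// => https://user:[secure]@host.com?token=[secure]
```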

View File

@ -35,7 +35,7 @@ let env;
// Environment variables used only for the local npm command used to do verification // Environment variables used only for the local npm command used to do verification
const npmTestEnv = { const npmTestEnv = {
...processEnvWithoutGitHubActionsVariables, ...process.env,
...npmRegistry.authEnv(), ...npmRegistry.authEnv(),
npm_config_registry: npmRegistry.url, npm_config_registry: npmRegistry.url,
}; };
@ -516,8 +516,8 @@ test("Pass options via CLI arguments", async (t) => {
}); });
test("Run via JS API", async (t) => { test("Run via JS API", async (t) => {
await td.replaceEsm("../lib/logger", null, { log: () => {}, error: () => {}, stdout: () => {} }); td.replace("../lib/logger", { log: () => {}, error: () => {}, stdout: () => {} });
await td.replaceEsm("env-ci", null, () => ({ isCi: true, branch: "master", isPr: false })); td.replace("env-ci", () => ({ isCi: true, branch: "master", isPr: false }));
const semanticRelease = (await import("../index.js")).default; const semanticRelease = (await import("../index.js")).default;
const packageName = "test-js-api"; const packageName = "test-js-api";
const owner = "git"; const owner = "git";

View File

@ -96,7 +96,7 @@ test("Normalize and load plugin from function", async (t) => {
t.is(typeof plugin, "function"); t.is(typeof plugin, "function");
}); });
test("Normalize and load plugin that returns multiple functions", async (t) => { test("Normalize and load plugin that retuns multiple functions", async (t) => {
const plugin = await normalize( const plugin = await normalize(
{ cwd, options: {}, logger: t.context.logger }, { cwd, options: {}, logger: t.context.logger },
"verifyConditions", "verifyConditions",

View File

@ -52,7 +52,7 @@ test("validateStep: optional plugin configuration", (t) => {
t.true(validateStep(type, { path: () => {}, options: "value" })); t.true(validateStep(type, { path: () => {}, options: "value" }));
t.false(validateStep(type, { path: null })); t.false(validateStep(type, { path: null }));
// Considered as an Array of 2 definitions and not as one Array definition in case of a multiple plugin type // Considered as an Array of 2 definitions and not as one Array definition in case of a muliple plugin type
t.false(validateStep(type, [() => {}, { options: "value" }])); t.false(validateStep(type, [() => {}, { options: "value" }]));
t.false(validateStep(type, ["plugin-path.js", { options: "value" }])); t.false(validateStep(type, ["plugin-path.js", { options: "value" }]));
@ -134,7 +134,7 @@ test("validateStep: required plugin configuration", (t) => {
t.true(validateStep(type, { path: () => {}, options: "value" })); t.true(validateStep(type, { path: () => {}, options: "value" }));
t.false(validateStep(type, { path: null })); t.false(validateStep(type, { path: null }));
// Considered as an Array of 2 definitions and not as one Array definition in the case of a multiple plugin type // Considered as an Array of 2 definitions and not as one Array definition in the case of a muliple plugin type
t.false(validateStep(type, [() => {}, { options: "value" }])); t.false(validateStep(type, [() => {}, { options: "value" }]));
t.false(validateStep(type, ["plugin-path.js", { options: "value" }])); t.false(validateStep(type, ["plugin-path.js", { options: "value" }]));