Mirror of https://github.com/actions/setup-node.git (synced 2025-04-21 00:51:00 +00:00)

Merge branch 'actions:main' into revert-2-revert-1-Devlynnx-patch-1
Commit f89960380a: 33 changed files with 5393 additions and 3186 deletions
.github/workflows/check-dist.yml (vendored, 2 changes)

@@ -1,4 +1,4 @@
-name: Check dist/
+name: Check dist
 
 on:
   push:
.github/workflows/e2e-cache.yml (vendored, 109 changes)

@@ -134,3 +134,112 @@ jobs:
      - name: Verify node and yarn
        run: __tests__/verify-node.sh "${{ matrix.node-version }}"
        shell: bash

  yarn-subprojects:
    name: Test yarn subprojects
    strategy:
      matrix:
        node-version: [12, 14, 16]
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3

      - name: prepare sub-projects
        run: __tests__/prepare-yarn-subprojects.sh yarn1

      # expect
      # - no errors
      # - log
      # ##[debug]Cache Paths:
      # ##[debug]["sub2/.yarn/cache","sub3/.yarn/cache","../../../.cache/yarn/v6"]
      - name: Setup Node
        uses: ./
        with:
          node-version: ${{ matrix.node-version }}
          cache: 'yarn'
          cache-dependency-path: |
            **/*.lock
            yarn.lock

  yarn-subprojects-berry-local:
    name: Test yarn subprojects all locally managed
    strategy:
      matrix:
        node-version: [12, 14, 16]
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3

      - name: prepare sub-projects
        run: __tests__/prepare-yarn-subprojects.sh keepcache keepcache

      # expect
      # - no errors
      # - log
      # ##[info]All dependencies are managed locally by yarn3, the previous cache can be used
      # ##[debug]["node-cache-Linux-yarn-401024703386272f1a950c9f014cbb1bb79a7a5b6e1fb00e8b90d06734af41ee","node-cache-Linux-yarn"]
      - name: Setup Node
        uses: ./
        with:
          node-version: ${{ matrix.node-version }}
          cache: 'yarn'
          cache-dependency-path: |
            sub2/*.lock
            sub3/*.lock

  yarn-subprojects-berry-global:
    name: Test yarn subprojects some locally managed
    strategy:
      matrix:
        node-version: [12, 14, 16]
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3

      - name: prepare sub-projects
        run: __tests__/prepare-yarn-subprojects.sh global

      # expect
      # - no errors
      # - log must
      # ##[debug]"/home/runner/work/setup-node-test/setup-node-test/sub2" dependencies are managed by yarn 3 locally
      # ##[debug]"/home/runner/work/setup-node-test/setup-node-test/sub3" dependencies are not managed by yarn 3 locally
      - name: Setup Node
        uses: ./
        with:
          node-version: ${{ matrix.node-version }}
          cache: 'yarn'
          cache-dependency-path: |
            sub2/*.lock
            sub3/*.lock

  yarn-subprojects-berry-git:
    name: Test yarn subprojects managed by git
    strategy:
      matrix:
        node-version: [12, 14, 16]
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3

      - name: prepare sub-projects
        run: /bin/bash __tests__/prepare-yarn-subprojects.sh keepcache

      # expect
      # - no errors
      # - log
      # [debug]"/home/runner/work/setup-node-test/setup-node-test/sub2" has .yarn/cache - dependencies are kept in the repository
      # [debug]"/home/runner/work/setup-node-test/setup-node-test/sub3" has .yarn/cache - dependencies are kept in the repository
      # [debug]["node-cache-Linux-yarn-401024703386272f1a950c9f014cbb1bb79a7a5b6e1fb00e8b90d06734af41ee"]
      - name: Setup Node
        uses: ./
        with:
          node-version: ${{ matrix.node-version }}
          cache: 'yarn'
          cache-dependency-path: |
            sub2/*.lock
            sub3/*.lock
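These jobs feed several lock-file globs to `cache-dependency-path`. As a rough sketch of what that implies (not the action's literal implementation; names and key layout here are assumptions, apart from the `node-cache-Linux-yarn-<hash>` shape visible in the debug logs above), the primary cache key is derived by hashing every file matched by those patterns, so a change to any `sub2/*.lock` or `sub3/*.lock` produces a new key:

```ts
import * as glob from '@actions/glob';

// Sketch: derive a cache key from the files matched by cache-dependency-path.
// hashFiles accepts the same newline-separated glob patterns used in the workflow.
async function primaryKey(cacheDependencyPath: string): Promise<string> {
  const fileHash = await glob.hashFiles(cacheDependencyPath);
  if (!fileHash) {
    throw new Error('No lock files matched cache-dependency-path');
  }
  // RUNNER_OS yields values like "Linux", matching the keys in the expected logs.
  return `node-cache-${process.env.RUNNER_OS}-yarn-${fileHash}`;
}
```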
.github/workflows/versions.yml (vendored, 3 changes)

@@ -158,7 +158,8 @@ jobs:
       fail-fast: false
       matrix:
         os: [ubuntu-latest, windows-latest, macos-latest]
-        node-version-file: [.nvmrc, .tool-versions, package.json]
+        node-version-file:
+          [.nvmrc, .tool-versions, .tool-versions-node, package.json]
     steps:
       - uses: actions/checkout@v3
       - name: Remove volta from package.json
.licenses/npm/@actions/cache.dep.yml (generated, 2 changes)

@@ -1,6 +1,6 @@
 ---
 name: "@actions/cache"
-version: 3.0.4
+version: 3.2.1
 type: npm
 summary: Actions cache lib
 homepage: https://github.com/actions/toolkit/tree/main/packages/cache
.licenses/npm/@azure/abort-controller.dep.yml (generated, 4 changes)

@@ -1,9 +1,9 @@
 ---
 name: "@azure/abort-controller"
-version: 1.0.4
+version: 1.1.0
 type: npm
 summary: Microsoft Azure SDK for JavaScript - Aborter
-homepage: https://github.com/Azure/azure-sdk-for-js/tree/master/sdk/core/abort-controller/README.md
+homepage: https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/abort-controller/README.md
 license: mit
 licenses:
 - sources: LICENSE
.licenses/npm/@azure/ms-rest-js.dep.yml (generated, 2 changes)

@@ -1,6 +1,6 @@
 ---
 name: "@azure/ms-rest-js"
-version: 2.6.6
+version: 2.7.0
 type: npm
 summary: Isomorphic client Runtime for Typescript/node.js/browser javascript client
   libraries generated using AutoRest
34
.licenses/npm/ip-regex.dep.yml
generated
34
.licenses/npm/ip-regex.dep.yml
generated
|
@ -1,34 +0,0 @@
|
|||
---
|
||||
name: ip-regex
|
||||
version: 2.1.0
|
||||
type: npm
|
||||
summary: Regular expression for matching IP addresses (IPv4 & IPv6)
|
||||
homepage: https://github.com/sindresorhus/ip-regex#readme
|
||||
license: mit
|
||||
licenses:
|
||||
- sources: license
|
||||
text: |
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
- sources: readme.md
|
||||
text: MIT © [Sindre Sorhus](https://sindresorhus.com)
|
||||
notices: []
|
43
.licenses/npm/psl.dep.yml
generated
43
.licenses/npm/psl.dep.yml
generated
|
@ -1,43 +0,0 @@
|
|||
---
|
||||
name: psl
|
||||
version: 1.8.0
|
||||
type: npm
|
||||
summary: Domain name parser based on the Public Suffix List
|
||||
homepage: https://github.com/lupomontero/psl#readme
|
||||
license: mit
|
||||
licenses:
|
||||
- sources: LICENSE
|
||||
text: |
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2017 Lupo Montero lupomontero@gmail.com
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
- sources: README.md
|
||||
text: |-
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2017 Lupo Montero <lupomontero@gmail.com>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
notices: []
|
34
.licenses/npm/punycode.dep.yml
generated
34
.licenses/npm/punycode.dep.yml
generated
|
@ -1,34 +0,0 @@
|
|||
---
|
||||
name: punycode
|
||||
version: 2.1.1
|
||||
type: npm
|
||||
summary: A robust Punycode converter that fully complies to RFC 3492 and RFC 5891,
|
||||
and works on nearly all JavaScript platforms.
|
||||
homepage: https://mths.be/punycode
|
||||
license: mit
|
||||
licenses:
|
||||
- sources: LICENSE-MIT.txt
|
||||
text: |
|
||||
Copyright Mathias Bynens <https://mathiasbynens.be/>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining
|
||||
a copy of this software and associated documentation files (the
|
||||
"Software"), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
||||
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
||||
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
- sources: README.md
|
||||
text: Punycode.js is available under the [MIT](https://mths.be/mit) license.
|
||||
notices: []
|
.licenses/npm/semver.dep.yml (generated)

@@ -1,9 +1,9 @@
 ---
 name: semver
-version: 6.1.2
+version: 6.3.1
 type: npm
 summary: The semantic version parser used by npm.
-homepage: https://github.com/npm/node-semver#readme
+homepage:
 license: isc
 licenses:
 - sources: LICENSE
23
.licenses/npm/tough-cookie.dep.yml
generated
23
.licenses/npm/tough-cookie.dep.yml
generated
|
@ -1,23 +0,0 @@
|
|||
---
|
||||
name: tough-cookie
|
||||
version: 3.0.1
|
||||
type: npm
|
||||
summary: RFC6265 Cookies and Cookie Jar for node.js
|
||||
homepage: https://github.com/salesforce/tough-cookie
|
||||
license: bsd-3-clause
|
||||
licenses:
|
||||
- sources: LICENSE
|
||||
text: |
|
||||
Copyright (c) 2015, Salesforce.com, Inc.
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
|
||||
|
||||
3. Neither the name of Salesforce.com nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
notices: []
|
README.md

@@ -99,7 +99,7 @@ For information regarding locally cached versions of Node.js on GitHub hosted ru
 
 ### Supported version syntax
 
-The `node-version` input supports the Semantic Versioning Specification, for more detailed examples please refer to the [documentation](https://github.com/npm/node-semver).
+The `node-version` input supports the Semantic Versioning Specification, for more detailed examples please refer to [the semver package documentation](https://github.com/npm/node-semver).
 
 Examples:
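Since `node-version` accepts semver ranges, a quick way to sanity-check a range against a concrete release is the same `semver` package this repository already depends on. This is an illustration only, not part of the action itself:

```ts
import * as semver from 'semver';

// Check which of a few example ranges a given Node.js release satisfies.
const release = '16.20.0';
for (const range of ['16.x', '>=14.15.0', '12 || 14 || 16', '^16.13.0']) {
  console.log(`${release} satisfies "${range}":`, semver.satisfies(release, range));
}
```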
__tests__/authutil.test.ts

@@ -1,9 +1,10 @@
 import os from 'os';
-import * as fs from 'fs';
+import fs from 'fs';
 import * as path from 'path';
 import * as core from '@actions/core';
 import * as io from '@actions/io';
 import * as auth from '../src/authutil';
+import * as cacheUtils from '../src/cache-utils';
 
 let rcFile: string;
__tests__/cache-restore.test.ts

@@ -32,13 +32,13 @@ describe('cache-restore', () => {
 
   function findCacheFolder(command: string) {
     switch (command) {
-      case utils.supportedPackageManagers.npm.getCacheFolderCommand:
+      case 'npm config get cache':
         return npmCachePath;
-      case utils.supportedPackageManagers.pnpm.getCacheFolderCommand:
+      case 'pnpm store path --silent':
         return pnpmCachePath;
-      case utils.supportedPackageManagers.yarn1.getCacheFolderCommand:
+      case 'yarn cache dir':
         return yarn1CachePath;
-      case utils.supportedPackageManagers.yarn2.getCacheFolderCommand:
+      case 'yarn config get cacheFolder':
         return yarn2CachePath;
       default:
         return 'packge/not/found';

@@ -108,7 +108,7 @@ describe('cache-restore', () => {
   it.each([['npm7'], ['npm6'], ['pnpm6'], ['yarn1'], ['yarn2'], ['random']])(
     'Throw an error because %s is not supported',
     async packageManager => {
-      await expect(restoreCache(packageManager)).rejects.toThrow(
+      await expect(restoreCache(packageManager, '')).rejects.toThrow(
         `Caching for '${packageManager}' is not supported`
       );
     }

@@ -132,7 +132,7 @@ describe('cache-restore', () => {
       }
     });
 
-    await restoreCache(packageManager);
+    await restoreCache(packageManager, '');
     expect(hashFilesSpy).toHaveBeenCalled();
     expect(infoSpy).toHaveBeenCalledWith(
       `Cache restored from key: node-cache-${platform}-${packageManager}-${fileHash}`

@@ -163,7 +163,7 @@ describe('cache-restore', () => {
     });
 
     restoreCacheSpy.mockImplementationOnce(() => undefined);
-    await restoreCache(packageManager);
+    await restoreCache(packageManager, '');
     expect(hashFilesSpy).toHaveBeenCalled();
     expect(infoSpy).toHaveBeenCalledWith(
       `${packageManager} cache is not found`
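The extra `''` argument in these tests reflects that cache restoration now also receives the `cache-dependency-path` input. A minimal sketch of the call shape the tests assume follows; the parameter name and internals are assumptions, not the literal contents of `src/cache-restore.ts`:

```ts
// Sketch of the two-argument signature exercised by the updated tests.
export async function restoreCache(
  packageManager: string,
  cacheDependencyPath?: string
): Promise<void> {
  // 1. resolve the package manager info (npm / pnpm / yarn), throwing
  //    `Caching for '<name>' is not supported` for unknown managers
  // 2. resolve cache directories, passing cacheDependencyPath through so
  //    per-subproject yarn caches can be located
  // 3. hash the matched lock files, build the primary key, and call the
  //    @actions/cache restore API with the resolved paths
}
```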
__tests__/cache-save.test.ts

@@ -107,18 +107,20 @@ describe('run', () => {
   describe('Validate unchanged cache is not saved', () => {
     it('should not save cache for yarn1', async () => {
       inputs['cache'] = 'yarn';
-      getStateSpy.mockImplementation(() => yarnFileHash);
-      getCommandOutputSpy
-        .mockImplementationOnce(() => '1.2.3')
-        .mockImplementationOnce(() => `${commonPath}/yarn1`);
+      getStateSpy.mockImplementation(key =>
+        key === State.CachePrimaryKey || key === State.CacheMatchedKey
+          ? yarnFileHash
+          : key === State.CachePaths
+          ? '["/foo/bar"]'
+          : 'not expected'
+      );
 
       await run();
 
       expect(getInputSpy).toHaveBeenCalled();
-      expect(getStateSpy).toHaveBeenCalledTimes(2);
-      expect(getCommandOutputSpy).toHaveBeenCalledTimes(2);
-      expect(debugSpy).toHaveBeenCalledWith(`yarn path is ${commonPath}/yarn1`);
-      expect(debugSpy).toHaveBeenCalledWith('Consumed yarn version is 1.2.3');
+      expect(getStateSpy).toHaveBeenCalledTimes(3);
+      expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
+      expect(debugSpy).toHaveBeenCalledTimes(0);
       expect(infoSpy).toHaveBeenCalledWith(
         `Cache hit occurred on the primary key ${yarnFileHash}, not saving cache.`
       );
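The tests now stub `State.CachePaths` with a JSON array, so the save step no longer shells out to rediscover cache folders (hence the expectation that `getCommandOutputSpy` is never called). A hedged sketch of that read-back, with the enum values assumed rather than copied from the source:

```ts
import * as core from '@actions/core';
import * as cache from '@actions/cache';

// Assumed state keys, mirroring the enum referenced by the tests above.
enum State {
  CachePrimaryKey = 'CACHE_KEY',
  CacheMatchedKey = 'CACHE_RESULT',
  CachePaths = 'CACHE_PATHS'
}

async function saveCacheState(): Promise<void> {
  const primaryKey = core.getState(State.CachePrimaryKey);
  const matchedKey = core.getState(State.CacheMatchedKey);
  // Paths were resolved during restore and stashed as JSON, e.g. '["/foo/bar"]'.
  const cachePaths: string[] = JSON.parse(core.getState(State.CachePaths) || '[]');

  if (primaryKey === matchedKey) {
    core.info(`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`);
    return;
  }
  await cache.saveCache(cachePaths, primaryKey);
}
```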
@ -127,18 +129,20 @@ describe('run', () => {
|
|||
|
||||
it('should not save cache for yarn2', async () => {
|
||||
inputs['cache'] = 'yarn';
|
||||
getStateSpy.mockImplementation(() => yarnFileHash);
|
||||
getCommandOutputSpy
|
||||
.mockImplementationOnce(() => '2.2.3')
|
||||
.mockImplementationOnce(() => `${commonPath}/yarn2`);
|
||||
getStateSpy.mockImplementation(key =>
|
||||
key === State.CachePrimaryKey || key === State.CacheMatchedKey
|
||||
? yarnFileHash
|
||||
: key === State.CachePaths
|
||||
? '["/foo/bar"]'
|
||||
: 'not expected'
|
||||
);
|
||||
|
||||
await run();
|
||||
|
||||
expect(getInputSpy).toHaveBeenCalled();
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(2);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(2);
|
||||
expect(debugSpy).toHaveBeenCalledWith(`yarn path is ${commonPath}/yarn2`);
|
||||
expect(debugSpy).toHaveBeenCalledWith('Consumed yarn version is 2.2.3');
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(3);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
|
||||
expect(debugSpy).toHaveBeenCalledTimes(0);
|
||||
expect(infoSpy).toHaveBeenCalledWith(
|
||||
`Cache hit occurred on the primary key ${yarnFileHash}, not saving cache.`
|
||||
);
|
||||
|
@ -147,35 +151,40 @@ describe('run', () => {
|
|||
|
||||
it('should not save cache for npm', async () => {
|
||||
inputs['cache'] = 'npm';
|
||||
getStateSpy.mockImplementation(() => npmFileHash);
|
||||
getStateSpy.mockImplementation(key =>
|
||||
key === State.CachePrimaryKey || key === State.CacheMatchedKey
|
||||
? yarnFileHash
|
||||
: key === State.CachePaths
|
||||
? '["/foo/bar"]'
|
||||
: 'not expected'
|
||||
);
|
||||
getCommandOutputSpy.mockImplementationOnce(() => `${commonPath}/npm`);
|
||||
|
||||
await run();
|
||||
|
||||
expect(getInputSpy).toHaveBeenCalled();
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(2);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(1);
|
||||
expect(debugSpy).toHaveBeenCalledWith(`npm path is ${commonPath}/npm`);
|
||||
expect(infoSpy).toHaveBeenCalledWith(
|
||||
`Cache hit occurred on the primary key ${npmFileHash}, not saving cache.`
|
||||
);
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(3);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
|
||||
expect(debugSpy).toHaveBeenCalledTimes(0);
|
||||
expect(setFailedSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should not save cache for pnpm', async () => {
|
||||
inputs['cache'] = 'pnpm';
|
||||
getStateSpy.mockImplementation(() => pnpmFileHash);
|
||||
getCommandOutputSpy.mockImplementationOnce(() => `${commonPath}/pnpm`);
|
||||
getStateSpy.mockImplementation(key =>
|
||||
key === State.CachePrimaryKey || key === State.CacheMatchedKey
|
||||
? yarnFileHash
|
||||
: key === State.CachePaths
|
||||
? '["/foo/bar"]'
|
||||
: 'not expected'
|
||||
);
|
||||
|
||||
await run();
|
||||
|
||||
expect(getInputSpy).toHaveBeenCalled();
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(2);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(1);
|
||||
expect(debugSpy).toHaveBeenCalledWith(`pnpm path is ${commonPath}/pnpm`);
|
||||
expect(infoSpy).toHaveBeenCalledWith(
|
||||
`Cache hit occurred on the primary key ${pnpmFileHash}, not saving cache.`
|
||||
);
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(3);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
|
||||
expect(debugSpy).toHaveBeenCalledTimes(0);
|
||||
expect(setFailedSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
@ -183,24 +192,22 @@ describe('run', () => {
|
|||
describe('action saves the cache', () => {
|
||||
it('saves cache from yarn 1', async () => {
|
||||
inputs['cache'] = 'yarn';
|
||||
getStateSpy.mockImplementation((name: string) => {
|
||||
if (name === State.CacheMatchedKey) {
|
||||
return yarnFileHash;
|
||||
} else {
|
||||
return npmFileHash;
|
||||
}
|
||||
});
|
||||
getCommandOutputSpy
|
||||
.mockImplementationOnce(() => '1.2.3')
|
||||
.mockImplementationOnce(() => `${commonPath}/yarn1`);
|
||||
getStateSpy.mockImplementation((key: string) =>
|
||||
key === State.CacheMatchedKey
|
||||
? yarnFileHash
|
||||
: key === State.CachePrimaryKey
|
||||
? npmFileHash
|
||||
: key === State.CachePaths
|
||||
? '["/foo/bar"]'
|
||||
: 'not expected'
|
||||
);
|
||||
|
||||
await run();
|
||||
|
||||
expect(getInputSpy).toHaveBeenCalled();
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(2);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(2);
|
||||
expect(debugSpy).toHaveBeenCalledWith(`yarn path is ${commonPath}/yarn1`);
|
||||
expect(debugSpy).toHaveBeenCalledWith('Consumed yarn version is 1.2.3');
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(3);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
|
||||
expect(debugSpy).toHaveBeenCalledTimes(0);
|
||||
expect(infoSpy).not.toHaveBeenCalledWith(
|
||||
`Cache hit occurred on the primary key ${yarnFileHash}, not saving cache.`
|
||||
);
|
||||
|
@ -213,24 +220,22 @@ describe('run', () => {
|
|||
|
||||
it('saves cache from yarn 2', async () => {
|
||||
inputs['cache'] = 'yarn';
|
||||
getStateSpy.mockImplementation((name: string) => {
|
||||
if (name === State.CacheMatchedKey) {
|
||||
return yarnFileHash;
|
||||
} else {
|
||||
return npmFileHash;
|
||||
}
|
||||
});
|
||||
getCommandOutputSpy
|
||||
.mockImplementationOnce(() => '2.2.3')
|
||||
.mockImplementationOnce(() => `${commonPath}/yarn2`);
|
||||
getStateSpy.mockImplementation((key: string) =>
|
||||
key === State.CacheMatchedKey
|
||||
? yarnFileHash
|
||||
: key === State.CachePrimaryKey
|
||||
? npmFileHash
|
||||
: key === State.CachePaths
|
||||
? '["/foo/bar"]'
|
||||
: 'not expected'
|
||||
);
|
||||
|
||||
await run();
|
||||
|
||||
expect(getInputSpy).toHaveBeenCalled();
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(2);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(2);
|
||||
expect(debugSpy).toHaveBeenCalledWith(`yarn path is ${commonPath}/yarn2`);
|
||||
expect(debugSpy).toHaveBeenCalledWith('Consumed yarn version is 2.2.3');
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(3);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
|
||||
expect(debugSpy).toHaveBeenCalledTimes(0);
|
||||
expect(infoSpy).not.toHaveBeenCalledWith(
|
||||
`Cache hit occurred on the primary key ${yarnFileHash}, not saving cache.`
|
||||
);
|
||||
|
@ -243,21 +248,22 @@ describe('run', () => {
|
|||
|
||||
it('saves cache from npm', async () => {
|
||||
inputs['cache'] = 'npm';
|
||||
getStateSpy.mockImplementation((name: string) => {
|
||||
if (name === State.CacheMatchedKey) {
|
||||
return npmFileHash;
|
||||
} else {
|
||||
return yarnFileHash;
|
||||
}
|
||||
});
|
||||
getCommandOutputSpy.mockImplementationOnce(() => `${commonPath}/npm`);
|
||||
getStateSpy.mockImplementation((key: string) =>
|
||||
key === State.CacheMatchedKey
|
||||
? npmFileHash
|
||||
: key === State.CachePrimaryKey
|
||||
? yarnFileHash
|
||||
: key === State.CachePaths
|
||||
? '["/foo/bar"]'
|
||||
: 'not expected'
|
||||
);
|
||||
|
||||
await run();
|
||||
|
||||
expect(getInputSpy).toHaveBeenCalled();
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(2);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(1);
|
||||
expect(debugSpy).toHaveBeenCalledWith(`npm path is ${commonPath}/npm`);
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(3);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
|
||||
expect(debugSpy).toHaveBeenCalledTimes(0);
|
||||
expect(infoSpy).not.toHaveBeenCalledWith(
|
||||
`Cache hit occurred on the primary key ${npmFileHash}, not saving cache.`
|
||||
);
|
||||
|
@ -270,21 +276,22 @@ describe('run', () => {
|
|||
|
||||
it('saves cache from pnpm', async () => {
|
||||
inputs['cache'] = 'pnpm';
|
||||
getStateSpy.mockImplementation((name: string) => {
|
||||
if (name === State.CacheMatchedKey) {
|
||||
return pnpmFileHash;
|
||||
} else {
|
||||
return npmFileHash;
|
||||
}
|
||||
});
|
||||
getCommandOutputSpy.mockImplementationOnce(() => `${commonPath}/pnpm`);
|
||||
getStateSpy.mockImplementation((key: string) =>
|
||||
key === State.CacheMatchedKey
|
||||
? pnpmFileHash
|
||||
: key === State.CachePrimaryKey
|
||||
? npmFileHash
|
||||
: key === State.CachePaths
|
||||
? '["/foo/bar"]'
|
||||
: 'not expected'
|
||||
);
|
||||
|
||||
await run();
|
||||
|
||||
expect(getInputSpy).toHaveBeenCalled();
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(2);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(1);
|
||||
expect(debugSpy).toHaveBeenCalledWith(`pnpm path is ${commonPath}/pnpm`);
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(3);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
|
||||
expect(debugSpy).toHaveBeenCalledTimes(0);
|
||||
expect(infoSpy).not.toHaveBeenCalledWith(
|
||||
`Cache hit occurred on the primary key ${pnpmFileHash}, not saving cache.`
|
||||
);
|
||||
|
@ -297,14 +304,15 @@ describe('run', () => {
|
|||
|
||||
it('save with -1 cacheId , should not fail workflow', async () => {
|
||||
inputs['cache'] = 'npm';
|
||||
getStateSpy.mockImplementation((name: string) => {
|
||||
if (name === State.CacheMatchedKey) {
|
||||
return npmFileHash;
|
||||
} else {
|
||||
return yarnFileHash;
|
||||
}
|
||||
});
|
||||
getCommandOutputSpy.mockImplementationOnce(() => `${commonPath}/npm`);
|
||||
getStateSpy.mockImplementation((key: string) =>
|
||||
key === State.CacheMatchedKey
|
||||
? npmFileHash
|
||||
: key === State.CachePrimaryKey
|
||||
? yarnFileHash
|
||||
: key === State.CachePaths
|
||||
? '["/foo/bar"]'
|
||||
: 'not expected'
|
||||
);
|
||||
saveCacheSpy.mockImplementation(() => {
|
||||
return -1;
|
||||
});
|
||||
|
@ -312,9 +320,9 @@ describe('run', () => {
|
|||
await run();
|
||||
|
||||
expect(getInputSpy).toHaveBeenCalled();
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(2);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(1);
|
||||
expect(debugSpy).toHaveBeenCalledWith(`npm path is ${commonPath}/npm`);
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(3);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
|
||||
expect(debugSpy).toHaveBeenCalledTimes(0);
|
||||
expect(infoSpy).not.toHaveBeenCalledWith(
|
||||
`Cache hit occurred on the primary key ${npmFileHash}, not saving cache.`
|
||||
);
|
||||
|
@ -327,14 +335,15 @@ describe('run', () => {
|
|||
|
||||
it('saves with error from toolkit, should fail workflow', async () => {
|
||||
inputs['cache'] = 'npm';
|
||||
getStateSpy.mockImplementation((name: string) => {
|
||||
if (name === State.CacheMatchedKey) {
|
||||
return npmFileHash;
|
||||
} else {
|
||||
return yarnFileHash;
|
||||
}
|
||||
});
|
||||
getCommandOutputSpy.mockImplementationOnce(() => `${commonPath}/npm`);
|
||||
getStateSpy.mockImplementation((key: string) =>
|
||||
key === State.CacheMatchedKey
|
||||
? npmFileHash
|
||||
: key === State.CachePrimaryKey
|
||||
? yarnFileHash
|
||||
: key === State.CachePaths
|
||||
? '["/foo/bar"]'
|
||||
: 'not expected'
|
||||
);
|
||||
saveCacheSpy.mockImplementation(() => {
|
||||
throw new cache.ValidationError('Validation failed');
|
||||
});
|
||||
|
@ -342,9 +351,9 @@ describe('run', () => {
|
|||
await run();
|
||||
|
||||
expect(getInputSpy).toHaveBeenCalled();
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(2);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(1);
|
||||
expect(debugSpy).toHaveBeenCalledWith(`npm path is ${commonPath}/npm`);
|
||||
expect(getStateSpy).toHaveBeenCalledTimes(3);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(0);
|
||||
expect(debugSpy).toHaveBeenCalledTimes(0);
|
||||
expect(infoSpy).not.toHaveBeenCalledWith(
|
||||
`Cache hit occurred on the primary key ${npmFileHash}, not saving cache.`
|
||||
);
|
||||
|
|
|
@ -2,7 +2,18 @@ import * as core from '@actions/core';
|
|||
import * as cache from '@actions/cache';
|
||||
import path from 'path';
|
||||
import * as utils from '../src/cache-utils';
|
||||
import {PackageManagerInfo, isCacheFeatureAvailable} from '../src/cache-utils';
|
||||
import {
|
||||
PackageManagerInfo,
|
||||
isCacheFeatureAvailable,
|
||||
supportedPackageManagers,
|
||||
getCommandOutput,
|
||||
resetProjectDirectoriesMemoized
|
||||
} from '../src/cache-utils';
|
||||
import fs from 'fs';
|
||||
import * as cacheUtils from '../src/cache-utils';
|
||||
import * as glob from '@actions/glob';
|
||||
import {Globber} from '@actions/glob';
|
||||
import {MockGlobber} from './mock/glob-mock';
|
||||
|
||||
describe('cache-utils', () => {
|
||||
const versionYarn1 = '1.2.3';
|
||||
|
@ -12,8 +23,10 @@ describe('cache-utils', () => {
|
|||
let isFeatureAvailable: jest.SpyInstance;
|
||||
let info: jest.SpyInstance;
|
||||
let warningSpy: jest.SpyInstance;
|
||||
let fsRealPathSyncSpy: jest.SpyInstance;
|
||||
|
||||
beforeEach(() => {
|
||||
console.log('::stop-commands::stoptoken');
|
||||
process.env['GITHUB_WORKSPACE'] = path.join(__dirname, 'data');
|
||||
debugSpy = jest.spyOn(core, 'debug');
|
||||
debugSpy.mockImplementation(msg => {});
|
||||
|
@ -24,13 +37,29 @@ describe('cache-utils', () => {
|
|||
isFeatureAvailable = jest.spyOn(cache, 'isFeatureAvailable');
|
||||
|
||||
getCommandOutputSpy = jest.spyOn(utils, 'getCommandOutput');
|
||||
|
||||
fsRealPathSyncSpy = jest.spyOn(fs, 'realpathSync');
|
||||
fsRealPathSyncSpy.mockImplementation(dirName => {
|
||||
return dirName;
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
jest.resetAllMocks();
|
||||
jest.clearAllMocks();
|
||||
//jest.restoreAllMocks();
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
console.log('::stoptoken::');
|
||||
jest.restoreAllMocks();
|
||||
}, 100000);
|
||||
|
||||
describe('getPackageManagerInfo', () => {
|
||||
it.each<[string, PackageManagerInfo | null]>([
|
||||
['npm', utils.supportedPackageManagers.npm],
|
||||
['pnpm', utils.supportedPackageManagers.pnpm],
|
||||
['yarn', utils.supportedPackageManagers.yarn1],
|
||||
['yarn', utils.supportedPackageManagers.yarn],
|
||||
['yarn1', null],
|
||||
['yarn2', null],
|
||||
['npm7', null]
|
||||
|
@ -72,4 +101,263 @@ describe('cache-utils', () => {
|
|||
jest.resetAllMocks();
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
describe('getCacheDirectoriesPaths', () => {
|
||||
let existsSpy: jest.SpyInstance;
|
||||
let lstatSpy: jest.SpyInstance;
|
||||
let globCreateSpy: jest.SpyInstance;
|
||||
|
||||
beforeEach(() => {
|
||||
existsSpy = jest.spyOn(fs, 'existsSync');
|
||||
existsSpy.mockImplementation(() => true);
|
||||
|
||||
lstatSpy = jest.spyOn(fs, 'lstatSync');
|
||||
lstatSpy.mockImplementation(arg => ({
|
||||
isDirectory: () => true
|
||||
}));
|
||||
|
||||
globCreateSpy = jest.spyOn(glob, 'create');
|
||||
|
||||
globCreateSpy.mockImplementation(
|
||||
(pattern: string): Promise<Globber> =>
|
||||
MockGlobber.create(['/foo', '/bar'])
|
||||
);
|
||||
|
||||
resetProjectDirectoriesMemoized();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
existsSpy.mockRestore();
|
||||
lstatSpy.mockRestore();
|
||||
globCreateSpy.mockRestore();
|
||||
});
|
||||
|
||||
it.each([
|
||||
[supportedPackageManagers.npm, ''],
|
||||
[supportedPackageManagers.npm, '/dir/file.lock'],
|
||||
[supportedPackageManagers.npm, '/**/file.lock'],
|
||||
[supportedPackageManagers.pnpm, ''],
|
||||
[supportedPackageManagers.pnpm, '/dir/file.lock'],
|
||||
[supportedPackageManagers.pnpm, '/**/file.lock']
|
||||
])(
|
||||
'getCacheDirectoriesPaths should return one dir for non yarn',
|
||||
async (packageManagerInfo, cacheDependency) => {
|
||||
getCommandOutputSpy.mockImplementation(() => 'foo');
|
||||
|
||||
const dirs = await cacheUtils.getCacheDirectories(
|
||||
packageManagerInfo,
|
||||
cacheDependency
|
||||
);
|
||||
expect(dirs).toEqual(['foo']);
|
||||
// to do not call for a version
|
||||
// call once for get cache folder
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(1);
|
||||
}
|
||||
);
|
||||
|
||||
it('getCacheDirectoriesPaths should return one dir for yarn without cacheDependency', async () => {
|
||||
getCommandOutputSpy.mockImplementation(() => 'foo');
|
||||
|
||||
const dirs = await cacheUtils.getCacheDirectories(
|
||||
supportedPackageManagers.yarn,
|
||||
''
|
||||
);
|
||||
expect(dirs).toEqual(['foo']);
|
||||
});
|
||||
|
||||
it.each([
|
||||
[supportedPackageManagers.npm, ''],
|
||||
[supportedPackageManagers.npm, '/dir/file.lock'],
|
||||
[supportedPackageManagers.npm, '/**/file.lock'],
|
||||
[supportedPackageManagers.pnpm, ''],
|
||||
[supportedPackageManagers.pnpm, '/dir/file.lock'],
|
||||
[supportedPackageManagers.pnpm, '/**/file.lock'],
|
||||
[supportedPackageManagers.yarn, ''],
|
||||
[supportedPackageManagers.yarn, '/dir/file.lock'],
|
||||
[supportedPackageManagers.yarn, '/**/file.lock']
|
||||
])(
|
||||
'getCacheDirectoriesPaths should throw for getCommandOutput returning empty',
|
||||
async (packageManagerInfo, cacheDependency) => {
|
||||
getCommandOutputSpy.mockImplementation((command: string) =>
|
||||
// return empty string to indicate getCacheFolderPath failed
|
||||
// --version still works
|
||||
command.includes('version') ? '1.' : ''
|
||||
);
|
||||
|
||||
await expect(
|
||||
cacheUtils.getCacheDirectories(packageManagerInfo, cacheDependency)
|
||||
).rejects.toThrow(); //'Could not get cache folder path for /dir');
|
||||
}
|
||||
);
|
||||
|
||||
it.each([
|
||||
[supportedPackageManagers.yarn, '/dir/file.lock'],
|
||||
[supportedPackageManagers.yarn, '/**/file.lock']
|
||||
])(
|
||||
'getCacheDirectoriesPaths should nothrow in case of having not directories',
|
||||
async (packageManagerInfo, cacheDependency) => {
|
||||
lstatSpy.mockImplementation(arg => ({
|
||||
isDirectory: () => false
|
||||
}));
|
||||
|
||||
await cacheUtils.getCacheDirectories(
|
||||
packageManagerInfo,
|
||||
cacheDependency
|
||||
);
|
||||
expect(warningSpy).toHaveBeenCalledTimes(1);
|
||||
expect(warningSpy).toHaveBeenCalledWith(
|
||||
`No existing directories found containing cache-dependency-path="${cacheDependency}"`
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
it.each(['1.1.1', '2.2.2'])(
|
||||
'getCacheDirectoriesPaths yarn v%s should return one dir without cacheDependency',
|
||||
async version => {
|
||||
getCommandOutputSpy.mockImplementationOnce(() => version);
|
||||
getCommandOutputSpy.mockImplementationOnce(() => `foo${version}`);
|
||||
|
||||
const dirs = await cacheUtils.getCacheDirectories(
|
||||
supportedPackageManagers.yarn,
|
||||
''
|
||||
);
|
||||
expect(dirs).toEqual([`foo${version}`]);
|
||||
}
|
||||
);
|
||||
|
||||
it.each(['1.1.1', '2.2.2'])(
|
||||
'getCacheDirectoriesPaths yarn v%s should return 2 dirs with globbed cacheDependency',
|
||||
async version => {
|
||||
let dirNo = 1;
|
||||
getCommandOutputSpy.mockImplementation((command: string) =>
|
||||
command.includes('version') ? version : `file_${version}_${dirNo++}`
|
||||
);
|
||||
globCreateSpy.mockImplementation(
|
||||
(pattern: string): Promise<Globber> =>
|
||||
MockGlobber.create(['/tmp/dir1/file', '/tmp/dir2/file'])
|
||||
);
|
||||
|
||||
const dirs = await cacheUtils.getCacheDirectories(
|
||||
supportedPackageManagers.yarn,
|
||||
'/tmp/**/file'
|
||||
);
|
||||
expect(dirs).toEqual([`file_${version}_1`, `file_${version}_2`]);
|
||||
}
|
||||
);
|
||||
|
||||
it.each(['1.1.1', '2.2.2'])(
|
||||
'getCacheDirectoriesPaths yarn v%s should return 2 dirs with globbed cacheDependency expanding to duplicates',
|
||||
async version => {
|
||||
let dirNo = 1;
|
||||
getCommandOutputSpy.mockImplementation((command: string) =>
|
||||
command.includes('version') ? version : `file_${version}_${dirNo++}`
|
||||
);
|
||||
globCreateSpy.mockImplementation(
|
||||
(pattern: string): Promise<Globber> =>
|
||||
MockGlobber.create([
|
||||
'/tmp/dir1/file',
|
||||
'/tmp/dir2/file',
|
||||
'/tmp/dir1/file'
|
||||
])
|
||||
);
|
||||
|
||||
const dirs = await cacheUtils.getCacheDirectories(
|
||||
supportedPackageManagers.yarn,
|
||||
'/tmp/**/file'
|
||||
);
|
||||
expect(dirs).toEqual([`file_${version}_1`, `file_${version}_2`]);
|
||||
}
|
||||
);
|
||||
|
||||
it.each(['1.1.1', '2.2.2'])(
|
||||
'getCacheDirectoriesPaths yarn v%s should return 2 uniq dirs despite duplicate cache directories',
|
||||
async version => {
|
||||
let dirNo = 1;
|
||||
getCommandOutputSpy.mockImplementation((command: string) =>
|
||||
command.includes('version')
|
||||
? version
|
||||
: `file_${version}_${dirNo++ % 2}`
|
||||
);
|
||||
globCreateSpy.mockImplementation(
|
||||
(pattern: string): Promise<Globber> =>
|
||||
MockGlobber.create([
|
||||
'/tmp/dir1/file',
|
||||
'/tmp/dir2/file',
|
||||
'/tmp/dir3/file'
|
||||
])
|
||||
);
|
||||
|
||||
const dirs = await cacheUtils.getCacheDirectories(
|
||||
supportedPackageManagers.yarn,
|
||||
'/tmp/**/file'
|
||||
);
|
||||
expect(dirs).toEqual([`file_${version}_1`, `file_${version}_0`]);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledTimes(6);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledWith(
|
||||
'yarn --version',
|
||||
'/tmp/dir1'
|
||||
);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledWith(
|
||||
'yarn --version',
|
||||
'/tmp/dir2'
|
||||
);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledWith(
|
||||
'yarn --version',
|
||||
'/tmp/dir3'
|
||||
);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledWith(
|
||||
version.startsWith('1.')
|
||||
? 'yarn cache dir'
|
||||
: 'yarn config get cacheFolder',
|
||||
'/tmp/dir1'
|
||||
);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledWith(
|
||||
version.startsWith('1.')
|
||||
? 'yarn cache dir'
|
||||
: 'yarn config get cacheFolder',
|
||||
'/tmp/dir2'
|
||||
);
|
||||
expect(getCommandOutputSpy).toHaveBeenCalledWith(
|
||||
version.startsWith('1.')
|
||||
? 'yarn cache dir'
|
||||
: 'yarn config get cacheFolder',
|
||||
'/tmp/dir3'
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
it.each(['1.1.1', '2.2.2'])(
|
||||
'getCacheDirectoriesPaths yarn v%s should return 4 dirs with multiple globs',
|
||||
async version => {
|
||||
// simulate wrong indents
|
||||
const cacheDependencyPath = `/tmp/dir1/file
|
||||
/tmp/dir2/file
|
||||
/tmp/**/file
|
||||
`;
|
||||
globCreateSpy.mockImplementation(
|
||||
(pattern: string): Promise<Globber> =>
|
||||
MockGlobber.create([
|
||||
'/tmp/dir1/file',
|
||||
'/tmp/dir2/file',
|
||||
'/tmp/dir3/file',
|
||||
'/tmp/dir4/file'
|
||||
])
|
||||
);
|
||||
let dirNo = 1;
|
||||
getCommandOutputSpy.mockImplementation((command: string) =>
|
||||
command.includes('version') ? version : `file_${version}_${dirNo++}`
|
||||
);
|
||||
const dirs = await cacheUtils.getCacheDirectories(
|
||||
supportedPackageManagers.yarn,
|
||||
cacheDependencyPath
|
||||
);
|
||||
expect(dirs).toEqual([
|
||||
`file_${version}_1`,
|
||||
`file_${version}_2`,
|
||||
`file_${version}_3`,
|
||||
`file_${version}_4`
|
||||
]);
|
||||
}
|
||||
);
|
||||
});
|
||||
});
|
||||
|
|
__tests__/data/.tool-versions-node (new file, 1 line)

@@ -0,0 +1 @@
+node 14.0.0
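This fixture backs the `.tool-versions-node` entry added to the `node-version-file` matrix above. Reading the version out of such a file comes down to taking the value from the `node` line; the sketch below is illustrative only, and the function name is not the action's API:

```ts
import fs from 'fs';

// Illustrative parser for an asdf-style .tool-versions file containing e.g. "node 14.0.0".
function parseToolVersionsNode(filePath: string): string | undefined {
  const contents = fs.readFileSync(filePath, 'utf8');
  for (const line of contents.split(/\r?\n/)) {
    const match = line.trim().match(/^node(js)?\s+(?<version>[^\s]+)/);
    if (match?.groups?.version) {
      return match.groups.version; // "14.0.0"
    }
  }
  return undefined;
}
```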
__tests__/mock/glob-mock.test.ts (new file, 18 lines)

@@ -0,0 +1,18 @@
import {MockGlobber} from './glob-mock';

describe('mocked globber tests', () => {
  it('globber should return generator', async () => {
    const globber = new MockGlobber(['aaa', 'bbb', 'ccc']);
    const generator = globber.globGenerator();
    const result: string[] = [];
    for await (const itemPath of generator) {
      result.push(itemPath);
    }
    expect(result).toEqual(['aaa', 'bbb', 'ccc']);
  });
  it('globber should return glob', async () => {
    const globber = new MockGlobber(['aaa', 'bbb', 'ccc']);
    const result: string[] = await globber.glob();
    expect(result).toEqual(['aaa', 'bbb', 'ccc']);
  });
});
__tests__/mock/glob-mock.ts (new file, 29 lines)

@@ -0,0 +1,29 @@
import {Globber} from '@actions/glob';

export class MockGlobber implements Globber {
  private readonly expected: string[];
  constructor(expected: string[]) {
    this.expected = expected;
  }
  getSearchPaths(): string[] {
    return this.expected.slice();
  }

  async glob(): Promise<string[]> {
    const result: string[] = [];
    for await (const itemPath of this.globGenerator()) {
      result.push(itemPath);
    }
    return result;
  }

  async *globGenerator(): AsyncGenerator<string, void> {
    for (const e of this.expected) {
      yield e;
    }
  }

  static async create(expected: string[]): Promise<MockGlobber> {
    return new MockGlobber(expected);
  }
}
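The cache-utils tests earlier in this diff use the mock by replacing `glob.create`, so directory discovery never touches the real filesystem. For example:

```ts
import * as glob from '@actions/glob';
import {Globber} from '@actions/glob';
import {MockGlobber} from './glob-mock';

// Make any glob.create(pattern) call resolve to a fixed set of paths.
const globCreateSpy = jest.spyOn(glob, 'create');
globCreateSpy.mockImplementation(
  (pattern: string): Promise<Globber> =>
    MockGlobber.create(['/tmp/dir1/file', '/tmp/dir2/file'])
);
```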
@@ -95,6 +95,8 @@ describe('setup-node', () => {
       res = <INodeVersion[]>nodeTestDistRc;
     } else if (url.includes('/nightly')) {
       res = <INodeVersion[]>nodeTestDistNightly;
+    } else if (url.includes('/v8-canary')) {
+      res = <INodeVersion[]>nodeV8CanaryTestDist;
     } else {
       res = <INodeVersion[]>nodeTestDist;
     }
__tests__/prepare-yarn-subprojects.sh (new executable file, 59 lines)

@@ -0,0 +1,59 @@
#!/bin/sh -e
export YARN_ENABLE_IMMUTABLE_INSTALLS=false
rm package.json
rm package-lock.json
echo "create yarn2 project in the sub2"
mkdir sub2
cd sub2
cat <<EOT >package.json
{
  "name": "subproject",
  "dependencies": {
    "random": "^3.0.6",
    "uuid": "^9.0.0"
  }
}
EOT
yarn set version 2.4.3
yarn install

echo "create yarn3 project in the sub3"
cd ..
mkdir sub3
cd sub3
cat <<EOT >package.json
{
  "name": "subproject",
  "dependencies": {
    "random": "^3.0.6",
    "uuid": "^9.0.0"
  }
}
EOT
yarn set version 3.5.1
yarn install
if [ x$1 = 'xglobal' ];then
  echo enableGlobalCache
  echo 'enableGlobalCache: true' >> .yarnrc.yml
fi

cd ..
if [ x$1 != 'xkeepcache' -a x$2 != 'xkeepcache' ]; then
  rm -rf sub2/.yarn/cache
  rm -rf sub3/.yarn/cache
fi

if [ x$1 = 'xyarn1' ];then
  echo "create yarn1 project in the root"
  cat <<EOT >package.json
{
  "name": "subproject",
  "dependencies": {
    "random": "^3.0.6",
    "uuid": "^9.0.0"
  }
}
EOT
  yarn set version 1.22.19
  yarn install
fi
dist/cache-save/index.js (vendored, 2922 changes): file diff suppressed because it is too large
dist/setup/index.js (vendored, 1645 changes): file diff suppressed because it is too large
@@ -261,10 +261,14 @@ steps:
   with:
     node-version: '14'
     cache: 'pnpm'
-- run: pnpm install --frozen-lockfile
+- run: pnpm install
 - run: pnpm test
 ```
 
+> **Note**: By default `--frozen-lockfile` option is passed starting from pnpm `6.10.x`. It will be automatically added if you run it on [CI](https://pnpm.io/cli/install#--frozen-lockfile).
+> If the `pnpm-lock.yaml` file changes then pass `--frozen-lockfile` option.
+
+
 **Using wildcard patterns to cache dependencies**
 ```yaml
 steps:

@@ -401,11 +405,14 @@ steps:
     yarn config set npmScopes.my-org.npmAlwaysAuth true
     yarn config set npmScopes.my-org.npmAuthToken $NPM_AUTH_TOKEN
   env:
-    NPM_AUTH_TOKEN: ${{ secrets.YARN_TOKEN }}
+    NPM_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 - name: Install dependencies
   run: yarn install --immutable
 ```
+NOTE: As per https://github.com/actions/setup-node/issues/49 you cannot use `secrets.GITHUB_TOKEN` to access private GitHub Packages within the same organisation but in a different repository.
+
+To access private GitHub Packages within the same organization, go to "Manage Actions access" in Package settings and set the repositories you want to access.
+
+Please refer to the [Ensuring workflow access to your package - Configuring a package's access control and visibility](https://docs.github.com/en/packages/learn-github-packages/configuring-a-packages-access-control-and-visibility#ensuring-workflow-access-to-your-package) for more details.
 
 ### always-auth input
 The always-auth input sets `always-auth=true` in .npmrc file. With this option set [npm](https://docs.npmjs.com/cli/v6/using-npm/config#always-auth)/yarn sends the authentication credentials when making a request to the registries.
239
package-lock.json
generated
239
package-lock.json
generated
|
@ -17,7 +17,7 @@
|
|||
"@actions/http-client": "^1.0.11",
|
||||
"@actions/io": "^1.0.2",
|
||||
"@actions/tool-cache": "^1.5.4",
|
||||
"semver": "^6.1.1"
|
||||
"semver": "^6.3.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/jest": "^27.0.2",
|
||||
|
@ -39,17 +39,18 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@actions/cache": {
|
||||
"version": "3.0.4",
|
||||
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.4.tgz",
|
||||
"integrity": "sha512-9RwVL8/ISJoYWFNH1wR/C26E+M3HDkGPWmbFJMMCKwTkjbNZJreMT4XaR/EB1bheIvN4PREQxEQQVJ18IPnf/Q==",
|
||||
"version": "3.2.1",
|
||||
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.2.1.tgz",
|
||||
"integrity": "sha512-QurbMiY//02+0kN1adJkMHN44RcZ5kAXfhSnKUZmtSmhMTNqLitGArG1xOkt93NNyByTlLGAc5wIOF/dZ2ENOQ==",
|
||||
"dependencies": {
|
||||
"@actions/core": "^1.2.6",
|
||||
"@actions/core": "^1.10.0",
|
||||
"@actions/exec": "^1.0.1",
|
||||
"@actions/glob": "^0.1.0",
|
||||
"@actions/http-client": "^2.0.1",
|
||||
"@actions/io": "^1.0.1",
|
||||
"@azure/abort-controller": "^1.1.0",
|
||||
"@azure/ms-rest-js": "^2.6.0",
|
||||
"@azure/storage-blob": "^12.8.0",
|
||||
"@azure/storage-blob": "^12.13.0",
|
||||
"semver": "^6.1.0",
|
||||
"uuid": "^3.3.3"
|
||||
}
|
||||
|
@ -158,14 +159,14 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@azure/abort-controller": {
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-1.0.4.tgz",
|
||||
"integrity": "sha512-lNUmDRVGpanCsiUN3NWxFTdwmdFI53xwhkTFfHDGTYk46ca7Ind3nanJc+U6Zj9Tv+9nTCWRBscWEW1DyKOpTw==",
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-1.1.0.tgz",
|
||||
"integrity": "sha512-TrRLIoSQVzfAJX9H1JeFjzAoDGcoK1IYX1UImfceTZpsyYfWr09Ss1aHW1y5TrrR3iq6RZLBwJ3E24uwPhwahw==",
|
||||
"dependencies": {
|
||||
"tslib": "^2.0.0"
|
||||
"tslib": "^2.2.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=8.0.0"
|
||||
"node": ">=12.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/abort-controller/node_modules/tslib": {
|
||||
|
@ -335,15 +336,14 @@
|
|||
"integrity": "sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw=="
|
||||
},
|
||||
"node_modules/@azure/ms-rest-js": {
|
||||
"version": "2.6.6",
|
||||
"resolved": "https://registry.npmjs.org/@azure/ms-rest-js/-/ms-rest-js-2.6.6.tgz",
|
||||
"integrity": "sha512-WYIda8VvrkZE68xHgOxUXvjThxNf1nnGPPe0rAljqK5HJHIZ12Pi3YhEDOn3Ge7UnwaaM3eFO0VtAy4nGVI27Q==",
|
||||
"version": "2.7.0",
|
||||
"resolved": "https://registry.npmjs.org/@azure/ms-rest-js/-/ms-rest-js-2.7.0.tgz",
|
||||
"integrity": "sha512-ngbzWbqF+NmztDOpLBVDxYM+XLcUj7nKhxGbSU9WtIsXfRB//cf2ZbAG5HkOrhU9/wd/ORRB6lM/d69RKVjiyA==",
|
||||
"dependencies": {
|
||||
"@azure/core-auth": "^1.1.4",
|
||||
"abort-controller": "^3.0.0",
|
||||
"form-data": "^2.5.0",
|
||||
"node-fetch": "^2.6.7",
|
||||
"tough-cookie": "^3.0.1",
|
||||
"tslib": "^1.10.0",
|
||||
"tunnel": "0.0.6",
|
||||
"uuid": "^8.3.2",
|
||||
|
@ -363,19 +363,6 @@
|
|||
"node": ">= 0.12"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/ms-rest-js/node_modules/tough-cookie": {
|
||||
"version": "3.0.1",
|
||||
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-3.0.1.tgz",
|
||||
"integrity": "sha512-yQyJ0u4pZsv9D4clxO69OEjLWYw+jbgspjTue4lTQZLfV0c5l1VmK2y1JK8E9ahdpltPOaAThPcp5nKPUgSnsg==",
|
||||
"dependencies": {
|
||||
"ip-regex": "^2.1.0",
|
||||
"psl": "^1.1.28",
|
||||
"punycode": "^2.1.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
}
|
||||
},
|
||||
"node_modules/@azure/ms-rest-js/node_modules/uuid": {
|
||||
"version": "8.3.2",
|
||||
"resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
|
||||
|
@ -458,15 +445,6 @@
|
|||
"url": "https://opencollective.com/babel"
|
||||
}
|
||||
},
|
||||
"node_modules/@babel/core/node_modules/semver": {
|
||||
"version": "6.3.0",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
|
||||
"integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
|
||||
"dev": true,
|
||||
"bin": {
|
||||
"semver": "bin/semver.js"
|
||||
}
|
||||
},
|
||||
"node_modules/@babel/core/node_modules/source-map": {
|
||||
"version": "0.5.7",
|
||||
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz",
|
||||
|
@ -517,15 +495,6 @@
|
|||
"@babel/core": "^7.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@babel/helper-compilation-targets/node_modules/semver": {
|
||||
"version": "6.3.0",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
|
||||
"integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
|
||||
"dev": true,
|
||||
"bin": {
|
||||
"semver": "bin/semver.js"
|
||||
}
|
||||
},
|
||||
"node_modules/@babel/helper-function-name": {
|
||||
"version": "7.15.4",
|
||||
"resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.15.4.tgz",
|
||||
|
@ -3785,14 +3754,6 @@
|
|||
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/ip-regex": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/ip-regex/-/ip-regex-2.1.0.tgz",
|
||||
"integrity": "sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk=",
|
||||
"engines": {
|
||||
"node": ">=4"
|
||||
}
|
||||
},
|
||||
"node_modules/is-ci": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/is-ci/-/is-ci-3.0.0.tgz",
|
||||
|
@ -3923,15 +3884,6 @@
|
|||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/istanbul-lib-instrument/node_modules/semver": {
|
||||
"version": "6.3.0",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
|
||||
"integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
|
||||
"dev": true,
|
||||
"bin": {
|
||||
"semver": "bin/semver.js"
|
||||
}
|
||||
},
|
||||
"node_modules/istanbul-lib-report": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz",
|
||||
|
@ -5538,7 +5490,8 @@
|
|||
"node_modules/psl": {
|
||||
"version": "1.8.0",
|
||||
"resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz",
|
||||
"integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ=="
|
||||
"integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/pump": {
|
||||
"version": "3.0.0",
|
||||
|
@ -5553,10 +5506,17 @@
|
|||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz",
|
||||
"integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
}
|
||||
},
|
||||
"node_modules/querystringify": {
|
||||
"version": "2.2.0",
|
||||
"resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz",
|
||||
"integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/queue-microtask": {
|
||||
"version": "1.2.3",
|
||||
"resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
|
||||
|
@ -5604,6 +5564,12 @@
|
|||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/requires-port": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
|
||||
"integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/resolve": {
|
||||
"version": "1.22.2",
|
||||
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.2.tgz",
|
||||
|
@ -5720,11 +5686,11 @@
|
|||
}
|
||||
},
|
||||
"node_modules/semver": {
|
||||
"version": "6.1.2",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-6.1.2.tgz",
|
||||
"integrity": "sha512-z4PqiCpomGtWj8633oeAdXm1Kn1W++3T8epkZYnwiVgIYIJ0QHszhInYSJTYxebByQH7KVCEAn8R9duzZW2PhQ==",
|
||||
"version": "6.3.1",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
|
||||
"integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
|
||||
"bin": {
|
||||
"semver": "bin/semver"
|
||||
"semver": "bin/semver.js"
|
||||
}
|
||||
},
|
||||
"node_modules/shebang-command": {
|
||||
|
@ -6002,14 +5968,15 @@
|
|||
}
|
||||
},
|
||||
"node_modules/tough-cookie": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz",
|
||||
"integrity": "sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==",
|
||||
"version": "4.1.3",
|
||||
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.3.tgz",
|
||||
"integrity": "sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"psl": "^1.1.33",
|
||||
"punycode": "^2.1.1",
|
||||
"universalify": "^0.1.2"
|
||||
"universalify": "^0.2.0",
|
||||
"url-parse": "^1.5.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=6"
|
||||
|
@ -6174,9 +6141,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/universalify": {
|
||||
"version": "0.1.2",
|
||||
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
|
||||
"integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==",
|
||||
"version": "0.2.0",
|
||||
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz",
|
||||
"integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">= 4.0.0"
|
||||
|
@ -6191,6 +6158,16 @@
|
|||
"punycode": "^2.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/url-parse": {
|
||||
"version": "1.5.10",
|
||||
"resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz",
|
||||
"integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"querystringify": "^2.1.1",
|
||||
"requires-port": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/uuid": {
|
||||
"version": "3.3.2",
|
||||
"resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz",
|
||||
|
@ -6466,17 +6443,18 @@
|
|||
},
|
||||
"dependencies": {
|
||||
"@actions/cache": {
|
||||
"version": "3.0.4",
|
||||
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.4.tgz",
|
||||
"integrity": "sha512-9RwVL8/ISJoYWFNH1wR/C26E+M3HDkGPWmbFJMMCKwTkjbNZJreMT4XaR/EB1bheIvN4PREQxEQQVJ18IPnf/Q==",
|
||||
"version": "3.2.1",
|
||||
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.2.1.tgz",
|
||||
"integrity": "sha512-QurbMiY//02+0kN1adJkMHN44RcZ5kAXfhSnKUZmtSmhMTNqLitGArG1xOkt93NNyByTlLGAc5wIOF/dZ2ENOQ==",
|
||||
"requires": {
|
||||
"@actions/core": "^1.2.6",
|
||||
"@actions/core": "^1.10.0",
|
||||
"@actions/exec": "^1.0.1",
|
||||
"@actions/glob": "^0.1.0",
|
||||
"@actions/http-client": "^2.0.1",
|
||||
"@actions/io": "^1.0.1",
|
||||
"@azure/abort-controller": "^1.1.0",
|
||||
"@azure/ms-rest-js": "^2.6.0",
|
||||
"@azure/storage-blob": "^12.8.0",
|
||||
"@azure/storage-blob": "^12.13.0",
|
||||
"semver": "^6.1.0",
|
||||
"uuid": "^3.3.3"
|
||||
},
|
||||
|
@ -6582,11 +6560,11 @@
|
|||
}
|
||||
},
|
||||
"@azure/abort-controller": {
|
||||
"version": "1.0.4",
|
||||
"resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-1.0.4.tgz",
|
||||
"integrity": "sha512-lNUmDRVGpanCsiUN3NWxFTdwmdFI53xwhkTFfHDGTYk46ca7Ind3nanJc+U6Zj9Tv+9nTCWRBscWEW1DyKOpTw==",
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-1.1.0.tgz",
|
||||
"integrity": "sha512-TrRLIoSQVzfAJX9H1JeFjzAoDGcoK1IYX1UImfceTZpsyYfWr09Ss1aHW1y5TrrR3iq6RZLBwJ3E24uwPhwahw==",
|
||||
"requires": {
|
||||
"tslib": "^2.0.0"
|
||||
"tslib": "^2.2.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"tslib": {
|
||||
|
@ -6742,15 +6720,14 @@
|
|||
}
|
||||
},
|
||||
"@azure/ms-rest-js": {
|
||||
"version": "2.6.6",
|
||||
"resolved": "https://registry.npmjs.org/@azure/ms-rest-js/-/ms-rest-js-2.6.6.tgz",
|
||||
"integrity": "sha512-WYIda8VvrkZE68xHgOxUXvjThxNf1nnGPPe0rAljqK5HJHIZ12Pi3YhEDOn3Ge7UnwaaM3eFO0VtAy4nGVI27Q==",
|
||||
"version": "2.7.0",
|
||||
"resolved": "https://registry.npmjs.org/@azure/ms-rest-js/-/ms-rest-js-2.7.0.tgz",
|
||||
"integrity": "sha512-ngbzWbqF+NmztDOpLBVDxYM+XLcUj7nKhxGbSU9WtIsXfRB//cf2ZbAG5HkOrhU9/wd/ORRB6lM/d69RKVjiyA==",
|
||||
"requires": {
|
||||
"@azure/core-auth": "^1.1.4",
|
||||
"abort-controller": "^3.0.0",
|
||||
"form-data": "^2.5.0",
|
||||
"node-fetch": "^2.6.7",
|
||||
"tough-cookie": "^3.0.1",
|
||||
"tslib": "^1.10.0",
|
||||
"tunnel": "0.0.6",
|
||||
"uuid": "^8.3.2",
|
||||
|
@ -6767,16 +6744,6 @@
|
|||
"mime-types": "^2.1.12"
|
||||
}
|
||||
},
|
||||
"tough-cookie": {
|
||||
"version": "3.0.1",
|
||||
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-3.0.1.tgz",
|
||||
"integrity": "sha512-yQyJ0u4pZsv9D4clxO69OEjLWYw+jbgspjTue4lTQZLfV0c5l1VmK2y1JK8E9ahdpltPOaAThPcp5nKPUgSnsg==",
|
||||
"requires": {
|
||||
"ip-regex": "^2.1.0",
|
||||
"psl": "^1.1.28",
|
||||
"punycode": "^2.1.1"
|
||||
}
|
||||
},
|
||||
"uuid": {
|
||||
"version": "8.3.2",
|
||||
"resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
|
||||
|
@@ -6844,12 +6811,6 @@
"source-map": "^0.5.0"
},
"dependencies": {
"semver": {
"version": "6.3.0",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
"integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
"dev": true
},
"source-map": {
"version": "0.5.7",
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz",
@@ -6887,14 +6848,6 @@
"@babel/helper-validator-option": "^7.14.5",
"browserslist": "^4.16.6",
"semver": "^6.3.0"
},
"dependencies": {
"semver": {
"version": "6.3.0",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
"integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
"dev": true
}
}
},
"@babel/helper-function-name": {
@@ -9367,11 +9320,6 @@
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
"dev": true
},
"ip-regex": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/ip-regex/-/ip-regex-2.1.0.tgz",
"integrity": "sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk="
},
"is-ci": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/is-ci/-/is-ci-3.0.0.tgz",
@@ -9467,14 +9415,6 @@
"@istanbuljs/schema": "^0.1.2",
"istanbul-lib-coverage": "^3.0.0",
"semver": "^6.3.0"
},
"dependencies": {
"semver": {
"version": "6.3.0",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
"integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
"dev": true
}
}
},
"istanbul-lib-report": {
@@ -10695,7 +10635,8 @@
"psl": {
"version": "1.8.0",
"resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz",
"integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ=="
"integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==",
"dev": true
},
"pump": {
"version": "3.0.0",
@@ -10709,7 +10650,14 @@
"punycode": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz",
"integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A=="
"integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==",
"dev": true
},
"querystringify": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz",
"integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==",
"dev": true
},
"queue-microtask": {
"version": "1.2.3",
@@ -10735,6 +10683,12 @@
"integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=",
"dev": true
},
"requires-port": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
"integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==",
"dev": true
},
"resolve": {
"version": "1.22.2",
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.2.tgz",
@@ -10812,9 +10766,9 @@
}
},
"semver": {
"version": "6.1.2",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.1.2.tgz",
"integrity": "sha512-z4PqiCpomGtWj8633oeAdXm1Kn1W++3T8epkZYnwiVgIYIJ0QHszhInYSJTYxebByQH7KVCEAn8R9duzZW2PhQ=="
"version": "6.3.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
"integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="
},
"shebang-command": {
"version": "1.2.0",
@@ -11024,14 +10978,15 @@
}
},
"tough-cookie": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz",
"integrity": "sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==",
"version": "4.1.3",
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.3.tgz",
"integrity": "sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==",
"dev": true,
"requires": {
"psl": "^1.1.33",
"punycode": "^2.1.1",
"universalify": "^0.1.2"
"universalify": "^0.2.0",
"url-parse": "^1.5.3"
}
},
"tr46": {
@@ -11134,9 +11089,9 @@
}
},
"universalify": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
"integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==",
"version": "0.2.0",
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz",
"integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==",
"dev": true
},
"uri-js": {
@@ -11148,6 +11103,16 @@
"punycode": "^2.1.0"
}
},
"url-parse": {
"version": "1.5.10",
"resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz",
"integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==",
"dev": true,
"requires": {
"querystringify": "^2.1.1",
"requires-port": "^1.0.0"
}
},
"uuid": {
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz",
package.json
@@ -33,7 +33,7 @@
"@actions/http-client": "^1.0.11",
"@actions/io": "^1.0.2",
"@actions/tool-cache": "^1.5.4",
"semver": "^6.1.1"
"semver": "^6.3.1"
},
"devDependencies": {
"@types/jest": "^27.0.2",
src/cache-restore.ts
@@ -6,14 +6,15 @@ import fs from 'fs';

import {State} from './constants';
import {
getCacheDirectoryPath,
getCacheDirectories,
getPackageManagerInfo,
repoHasYarnBerryManagedDependencies,
PackageManagerInfo
} from './cache-utils';

export const restoreCache = async (
packageManager: string,
cacheDependencyPath?: string
cacheDependencyPath: string
) => {
const packageManagerInfo = await getPackageManagerInfo(packageManager);
if (!packageManagerInfo) {
@@ -21,10 +22,11 @@ export const restoreCache = async (
}
const platform = process.env.RUNNER_OS;

const cachePath = await getCacheDirectoryPath(
const cachePaths = await getCacheDirectories(
packageManagerInfo,
packageManager
cacheDependencyPath
);
core.saveState(State.CachePaths, cachePaths);
const lockFilePath = cacheDependencyPath
? cacheDependencyPath
: findLockFile(packageManagerInfo);
@@ -36,12 +38,26 @@ export const restoreCache = async (
);
}

const primaryKey = `node-cache-${platform}-${packageManager}-${fileHash}`;
const keyPrefix = `node-cache-${platform}-${packageManager}`;
const primaryKey = `${keyPrefix}-${fileHash}`;
core.debug(`primary key is ${primaryKey}`);

core.saveState(State.CachePrimaryKey, primaryKey);

const cacheKey = await cache.restoreCache([cachePath], primaryKey);
const isManagedByYarnBerry = await repoHasYarnBerryManagedDependencies(
packageManagerInfo,
cacheDependencyPath
);
let cacheKey: string | undefined;
if (isManagedByYarnBerry) {
core.info(
'All dependencies are managed locally by yarn3, the previous cache can be used'
);
cacheKey = await cache.restoreCache(cachePaths, primaryKey, [keyPrefix]);
} else {
cacheKey = await cache.restoreCache(cachePaths, primaryKey);
}

core.setOutput('cache-hit', Boolean(cacheKey));

if (!cacheKey) {
@@ -56,6 +72,7 @@ export const restoreCache = async (
const findLockFile = (packageManager: PackageManagerInfo) => {
const lockFiles = packageManager.lockFilePatterns;
const workspace = process.env.GITHUB_WORKSPACE!;

const rootContent = fs.readdirSync(workspace);

const lockFile = lockFiles.find(item => rootContent.includes(item));
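The restore flow above splits the cache key into a prefix and a hash-specific primary key, and it only falls back to the prefix as a restore key when every project is managed locally by Yarn Berry. A minimal illustrative sketch of that keying scheme (the helper name and parameters are invented; @actions/cache is the same dependency the action uses):

  import * as cache from '@actions/cache';

  // Sketch only: mirrors the keying logic in the hunk above.
  async function restoreNodeCache(
    platform: string,
    packageManager: string,
    fileHash: string,
    cachePaths: string[],
    allProjectsYarnBerryManaged: boolean
  ): Promise<string | undefined> {
    const keyPrefix = `node-cache-${platform}-${packageManager}`;
    const primaryKey = `${keyPrefix}-${fileHash}`;
    // A prefix (partial) hit is only safe when yarn itself prunes the local cache.
    return allProjectsYarnBerryManaged
      ? cache.restoreCache(cachePaths, primaryKey, [keyPrefix])
      : cache.restoreCache(cachePaths, primaryKey);
  }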
src/cache-save.ts
@@ -1,8 +1,10 @@
import * as core from '@actions/core';
import * as cache from '@actions/cache';

import fs from 'fs';

import {State} from './constants';
import {getCacheDirectoryPath, getPackageManagerInfo} from './cache-utils';
import {getPackageManagerInfo} from './cache-utils';

// Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in
// @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to
@@ -24,6 +26,10 @@ export async function run() {
const cachePackages = async (packageManager: string) => {
const state = core.getState(State.CacheMatchedKey);
const primaryKey = core.getState(State.CachePrimaryKey);
let cachePaths = JSON.parse(
core.getState(State.CachePaths) || '[]'
) as string[];
cachePaths = cachePaths.filter(fs.existsSync);

const packageManagerInfo = await getPackageManagerInfo(packageManager);
if (!packageManagerInfo) {
@@ -31,14 +37,12 @@ const cachePackages = async (packageManager: string) => {
return;
}

const cachePath = await getCacheDirectoryPath(
packageManagerInfo,
packageManager
);

if (!fs.existsSync(cachePath)) {
if (!cachePaths.length) {
// TODO: core.getInput has a bug - it can return undefined despite its definition (tests only?)
// export declare function getInput(name: string, options?: InputOptions): string;
const cacheDependencyPath = core.getInput('cache-dependency-path') || '';
throw new Error(
`Cache folder path is retrieved for ${packageManager} but doesn't exist on disk: ${cachePath}`
`Cache folder paths are not retrieved for ${packageManager} with cache-dependency-path = ${cacheDependencyPath}`
);
}

@@ -49,7 +53,7 @@ const cachePackages = async (packageManager: string) => {
return;
}

const cacheId = await cache.saveCache([cachePath], primaryKey);
const cacheId = await cache.saveCache(cachePaths, primaryKey);
if (cacheId == -1) {
return;
}
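The save step above no longer re-queries the package manager; it reads back the directory list that the restore step stored in the workflow state and drops entries that no longer exist on disk. A small sketch of that round trip, assuming the CACHE_PATHS state key introduced in the constants change further down (the helper names are illustrative, not part of the diff):

  import fs from 'fs';
  import * as core from '@actions/core';

  const CachePaths = 'CACHE_PATHS'; // mirrors State.CachePaths

  // Restore step: remember which cache directories were resolved.
  function rememberCachePaths(cachePaths: string[]): void {
    core.saveState(CachePaths, cachePaths); // non-string values are JSON-stringified
  }

  // Save step: read them back and keep only directories that still exist.
  function recallCachePaths(): string[] {
    const paths = JSON.parse(core.getState(CachePaths) || '[]') as string[];
    return paths.filter(fs.existsSync);
  }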
src/cache-utils.ts
@@ -1,40 +1,79 @@
import * as core from '@actions/core';
import * as exec from '@actions/exec';
import * as cache from '@actions/cache';

type SupportedPackageManagers = {
[prop: string]: PackageManagerInfo;
};
import * as glob from '@actions/glob';
import path from 'path';
import fs from 'fs';
import {unique} from './util';

export interface PackageManagerInfo {
name: string;
lockFilePatterns: Array<string>;
getCacheFolderCommand: string;
getCacheFolderPath: (projectDir?: string) => Promise<string>;
}

interface SupportedPackageManagers {
npm: PackageManagerInfo;
pnpm: PackageManagerInfo;
yarn: PackageManagerInfo;
}
export const supportedPackageManagers: SupportedPackageManagers = {
npm: {
name: 'npm',
lockFilePatterns: ['package-lock.json', 'npm-shrinkwrap.json', 'yarn.lock'],
getCacheFolderCommand: 'npm config get cache'
getCacheFolderPath: () =>
getCommandOutputNotEmpty(
'npm config get cache',
'Could not get npm cache folder path'
)
},
pnpm: {
name: 'pnpm',
lockFilePatterns: ['pnpm-lock.yaml'],
getCacheFolderCommand: 'pnpm store path --silent'
getCacheFolderPath: () =>
getCommandOutputNotEmpty(
'pnpm store path --silent',
'Could not get pnpm cache folder path'
)
},
yarn1: {
yarn: {
name: 'yarn',
lockFilePatterns: ['yarn.lock'],
getCacheFolderCommand: 'yarn cache dir'
},
yarn2: {
lockFilePatterns: ['yarn.lock'],
getCacheFolderCommand: 'yarn config get cacheFolder'
getCacheFolderPath: async projectDir => {
const yarnVersion = await getCommandOutputNotEmpty(
`yarn --version`,
'Could not retrieve version of yarn',
projectDir
);

core.debug(
`Consumed yarn version is ${yarnVersion} (working dir: "${
projectDir || ''
}")`
);

const stdOut = yarnVersion.startsWith('1.')
? await getCommandOutput('yarn cache dir', projectDir)
: await getCommandOutput('yarn config get cacheFolder', projectDir);

if (!stdOut) {
throw new Error(
`Could not get yarn cache folder path for ${projectDir}`
);
}
return stdOut;
}
}
};

export const getCommandOutput = async (toolCommand: string) => {
export const getCommandOutput = async (
toolCommand: string,
cwd?: string
): Promise<string> => {
let {stdout, stderr, exitCode} = await exec.getExecOutput(
toolCommand,
undefined,
{ignoreReturnCode: true}
{ignoreReturnCode: true, ...(cwd && {cwd})}
);

if (exitCode) {
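With this refactor each package manager exposes a getCacheFolderPath callback instead of a raw shell command string, and for yarn the answer can differ per project directory because the version probe runs inside that directory (yarn 1 vs. Yarn Berry). A hedged usage sketch reusing supportedPackageManagers from the hunk above; the sub-project names are invented:

  import {supportedPackageManagers} from './cache-utils';

  async function printYarnCacheDirs(): Promise<void> {
    // Hypothetical sub-projects, possibly on different yarn major versions.
    for (const dir of ['sub2', 'sub3']) {
      const cacheDir = await supportedPackageManagers.yarn.getCacheFolderPath(dir);
      console.log(`${dir} -> ${cacheDir}`);
    }
  }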
@@ -47,16 +86,15 @@ export const getCommandOutput = async (toolCommand: string) => {
return stdout.trim();
};

const getPackageManagerVersion = async (
packageManager: string,
command: string
) => {
const stdOut = await getCommandOutput(`${packageManager} ${command}`);

export const getCommandOutputNotEmpty = async (
toolCommand: string,
error: string,
cwd?: string
): Promise<string> => {
const stdOut = getCommandOutput(toolCommand, cwd);
if (!stdOut) {
throw new Error(`Could not retrieve version of ${packageManager}`);
throw new Error(error);
}

return stdOut;
};

@@ -66,35 +104,191 @@ export const getPackageManagerInfo = async (packageManager: string) => {
} else if (packageManager === 'pnpm') {
return supportedPackageManagers.pnpm;
} else if (packageManager === 'yarn') {
const yarnVersion = await getPackageManagerVersion('yarn', '--version');

core.debug(`Consumed yarn version is ${yarnVersion}`);

if (yarnVersion.startsWith('1.')) {
return supportedPackageManagers.yarn1;
} else {
return supportedPackageManagers.yarn2;
}
return supportedPackageManagers.yarn;
} else {
return null;
}
};

export const getCacheDirectoryPath = async (
packageManagerInfo: PackageManagerInfo,
packageManager: string
) => {
const stdOut = await getCommandOutput(
packageManagerInfo.getCacheFolderCommand
);
/**
* getProjectDirectoriesFromCacheDependencyPath is called twice during `restoreCache`
* - first through `getCacheDirectories`
* - second from `repoHasYarn3ManagedCache`
*
* it contains expensive IO operation and thus should be memoized
*/

if (!stdOut) {
throw new Error(`Could not get cache folder path for ${packageManager}`);
let projectDirectoriesMemoized: string[] | null = null;
/**
* unit test must reset memoized variables
*/
export const resetProjectDirectoriesMemoized = () =>
(projectDirectoriesMemoized = null);
/**
* Expands (converts) the string input `cache-dependency-path` to list of directories that
* may be project roots
* @param cacheDependencyPath - either a single string or multiline string with possible glob patterns
* expected to be the result of `core.getInput('cache-dependency-path')`
* @return list of directories and possible
*/
const getProjectDirectoriesFromCacheDependencyPath = async (
cacheDependencyPath: string
): Promise<string[]> => {
if (projectDirectoriesMemoized !== null) {
return projectDirectoriesMemoized;
}

core.debug(`${packageManager} path is ${stdOut}`);
const globber = await glob.create(cacheDependencyPath);
const cacheDependenciesPaths = await globber.glob();

return stdOut.trim();
const existingDirectories: string[] = cacheDependenciesPaths
.map(path.dirname)
.filter(unique())
.map(dirName => fs.realpathSync(dirName))
.filter(directory => fs.lstatSync(directory).isDirectory());

if (!existingDirectories.length)
core.warning(
`No existing directories found containing cache-dependency-path="${cacheDependencyPath}"`
);

projectDirectoriesMemoized = existingDirectories;
return existingDirectories;
};

/**
* Finds the cache directories configured for the repo if cache-dependency-path is not empty
* @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM
* @param cacheDependencyPath - either a single string or multiline string with possible glob patterns
* expected to be the result of `core.getInput('cache-dependency-path')`
* @return list of files on which the cache depends
*/
const getCacheDirectoriesFromCacheDependencyPath = async (
packageManagerInfo: PackageManagerInfo,
cacheDependencyPath: string
): Promise<string[]> => {
const projectDirectories = await getProjectDirectoriesFromCacheDependencyPath(
cacheDependencyPath
);
const cacheFoldersPaths = await Promise.all(
projectDirectories.map(async projectDirectory => {
const cacheFolderPath = await packageManagerInfo.getCacheFolderPath(
projectDirectory
);
core.debug(
`${packageManagerInfo.name}'s cache folder "${cacheFolderPath}" configured for the directory "${projectDirectory}"`
);
return cacheFolderPath;
})
);
// uniq in order to do not cache the same directories twice
return cacheFoldersPaths.filter(unique());
};

/**
* Finds the cache directories configured for the repo ignoring cache-dependency-path
* @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM
* @return list of files on which the cache depends
*/
const getCacheDirectoriesForRootProject = async (
packageManagerInfo: PackageManagerInfo
): Promise<string[]> => {
const cacheFolderPath = await packageManagerInfo.getCacheFolderPath();
core.debug(
`${packageManagerInfo.name}'s cache folder "${cacheFolderPath}" configured for the root directory`
);
return [cacheFolderPath];
};

/**
* A function to find the cache directories configured for the repo
* currently it handles only the case of PM=yarn && cacheDependencyPath is not empty
* @param packageManagerInfo - an object having getCacheFolderPath method specific to given PM
* @param cacheDependencyPath - either a single string or multiline string with possible glob patterns
* expected to be the result of `core.getInput('cache-dependency-path')`
* @return list of files on which the cache depends
*/
export const getCacheDirectories = async (
packageManagerInfo: PackageManagerInfo,
cacheDependencyPath: string
): Promise<string[]> => {
// For yarn, if cacheDependencyPath is set, ask information about cache folders in each project
// folder satisfied by cacheDependencyPath https://github.com/actions/setup-node/issues/488
if (packageManagerInfo.name === 'yarn' && cacheDependencyPath) {
return getCacheDirectoriesFromCacheDependencyPath(
packageManagerInfo,
cacheDependencyPath
);
}
return getCacheDirectoriesForRootProject(packageManagerInfo);
};

/**
* A function to check if the directory is a yarn project configured to manage
* obsolete dependencies in the local cache
* @param directory - a path to the folder
* @return - true if the directory's project is yarn managed
* - if there's .yarn/cache folder do not mess with the dependencies kept in the repo, return false
* - global cache is not managed by yarn @see https://yarnpkg.com/features/offline-cache, return false
* - if local cache is not explicitly enabled (not yarn3), return false
* - return true otherwise
*/
const projectHasYarnBerryManagedDependencies = async (
directory: string
): Promise<boolean> => {
const workDir = directory || process.env.GITHUB_WORKSPACE || '.';
core.debug(`check if "${workDir}" has locally managed yarn3 dependencies`);

// if .yarn/cache directory exists the cache is managed by version control system
const yarnCacheFile = path.join(workDir, '.yarn', 'cache');
if (
fs.existsSync(yarnCacheFile) &&
fs.lstatSync(yarnCacheFile).isDirectory()
) {
core.debug(
`"${workDir}" has .yarn/cache - dependencies are kept in the repository`
);
return Promise.resolve(false);
}

// NOTE: yarn1 returns 'undefined' with return code = 0
const enableGlobalCache = await getCommandOutput(
'yarn config get enableGlobalCache',
workDir
);
// only local cache is not managed by yarn
const managed = enableGlobalCache.includes('false');
if (managed) {
core.debug(`"${workDir}" dependencies are managed by yarn 3 locally`);
return true;
} else {
core.debug(`"${workDir}" dependencies are not managed by yarn 3 locally`);
return false;
}
};

/**
* A function to report the repo contains Yarn managed projects
* @param packageManagerInfo - used to make sure current package manager is yarn
* @param cacheDependencyPath - either a single string or multiline string with possible glob patterns
* expected to be the result of `core.getInput('cache-dependency-path')`
* @return - true if all project directories configured to be Yarn managed
*/
export const repoHasYarnBerryManagedDependencies = async (
packageManagerInfo: PackageManagerInfo,
cacheDependencyPath: string
): Promise<boolean> => {
if (packageManagerInfo.name !== 'yarn') return false;

const yarnDirs = cacheDependencyPath
? await getProjectDirectoriesFromCacheDependencyPath(cacheDependencyPath)
: [''];

const isManagedList = await Promise.all(
yarnDirs.map(projectHasYarnBerryManagedDependencies)
);

return isManagedList.every(Boolean);
};

export function isGhes(): boolean {
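Taken together, the new helpers expand the multiline cache-dependency-path input into project directories, collect one cache folder per directory, and separately decide whether every project keeps its dependencies in a locally managed Yarn Berry cache. A rough usage sketch (the input value is an example, not taken from the diff):

  import {
    getCacheDirectories,
    getPackageManagerInfo,
    repoHasYarnBerryManagedDependencies
  } from './cache-utils';

  async function resolveYarnCaching(): Promise<void> {
    const cacheDependencyPath = 'sub2/yarn.lock\nsub3/yarn.lock'; // example input
    const info = await getPackageManagerInfo('yarn');
    if (!info) return;

    const cachePaths = await getCacheDirectories(info, cacheDependencyPath);
    const locallyManaged = await repoHasYarnBerryManagedDependencies(
      info,
      cacheDependencyPath
    );
    console.log({cachePaths, locallyManaged});
  }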
src/constants.ts
@@ -6,7 +6,8 @@ export enum LockType {

export enum State {
CachePrimaryKey = 'CACHE_KEY',
CacheMatchedKey = 'CACHE_RESULT'
CacheMatchedKey = 'CACHE_RESULT',
CachePaths = 'CACHE_PATHS'
}

export enum Outputs {
src/distributions/base-distribution.ts
@@ -88,7 +88,11 @@ export default abstract class BaseDistribution {
}

protected findVersionInHostedToolCacheDirectory() {
return tc.find('node', this.nodeInfo.versionSpec, this.nodeInfo.arch);
return tc.find(
'node',
this.nodeInfo.versionSpec,
this.translateArchToDistUrl(this.nodeInfo.arch)
);
}

protected async getNodeJsVersions(): Promise<INodeVersion[]> {
11
src/util.ts
@@ -13,7 +13,7 @@ export function parseNodeVersionFile(contents: string): string {
}

if (!nodeVersion) {
const found = contents.match(/^(?:nodejs\s+)?v?(?<version>[^\s]+)$/m);
const found = contents.match(/^(?:node(js)?\s+)?v?(?<version>[^\s]+)$/m);
nodeVersion = found?.groups?.version;
}

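The widened pattern above accepts both the "nodejs <version>" spelling and the shorter "node <version>" spelling found in .tool-versions-style files, in addition to a bare version string. A quick check using the new regular expression copied verbatim (the sample inputs are invented):

  const pattern = /^(?:node(js)?\s+)?v?(?<version>[^\s]+)$/m;

  for (const contents of ['nodejs 16.5.0', 'node 16.5.0', 'v14.18.3']) {
    // prints 16.5.0, 16.5.0, 14.18.3
    console.log(contents.match(pattern)?.groups?.version);
  }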
@@ -61,3 +61,12 @@ async function getToolVersion(tool: string, options: string[]) {
return '';
}
}

export const unique = () => {
const encountered = new Set();
return (value: unknown): boolean => {
if (encountered.has(value)) return false;
encountered.add(value);
return true;
};
};
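unique() above returns a stateful predicate: each call creates a fresh Set, and the same instance must be reused across a single filter pass. For example:

  import {unique} from './util';

  const dirs = ['/repo/sub2', '/repo/sub3', '/repo/sub2'];
  const deduped = dirs.filter(unique()); // ['/repo/sub2', '/repo/sub3']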