Compare commits


48 Commits
v4.1.9 ... main

Author SHA1 Message Date
Ben De St Paer-Gotch
448e3f862a
Merge pull request #407 from actions/nebuk89-patch-1
Update README.md
2025-06-17 14:18:27 +01:00
Ben De St Paer-Gotch
47225c44b3
Update README.md 2025-06-16 10:11:52 +01:00
Rob Herley
d3f86a106a
Merge pull request #404 from actions/robherley/v4.3.0
Prep for v4.3.0 release
2025-04-24 12:25:03 -04:00
Rob Herley
fc02353415
prep for v4.3.0 release 2025-04-24 11:21:41 -04:00
Josh Gross
77454371a4
Merge pull request #402 from actions/joshmgross/download-by-id-example
Fix workflow example for downloading by artifact ID
2025-04-24 11:04:38 -04:00
Josh Gross
84fc7a0a35
Remove path filters from Check dist workflow 2025-04-23 10:32:04 -04:00
Josh Gross
67f2bc382f
Fix workflow example for downloading by artifact ID 2025-04-23 10:27:20 -04:00
Grant Birkinbine
8ea3c2c174
Merge pull request #401 from actions/download-by-id
feat: implement new `artifact-ids` input
2025-04-22 08:16:56 -07:00
GrantBirki
d219c630f6
add supporting unit tests for artifact downloads with ids 2025-04-17 13:14:21 -07:00
GrantBirki
54124fbd88
revert getArtifact() changes - for now we have to list and filter by artifact-ids until a getArtifactById() public method exists 2025-04-17 12:30:12 -07:00
GrantBirki
b83057b90d
bundle 2025-04-17 12:20:46 -07:00
GrantBirki
171183c7dc
use the same artifactClient.getArtifact structure as seen above in isSingleArtifactDownload logic 2025-04-17 12:18:37 -07:00
GrantBirki
e463631f66
bundle 2025-04-17 11:49:32 -07:00
GrantBirki
ec378bcca1
when only one artifact-id is given, use getArtifact and check the resulting id returned 2025-04-17 11:48:44 -07:00
GrantBirki
42aef06f22
apply https://github.com/actions/download-artifact/pull/401#discussion_r2048225048 suggestion 2025-04-17 09:55:13 -07:00
Grant Birkinbine
ac35f995fe implement new artifact-ids input 2025-04-17 04:47:03 +00:00
Josh Gross
95815c38cf
Merge pull request #391 from GhadimiR/main
Fix bug introduced in 4.2.0
2025-03-19 11:14:44 -04:00
Ryan Ghadimi
278fca438a Move log statements 2025-03-19 15:06:13 +00:00
Ryan Ghadimi
68909842a1
Merge branch 'main' into main 2025-03-19 15:04:09 +00:00
Josh Gross
f9415c0ec3 Run unit tests in CI 2025-03-19 15:01:26 +00:00
Josh Gross
76a6eb5cbc
Merge pull request #392 from GhadimiR/add_unit_tests
Add unit tests
2025-03-19 10:52:21 -04:00
Josh Gross
a2426d7c45
Merge branch 'main' into add_unit_tests 2025-03-19 10:48:52 -04:00
Ryan Ghadimi
3ffa694f6f lint 2025-03-19 12:26:46 +00:00
Ryan Ghadimi
53f6aa5f93 Add extra assertion to download single artifact test 2025-03-19 12:25:37 +00:00
Ryan Ghadimi
b456700053 lint 2025-03-19 11:31:11 +00:00
Ryan Ghadimi
9eab798a98 Configure tsconfig 2025-03-19 11:25:29 +00:00
Ryan Ghadimi
a39a661f39 Unfix error 2025-03-19 11:22:27 +00:00
Ryan Ghadimi
9a869e9c49 Lint 2025-03-19 11:21:30 +00:00
Ryan Ghadimi
96a6f165f4 Add tests & test dependencies 2025-03-19 11:17:03 +00:00
Ryan Ghadimi
df4ad15cb8
Merge branch 'main' into main 2025-03-18 22:02:13 +00:00
Ryan Ghadimi
c7cfc3a2a3 dist & package.json 2025-03-18 21:28:08 +00:00
Ryan Ghadimi
2439186eed Fix bug introduced in 4.2.0 2025-03-18 21:25:22 +00:00
JoannaaKL
b14cf4c926
Merge pull request #383 from GhadimiR/main
Bump artifact version, do digest check
2025-03-18 16:31:00 +01:00
Ryan Ghadimi
c5804ef743 Update dist 2025-03-18 15:23:08 +00:00
Ryan Ghadimi
956811a503 Update artifact to 2.3.2 2025-03-18 15:21:17 +00:00
Ryan Ghadimi
af3c6d3e5b Update artifact license 2025-03-18 13:23:53 +00:00
Ryan Ghadimi
4dd97f8f21 Bump artifact package 2025-03-18 11:57:35 +00:00
Ryan Ghadimi
da9985dde6
Merge branch 'main' into main 2025-03-17 10:26:24 +00:00
Larissa Fortuna
81ba80daa4
Merge pull request #384 from actions/lkfortuna-patch-1
Update README.md
2025-03-13 08:12:46 -07:00
Larissa Fortuna
727afbf2b0
Update README.md
removing deprecation warning
2025-03-12 15:02:16 -07:00
Ryan Ghadimi
56c2d7ea8c Make work as intended 2025-03-12 16:21:59 +00:00
Ryan Ghadimi
7797bfcd59 run release 2025-03-12 16:20:09 +00:00
Ryan Ghadimi
9ff67cb2d2 Break the thing, also log the expected digest 2025-03-12 16:17:52 +00:00
Ryan Ghadimi
049eba1e9a unbreak testing code 2025-03-12 16:04:02 +00:00
Ryan Ghadimi
503e7a18ae Refactor loop, break for testing 2025-03-12 15:40:05 +00:00
Ryan Ghadimi
a8a786b097 update dist 2025-03-12 14:13:07 +00:00
Ryan Ghadimi
24aef17bbf Refactor loop 2025-03-12 14:02:51 +00:00
Ryan Ghadimi
b81a615862 Bump artifact version, do digest check 2025-03-12 13:47:20 +00:00
14 changed files with 9247 additions and 162 deletions

.github/workflows/check-dist.yml

@@ -10,11 +10,7 @@ on:
push:
branches:
- main
paths-ignore:
- '**.md'
pull_request:
paths-ignore:
- '**.md'
workflow_dispatch:
jobs:

.github/workflows/test.yml

@@ -40,6 +40,9 @@ jobs:
- name: Format
run: npm run format-check
- name: Run Unit Tests
run: npm test
- name: Create artifacts
run: |
mkdir -p path/to/artifact-A

.licenses/npm/@actions/artifact.dep.yml

@@ -1,6 +1,6 @@
---
name: "@actions/artifact"
version: 2.2.2
version: 2.3.2
type: npm
summary: Actions artifact lib
homepage: https://github.com/actions/toolkit/tree/main/packages/artifact

README.md

@@ -1,11 +1,5 @@
# `@actions/download-artifact`
> [!WARNING]
> actions/download-artifact@v3 is scheduled for deprecation on **November 30, 2024**. [Learn more.](https://github.blog/changelog/2024-04-16-deprecation-notice-v3-of-the-artifact-actions/)
> Similarly, v1/v2 are scheduled for deprecation on **June 30, 2024**.
> Please update your workflow to use v4 of the artifact actions.
> This deprecation will not impact any existing versions of GitHub Enterprise Server being used by customers.
Download [Actions Artifacts](https://docs.github.com/en/actions/using-workflows/storing-workflow-data-as-artifacts) from your Workflow Runs. Internally powered by the [@actions/artifact](https://github.com/actions/toolkit/tree/main/packages/artifact) package.
See also [upload-artifact](https://github.com/actions/upload-artifact).
@@ -19,6 +13,7 @@ See also [upload-artifact](https://github.com/actions/upload-artifact).
- [Outputs](#outputs)
- [Examples](#examples)
- [Download Single Artifact](#download-single-artifact)
- [Download Artifacts by ID](#download-artifacts-by-id)
- [Download All Artifacts](#download-all-artifacts)
- [Download multiple (filtered) Artifacts to the same directory](#download-multiple-filtered-artifacts-to-the-same-directory)
- [Download Artifacts from other Workflow Runs or Repositories](#download-artifacts-from-other-workflow-runs-or-repositories)
@@ -47,6 +42,24 @@ For more information, see the [`@actions/artifact`](https://github.com/actions/t
For assistance with breaking changes, see [MIGRATION.md](docs/MIGRATION.md).
## Note
Thank you for your interest in this GitHub repo; however, right now we are not taking contributions.
We continue to focus our resources on strategic areas that help our customers be successful while making developers' lives easier. While GitHub Actions remains a key part of this vision, we are allocating resources towards other areas of Actions and are not taking contributions to this repository at this time. The GitHub public roadmap is the best place to follow along for any updates on features we're working on and what stage they're in.
We are taking the following steps to better direct requests related to GitHub Actions, including:
1. We will be directing questions and support requests to our [Community Discussions area](https://github.com/orgs/community/discussions/categories/actions).
2. High-priority bugs can be reported through Community Discussions, or to our support team at https://support.github.com/contact/bug-report.
3. Security Issues should be handled as per our [security.md](SECURITY.md).
We will still provide security updates for this project and fix major breaking changes during this time.
You are still welcome to raise bugs in this repo.
## Usage
### Inputs
@@ -59,6 +72,11 @@ For assistance with breaking changes, see [MIGRATION.md](docs/MIGRATION.md).
# Optional.
name:
# IDs of the artifacts to download, comma-separated.
# Either inputs `artifact-ids` or `name` can be used, but not both.
# Optional.
artifact-ids:
# Destination path. Supports basic tilde expansion.
# Optional. Default is $GITHUB_WORKSPACE
path:
@@ -123,6 +141,32 @@ steps:
run: ls -R your/destination/dir
```
### Download Artifacts by ID
The `artifact-ids` input allows downloading artifacts by their unique IDs rather than by name. This is particularly useful when working with immutable artifacts from `actions/upload-artifact@v4`, which assigns a unique ID to each artifact.
```yaml
steps:
- uses: actions/download-artifact@v4
with:
artifact-ids: 12345
- name: Display structure of downloaded files
run: ls -R
```
Multiple artifacts can be downloaded by providing a comma-separated list of IDs:
```yaml
steps:
- uses: actions/download-artifact@v4
with:
artifact-ids: 12345,67890
path: path/to/artifacts
- name: Display structure of downloaded files
run: ls -R path/to/artifacts
```
This will download multiple artifacts to separate directories (similar to downloading multiple artifacts by name).
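Based on the path handling in the `src/download-artifact.ts` diff later in this comparison (a per-artifact subdirectory is only used when `merge-multiple` is off), the documented `merge-multiple` input should flatten ID-based downloads into a single directory as well. A sketch, reusing the example IDs from above:
```yaml
steps:
  - uses: actions/download-artifact@v4
    with:
      artifact-ids: 12345,67890
      path: path/to/artifacts
      # Assumption: merge-multiple applies to downloads by ID too, per the
      # shared path logic in src/download-artifact.ts
      merge-multiple: true
  - name: Display structure of downloaded files
    run: ls -R path/to/artifacts
```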
### Download All Artifacts

__tests__/download.test.ts (new file, 374 additions)

@@ -0,0 +1,374 @@
import * as core from '@actions/core'
import artifact, {ArtifactNotFoundError} from '@actions/artifact'
import {run} from '../src/download-artifact'
import {Inputs} from '../src/constants'
jest.mock('@actions/github', () => ({
context: {
repo: {
owner: 'actions',
repo: 'toolkit'
},
runId: 123,
serverUrl: 'https://github.com'
}
}))
jest.mock('@actions/core')
/* eslint-disable no-unused-vars */ /* eslint-disable @typescript-eslint/no-explicit-any */
const mockInputs = (overrides?: Partial<{[K in Inputs]?: any}>) => {
const inputs = {
[Inputs.Name]: 'artifact-name',
[Inputs.Path]: '/some/artifact/path',
[Inputs.GitHubToken]: 'warn',
[Inputs.Repository]: 'owner/some-repository',
[Inputs.RunID]: 'some-run-id',
[Inputs.Pattern]: 'some-pattern',
...overrides
}
;(core.getInput as jest.Mock).mockImplementation((name: string) => {
return inputs[name]
})
;(core.getBooleanInput as jest.Mock).mockImplementation((name: string) => {
return inputs[name]
})
return inputs
}
describe('download', () => {
beforeEach(async () => {
mockInputs()
jest.clearAllMocks()
// Mock artifact client methods
jest
.spyOn(artifact, 'listArtifacts')
.mockImplementation(() => Promise.resolve({artifacts: []}))
jest.spyOn(artifact, 'getArtifact').mockImplementation(name => {
throw new ArtifactNotFoundError(`Artifact '${name}' not found`)
})
jest
.spyOn(artifact, 'downloadArtifact')
.mockImplementation(() => Promise.resolve({digestMismatch: false}))
})
test('downloads a single artifact by name', async () => {
const mockArtifact = {
id: 123,
name: 'artifact-name',
size: 1024,
digest: 'abc123'
}
jest
.spyOn(artifact, 'getArtifact')
.mockImplementation(() => Promise.resolve({artifact: mockArtifact}))
await run()
expect(artifact.downloadArtifact).toHaveBeenCalledWith(
mockArtifact.id,
expect.objectContaining({
expectedHash: mockArtifact.digest
})
)
expect(core.info).toHaveBeenCalledWith('Total of 1 artifact(s) downloaded')
expect(core.setOutput).toHaveBeenCalledWith(
'download-path',
expect.any(String)
)
expect(core.info).toHaveBeenCalledWith(
'Download artifact has finished successfully'
)
})
test('downloads multiple artifacts when no name or pattern provided', async () => {
jest.clearAllMocks()
mockInputs({
[Inputs.Name]: '',
[Inputs.Pattern]: ''
})
const mockArtifacts = [
{id: 123, name: 'artifact1', size: 1024, digest: 'abc123'},
{id: 456, name: 'artifact2', size: 2048, digest: 'def456'}
]
// Set up artifact mock after clearing mocks
jest
.spyOn(artifact, 'listArtifacts')
.mockImplementation(() => Promise.resolve({artifacts: mockArtifacts}))
// Reset downloadArtifact mock as well
jest
.spyOn(artifact, 'downloadArtifact')
.mockImplementation(() => Promise.resolve({digestMismatch: false}))
await run()
expect(core.info).toHaveBeenCalledWith(
'No input name, artifact-ids or pattern filtered specified, downloading all artifacts'
)
expect(core.info).toHaveBeenCalledWith('Total of 2 artifact(s) downloaded')
expect(artifact.downloadArtifact).toHaveBeenCalledTimes(2)
})
test('sets download path output even when no artifacts are found', async () => {
mockInputs({[Inputs.Name]: ''})
await run()
expect(core.setOutput).toHaveBeenCalledWith(
'download-path',
expect.any(String)
)
expect(core.info).toHaveBeenCalledWith(
'Download artifact has finished successfully'
)
expect(core.info).toHaveBeenCalledWith('Total of 0 artifact(s) downloaded')
})
test('filters artifacts by pattern', async () => {
const mockArtifacts = [
{id: 123, name: 'test-artifact', size: 1024, digest: 'abc123'},
{id: 456, name: 'prod-artifact', size: 2048, digest: 'def456'}
]
jest
.spyOn(artifact, 'listArtifacts')
.mockImplementation(() => Promise.resolve({artifacts: mockArtifacts}))
mockInputs({
[Inputs.Name]: '',
[Inputs.Pattern]: 'test-*'
})
await run()
expect(artifact.downloadArtifact).toHaveBeenCalledTimes(1)
expect(artifact.downloadArtifact).toHaveBeenCalledWith(
123,
expect.anything()
)
})
test('uses token and repository information when provided', async () => {
const token = 'ghp_testtoken123'
mockInputs({
[Inputs.Name]: '',
[Inputs.GitHubToken]: token,
[Inputs.Repository]: 'myorg/myrepo',
[Inputs.RunID]: '789'
})
jest
.spyOn(artifact, 'listArtifacts')
.mockImplementation(() => Promise.resolve({artifacts: []}))
await run()
expect(artifact.listArtifacts).toHaveBeenCalledWith(
expect.objectContaining({
findBy: {
token,
workflowRunId: 789,
repositoryName: 'myrepo',
repositoryOwner: 'myorg'
}
})
)
})
test('throws error when repository format is invalid', async () => {
mockInputs({
[Inputs.GitHubToken]: 'some-token',
[Inputs.Repository]: 'invalid-format' // Missing the owner/repo format
})
await expect(run()).rejects.toThrow(
"Invalid repository: 'invalid-format'. Must be in format owner/repo"
)
})
test('warns when digest validation fails', async () => {
const mockArtifact = {
id: 123,
name: 'corrupted-artifact',
size: 1024,
digest: 'abc123'
}
jest
.spyOn(artifact, 'getArtifact')
.mockImplementation(() => Promise.resolve({artifact: mockArtifact}))
jest
.spyOn(artifact, 'downloadArtifact')
.mockImplementation(() => Promise.resolve({digestMismatch: true}))
await run()
expect(core.warning).toHaveBeenCalledWith(
expect.stringContaining('digest validation failed')
)
})
test('downloads a single artifact by ID', async () => {
const mockArtifact = {
id: 456,
name: 'artifact-by-id',
size: 1024,
digest: 'def456'
}
mockInputs({
[Inputs.Name]: '',
[Inputs.Pattern]: '',
[Inputs.ArtifactIds]: '456'
})
jest.spyOn(artifact, 'listArtifacts').mockImplementation(() =>
Promise.resolve({
artifacts: [mockArtifact]
})
)
await run()
expect(core.info).toHaveBeenCalledWith('Downloading artifacts by ID')
expect(core.debug).toHaveBeenCalledWith('Parsed artifact IDs: ["456"]')
expect(artifact.downloadArtifact).toHaveBeenCalledTimes(1)
expect(artifact.downloadArtifact).toHaveBeenCalledWith(
456,
expect.objectContaining({
expectedHash: mockArtifact.digest
})
)
expect(core.info).toHaveBeenCalledWith('Total of 1 artifact(s) downloaded')
})
test('downloads multiple artifacts by ID', async () => {
const mockArtifacts = [
{id: 123, name: 'first-artifact', size: 1024, digest: 'abc123'},
{id: 456, name: 'second-artifact', size: 2048, digest: 'def456'},
{id: 789, name: 'third-artifact', size: 3072, digest: 'ghi789'}
]
mockInputs({
[Inputs.Name]: '',
[Inputs.Pattern]: '',
[Inputs.ArtifactIds]: '123, 456, 789'
})
jest.spyOn(artifact, 'listArtifacts').mockImplementation(() =>
Promise.resolve({
artifacts: mockArtifacts
})
)
await run()
expect(core.info).toHaveBeenCalledWith('Downloading artifacts by ID')
expect(core.debug).toHaveBeenCalledWith(
'Parsed artifact IDs: ["123","456","789"]'
)
expect(artifact.downloadArtifact).toHaveBeenCalledTimes(3)
mockArtifacts.forEach(mockArtifact => {
expect(artifact.downloadArtifact).toHaveBeenCalledWith(
mockArtifact.id,
expect.objectContaining({
expectedHash: mockArtifact.digest
})
)
})
expect(core.info).toHaveBeenCalledWith('Total of 3 artifact(s) downloaded')
})
test('warns when some artifact IDs are not found', async () => {
const mockArtifacts = [
{id: 123, name: 'found-artifact', size: 1024, digest: 'abc123'}
]
mockInputs({
[Inputs.Name]: '',
[Inputs.Pattern]: '',
[Inputs.ArtifactIds]: '123, 456, 789'
})
jest.spyOn(artifact, 'listArtifacts').mockImplementation(() =>
Promise.resolve({
artifacts: mockArtifacts
})
)
await run()
expect(core.warning).toHaveBeenCalledWith(
'Could not find the following artifact IDs: 456, 789'
)
expect(core.debug).toHaveBeenCalledWith('Found 1 artifacts by ID')
expect(artifact.downloadArtifact).toHaveBeenCalledTimes(1)
})
test('throws error when no artifacts with requested IDs are found', async () => {
mockInputs({
[Inputs.Name]: '',
[Inputs.Pattern]: '',
[Inputs.ArtifactIds]: '123, 456'
})
jest.spyOn(artifact, 'listArtifacts').mockImplementation(() =>
Promise.resolve({
artifacts: []
})
)
await expect(run()).rejects.toThrow(
'None of the provided artifact IDs were found'
)
})
test('throws error when artifact-ids input is empty', async () => {
mockInputs({
[Inputs.Name]: '',
[Inputs.Pattern]: '',
[Inputs.ArtifactIds]: ' '
})
await expect(run()).rejects.toThrow(
"No valid artifact IDs provided in 'artifact-ids' input"
)
})
test('throws error when some artifact IDs are not valid numbers', async () => {
mockInputs({
[Inputs.Name]: '',
[Inputs.Pattern]: '',
[Inputs.ArtifactIds]: '123, abc, 456'
})
await expect(run()).rejects.toThrow(
"Invalid artifact ID: 'abc'. Must be a number."
)
})
test('throws error when both name and artifact-ids are provided', async () => {
mockInputs({
[Inputs.Name]: 'some-artifact',
[Inputs.ArtifactIds]: '123'
})
await expect(run()).rejects.toThrow(
"Inputs 'name' and 'artifact-ids' cannot be used together. Please specify only one."
)
})
})

action.yml

@@ -5,6 +5,9 @@ inputs:
name:
description: 'Name of the artifact to download. If unspecified, all artifacts for the run are downloaded.'
required: false
artifact-ids:
description: 'IDs of the artifacts to download, comma-separated. Either inputs `artifact-ids` or `name` can be used, but not both.'
required: false
path:
description: 'Destination path. Supports basic tilde expansion. Defaults to $GITHUB_WORKSPACE'
required: false

dist/index.js (vendored, 481 changes)

@@ -824,7 +824,7 @@ __exportStar(__nccwpck_require__(49773), exports);
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.ArtifactService = exports.DeleteArtifactResponse = exports.DeleteArtifactRequest = exports.GetSignedArtifactURLResponse = exports.GetSignedArtifactURLRequest = exports.ListArtifactsResponse_MonolithArtifact = exports.ListArtifactsResponse = exports.ListArtifactsRequest = exports.FinalizeArtifactResponse = exports.FinalizeArtifactRequest = exports.CreateArtifactResponse = exports.CreateArtifactRequest = void 0;
exports.ArtifactService = exports.DeleteArtifactResponse = exports.DeleteArtifactRequest = exports.GetSignedArtifactURLResponse = exports.GetSignedArtifactURLRequest = exports.ListArtifactsResponse_MonolithArtifact = exports.ListArtifactsResponse = exports.ListArtifactsRequest = exports.FinalizeArtifactResponse = exports.FinalizeArtifactRequest = exports.CreateArtifactResponse = exports.CreateArtifactRequest = exports.FinalizeMigratedArtifactResponse = exports.FinalizeMigratedArtifactRequest = exports.MigrateArtifactResponse = exports.MigrateArtifactRequest = void 0;
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
// @generated from protobuf file "results/api/v1/artifact.proto" (package "github.actions.results.api.v1", syntax proto3)
// tslint:disable
@@ -838,6 +838,236 @@ const wrappers_1 = __nccwpck_require__(8626);
const wrappers_2 = __nccwpck_require__(8626);
const timestamp_1 = __nccwpck_require__(54622);
// @generated message type with reflection information, may provide speed optimized methods
class MigrateArtifactRequest$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.MigrateArtifactRequest", [
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 2, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "expires_at", kind: "message", T: () => timestamp_1.Timestamp }
]);
}
create(value) {
const message = { workflowRunBackendId: "", name: "" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* string workflow_run_backend_id */ 1:
message.workflowRunBackendId = reader.string();
break;
case /* string name */ 2:
message.name = reader.string();
break;
case /* google.protobuf.Timestamp expires_at */ 3:
message.expiresAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* string workflow_run_backend_id = 1; */
if (message.workflowRunBackendId !== "")
writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId);
/* string name = 2; */
if (message.name !== "")
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.name);
/* google.protobuf.Timestamp expires_at = 3; */
if (message.expiresAt)
timestamp_1.Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(3, runtime_1.WireType.LengthDelimited).fork(), options).join();
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.MigrateArtifactRequest
*/
exports.MigrateArtifactRequest = new MigrateArtifactRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class MigrateArtifactResponse$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.MigrateArtifactResponse", [
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
{ no: 2, name: "signed_upload_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value) {
const message = { ok: false, signedUploadUrl: "" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bool ok */ 1:
message.ok = reader.bool();
break;
case /* string signed_upload_url */ 2:
message.signedUploadUrl = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* bool ok = 1; */
if (message.ok !== false)
writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
/* string signed_upload_url = 2; */
if (message.signedUploadUrl !== "")
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedUploadUrl);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.MigrateArtifactResponse
*/
exports.MigrateArtifactResponse = new MigrateArtifactResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class FinalizeMigratedArtifactRequest$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.FinalizeMigratedArtifactRequest", [
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 2, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
]);
}
create(value) {
const message = { workflowRunBackendId: "", name: "", size: "0" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* string workflow_run_backend_id */ 1:
message.workflowRunBackendId = reader.string();
break;
case /* string name */ 2:
message.name = reader.string();
break;
case /* int64 size */ 3:
message.size = reader.int64().toString();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* string workflow_run_backend_id = 1; */
if (message.workflowRunBackendId !== "")
writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId);
/* string name = 2; */
if (message.name !== "")
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.name);
/* int64 size = 3; */
if (message.size !== "0")
writer.tag(3, runtime_1.WireType.Varint).int64(message.size);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactRequest
*/
exports.FinalizeMigratedArtifactRequest = new FinalizeMigratedArtifactRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class FinalizeMigratedArtifactResponse$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.FinalizeMigratedArtifactResponse", [
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
{ no: 2, name: "artifact_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
]);
}
create(value) {
const message = { ok: false, artifactId: "0" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bool ok */ 1:
message.ok = reader.bool();
break;
case /* int64 artifact_id */ 2:
message.artifactId = reader.int64().toString();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* bool ok = 1; */
if (message.ok !== false)
writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
/* int64 artifact_id = 2; */
if (message.artifactId !== "0")
writer.tag(2, runtime_1.WireType.Varint).int64(message.artifactId);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeMigratedArtifactResponse
*/
exports.FinalizeMigratedArtifactResponse = new FinalizeMigratedArtifactResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class CreateArtifactRequest$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.CreateArtifactRequest", [
@@ -1219,7 +1449,8 @@ class ListArtifactsResponse_MonolithArtifact$Type extends runtime_5.MessageType
{ no: 3, name: "database_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
{ no: 4, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 5, name: "size", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
{ no: 6, name: "created_at", kind: "message", T: () => timestamp_1.Timestamp }
{ no: 6, name: "created_at", kind: "message", T: () => timestamp_1.Timestamp },
{ no: 7, name: "digest", kind: "message", T: () => wrappers_2.StringValue }
]);
}
create(value) {
@@ -1252,6 +1483,9 @@ class ListArtifactsResponse_MonolithArtifact$Type extends runtime_5.MessageType
case /* google.protobuf.Timestamp created_at */ 6:
message.createdAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
break;
case /* google.protobuf.StringValue digest */ 7:
message.digest = wrappers_2.StringValue.internalBinaryRead(reader, reader.uint32(), options, message.digest);
break;
default:
let u = options.readUnknownField;
if (u === "throw")
@@ -1282,6 +1516,9 @@ class ListArtifactsResponse_MonolithArtifact$Type extends runtime_5.MessageType
/* google.protobuf.Timestamp created_at = 6; */
if (message.createdAt)
timestamp_1.Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, runtime_1.WireType.LengthDelimited).fork(), options).join();
/* google.protobuf.StringValue digest = 7; */
if (message.digest)
wrappers_2.StringValue.internalBinaryWrite(message.digest, writer.tag(7, runtime_1.WireType.LengthDelimited).fork(), options).join();
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
@@ -1523,7 +1760,9 @@ exports.ArtifactService = new runtime_rpc_1.ServiceType("github.actions.results.
{ name: "FinalizeArtifact", options: {}, I: exports.FinalizeArtifactRequest, O: exports.FinalizeArtifactResponse },
{ name: "ListArtifacts", options: {}, I: exports.ListArtifactsRequest, O: exports.ListArtifactsResponse },
{ name: "GetSignedArtifactURL", options: {}, I: exports.GetSignedArtifactURLRequest, O: exports.GetSignedArtifactURLResponse },
{ name: "DeleteArtifact", options: {}, I: exports.DeleteArtifactRequest, O: exports.DeleteArtifactResponse }
{ name: "DeleteArtifact", options: {}, I: exports.DeleteArtifactRequest, O: exports.DeleteArtifactResponse },
{ name: "MigrateArtifact", options: {}, I: exports.MigrateArtifactRequest, O: exports.MigrateArtifactResponse },
{ name: "FinalizeMigratedArtifact", options: {}, I: exports.FinalizeMigratedArtifactRequest, O: exports.FinalizeMigratedArtifactResponse }
]);
//# sourceMappingURL=artifact.js.map
@@ -1920,6 +2159,8 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.downloadArtifactInternal = exports.downloadArtifactPublic = exports.streamExtractExternal = void 0;
const promises_1 = __importDefault(__nccwpck_require__(73292));
const crypto = __importStar(__nccwpck_require__(6113));
const stream = __importStar(__nccwpck_require__(12781));
const github = __importStar(__nccwpck_require__(95438));
const core = __importStar(__nccwpck_require__(42186));
const httpClient = __importStar(__nccwpck_require__(96255));
@@ -1956,8 +2197,7 @@ function streamExtract(url, directory) {
let retryCount = 0;
while (retryCount < 5) {
try {
yield streamExtractExternal(url, directory);
return;
return yield streamExtractExternal(url, directory);
}
catch (error) {
retryCount++;
@@ -1977,12 +2217,18 @@ function streamExtractExternal(url, directory) {
throw new Error(`Unexpected HTTP response from blob storage: ${response.message.statusCode} ${response.message.statusMessage}`);
}
const timeout = 30 * 1000; // 30 seconds
let sha256Digest = undefined;
return new Promise((resolve, reject) => {
const timerFn = () => {
response.message.destroy(new Error(`Blob storage chunk did not respond in ${timeout}ms`));
};
const timer = setTimeout(timerFn, timeout);
response.message
const hashStream = crypto.createHash('sha256').setEncoding('hex');
const passThrough = new stream.PassThrough();
response.message.pipe(passThrough);
passThrough.pipe(hashStream);
const extractStream = passThrough;
extractStream
.on('data', () => {
timer.refresh();
})
@@ -1994,7 +2240,12 @@
.pipe(unzip_stream_1.default.Extract({ path: directory }))
.on('close', () => {
clearTimeout(timer);
resolve();
if (hashStream) {
hashStream.end();
sha256Digest = hashStream.read();
core.info(`SHA256 digest of downloaded artifact is ${sha256Digest}`);
}
resolve({ sha256Digest: `sha256:${sha256Digest}` });
})
.on('error', (error) => {
reject(error);
@@ -2007,6 +2258,7 @@ function downloadArtifactPublic(artifactId, repositoryOwner, repositoryName, tok
return __awaiter(this, void 0, void 0, function* () {
const downloadPath = yield resolveOrCreateDirectory(options === null || options === void 0 ? void 0 : options.path);
const api = github.getOctokit(token);
let digestMismatch = false;
core.info(`Downloading artifact '${artifactId}' from '${repositoryOwner}/${repositoryName}'`);
const { headers, status } = yield api.rest.actions.downloadArtifact({
owner: repositoryOwner,
@@ -2027,13 +2279,20 @@
core.info(`Redirecting to blob download url: ${scrubQueryParameters(location)}`);
try {
core.info(`Starting download of artifact to: ${downloadPath}`);
yield streamExtract(location, downloadPath);
const extractResponse = yield streamExtract(location, downloadPath);
core.info(`Artifact download completed successfully.`);
if (options === null || options === void 0 ? void 0 : options.expectedHash) {
if ((options === null || options === void 0 ? void 0 : options.expectedHash) !== extractResponse.sha256Digest) {
digestMismatch = true;
core.debug(`Computed digest: ${extractResponse.sha256Digest}`);
core.debug(`Expected digest: ${options.expectedHash}`);
}
}
}
catch (error) {
throw new Error(`Unable to download and extract artifact: ${error.message}`);
}
return { downloadPath };
return { downloadPath, digestMismatch };
});
}
exports.downloadArtifactPublic = downloadArtifactPublic;
@@ -2041,6 +2300,7 @@ function downloadArtifactInternal(artifactId, options) {
return __awaiter(this, void 0, void 0, function* () {
const downloadPath = yield resolveOrCreateDirectory(options === null || options === void 0 ? void 0 : options.path);
const artifactClient = (0, artifact_twirp_client_1.internalArtifactTwirpClient)();
let digestMismatch = false;
const { workflowRunBackendId, workflowJobRunBackendId } = (0, util_1.getBackendIdsFromToken)();
const listReq = {
workflowRunBackendId,
@@ -2063,13 +2323,20 @@
core.info(`Redirecting to blob download url: ${scrubQueryParameters(signedUrl)}`);
try {
core.info(`Starting download of artifact to: ${downloadPath}`);
yield streamExtract(signedUrl, downloadPath);
const extractResponse = yield streamExtract(signedUrl, downloadPath);
core.info(`Artifact download completed successfully.`);
if (options === null || options === void 0 ? void 0 : options.expectedHash) {
if ((options === null || options === void 0 ? void 0 : options.expectedHash) !== extractResponse.sha256Digest) {
digestMismatch = true;
core.debug(`Computed digest: ${extractResponse.sha256Digest}`);
core.debug(`Expected digest: ${options.expectedHash}`);
}
}
}
catch (error) {
throw new Error(`Unable to download and extract artifact: ${error.message}`);
}
return { downloadPath };
return { downloadPath, digestMismatch };
});
}
exports.downloadArtifactInternal = downloadArtifactInternal;
@@ -2175,13 +2442,17 @@ function getArtifactPublic(artifactName, workflowRunId, repositoryOwner, reposit
name: artifact.name,
id: artifact.id,
size: artifact.size_in_bytes,
createdAt: artifact.created_at ? new Date(artifact.created_at) : undefined
createdAt: artifact.created_at
? new Date(artifact.created_at)
: undefined,
digest: artifact.digest
}
};
});
}
exports.getArtifactPublic = getArtifactPublic;
function getArtifactInternal(artifactName) {
var _a;
return __awaiter(this, void 0, void 0, function* () {
const artifactClient = (0, artifact_twirp_client_1.internalArtifactTwirpClient)();
const { workflowRunBackendId, workflowJobRunBackendId } = (0, util_1.getBackendIdsFromToken)();
@@ -2208,7 +2479,8 @@ function getArtifactInternal(artifactName) {
size: Number(artifact.size),
createdAt: artifact.createdAt
? generated_1.Timestamp.toDate(artifact.createdAt)
: undefined
: undefined,
digest: (_a = artifact.digest) === null || _a === void 0 ? void 0 : _a.value
}
};
});
@@ -2262,7 +2534,7 @@ function listArtifactsPublic(workflowRunId, repositoryOwner, repositoryName, tok
};
const github = (0, github_1.getOctokit)(token, opts, plugin_retry_1.retry, plugin_request_log_1.requestLog);
let currentPageNumber = 1;
const { data: listArtifactResponse } = yield github.rest.actions.listWorkflowRunArtifacts({
const { data: listArtifactResponse } = yield github.request('GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts', {
owner: repositoryOwner,
repo: repositoryName,
run_id: workflowRunId,
@@ -2281,14 +2553,18 @@
name: artifact.name,
id: artifact.id,
size: artifact.size_in_bytes,
createdAt: artifact.created_at ? new Date(artifact.created_at) : undefined
createdAt: artifact.created_at
? new Date(artifact.created_at)
: undefined,
digest: artifact.digest
});
}
// Move to the next page
currentPageNumber++;
// Iterate over any remaining pages
for (currentPageNumber; currentPageNumber < numberOfPages; currentPageNumber++) {
currentPageNumber++;
(0, core_1.debug)(`Fetching page ${currentPageNumber} of artifact list`);
const { data: listArtifactResponse } = yield github.rest.actions.listWorkflowRunArtifacts({
const { data: listArtifactResponse } = yield github.request('GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts', {
owner: repositoryOwner,
repo: repositoryName,
run_id: workflowRunId,
@@ -2302,7 +2578,8 @@
size: artifact.size_in_bytes,
createdAt: artifact.created_at
? new Date(artifact.created_at)
: undefined
: undefined,
digest: artifact.digest
});
}
}
@@ -2325,14 +2602,18 @@ function listArtifactsInternal(latest = false) {
workflowJobRunBackendId
};
const res = yield artifactClient.ListArtifacts(req);
let artifacts = res.artifacts.map(artifact => ({
name: artifact.name,
id: Number(artifact.databaseId),
size: Number(artifact.size),
createdAt: artifact.createdAt
? generated_1.Timestamp.toDate(artifact.createdAt)
: undefined
}));
let artifacts = res.artifacts.map(artifact => {
var _a;
return ({
name: artifact.name,
id: Number(artifact.databaseId),
size: Number(artifact.size),
createdAt: artifact.createdAt
? generated_1.Timestamp.toDate(artifact.createdAt)
: undefined,
digest: (_a = artifact.digest) === null || _a === void 0 ? void 0 : _a.value
});
});
if (latest) {
artifacts = filterLatest(artifacts);
}
@@ -2444,6 +2725,7 @@ const generated_1 = __nccwpck_require__(49960);
const config_1 = __nccwpck_require__(74610);
const user_agent_1 = __nccwpck_require__(85164);
const errors_1 = __nccwpck_require__(38182);
const util_1 = __nccwpck_require__(63062);
class ArtifactHttpClient {
constructor(userAgent, maxAttempts, baseRetryIntervalMilliseconds, retryMultiplier) {
this.maxAttempts = 5;
@@ -2496,6 +2778,7 @@
(0, core_1.debug)(`[Response] - ${response.message.statusCode}`);
(0, core_1.debug)(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`);
const body = JSON.parse(rawBody);
(0, util_1.maskSecretUrls)(body);
(0, core_1.debug)(`Body: ${JSON.stringify(body, null, 2)}`);
if (this.isSuccessStatusCode(statusCode)) {
return { response, body };
@@ -2812,10 +3095,11 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getBackendIdsFromToken = void 0;
exports.maskSecretUrls = exports.maskSigUrl = exports.getBackendIdsFromToken = void 0;
const core = __importStar(__nccwpck_require__(42186));
const config_1 = __nccwpck_require__(74610);
const jwt_decode_1 = __importDefault(__nccwpck_require__(84329));
const core_1 = __nccwpck_require__(42186);
const InvalidJwtError = new Error('Failed to get backend IDs: The provided JWT token is invalid and/or missing claims');
// uses the JWT token claims to get the
// workflow run and workflow job run backend ids
@@ -2864,6 +3148,74 @@ function getBackendIdsFromToken() {
throw InvalidJwtError;
}
exports.getBackendIdsFromToken = getBackendIdsFromToken;
/**
* Masks the `sig` parameter in a URL and sets it as a secret.
*
* @param url - The URL containing the signature parameter to mask
* @remarks
* This function attempts to parse the provided URL and identify the 'sig' query parameter.
* If found, it registers both the raw and URL-encoded signature values as secrets using
* the Actions `setSecret` API, which prevents them from being displayed in logs.
*
* The function handles errors gracefully if URL parsing fails, logging them as debug messages.
*
* @example
* ```typescript
* // Mask a signature in an Azure SAS token URL
* maskSigUrl('https://example.blob.core.windows.net/container/file.txt?sig=abc123&se=2023-01-01');
* ```
*/
function maskSigUrl(url) {
if (!url)
return;
try {
const parsedUrl = new URL(url);
const signature = parsedUrl.searchParams.get('sig');
if (signature) {
(0, core_1.setSecret)(signature);
(0, core_1.setSecret)(encodeURIComponent(signature));
}
}
catch (error) {
(0, core_1.debug)(`Failed to parse URL: ${url} ${error instanceof Error ? error.message : String(error)}`);
}
}
exports.maskSigUrl = maskSigUrl;
/**
* Masks sensitive information in URLs containing signature parameters.
* Currently supports masking 'sig' parameters in the 'signed_upload_url'
* and 'signed_url' properties of the provided object.
*
* @param body - The object that may contain signed URLs to mask
* @remarks
* This function extracts URLs from the object properties and calls maskSigUrl
* on each one to redact sensitive signature information. The function doesn't
* modify the original object; it only marks the signatures as secrets for
* logging purposes.
*
* @example
* ```typescript
* const responseBody = {
* signed_upload_url: 'https://example.com?sig=abc123',
* signed_download_url: 'https://example.com?sig=def456'
* };
* maskSecretUrls(responseBody);
* ```
*/
function maskSecretUrls(body) {
if (typeof body !== 'object' || body === null) {
(0, core_1.debug)('body is not an object or is null');
return;
}
if ('signed_upload_url' in body &&
typeof body.signed_upload_url === 'string') {
maskSigUrl(body.signed_upload_url);
}
if ('signed_url' in body && typeof body.signed_url === 'string') {
maskSigUrl(body.signed_url);
}
}
exports.maskSecretUrls = maskSecretUrls;
//# sourceMappingURL=util.js.map
/***/ }),
@@ -2970,7 +3322,7 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
core.info('Finished uploading artifact content to blob storage!');
hashStream.end();
sha256Hash = hashStream.read();
core.info(`SHA256 hash of uploaded artifact zip is ${sha256Hash}`);
core.info(`SHA256 digest of uploaded artifact zip is ${sha256Hash}`);
if (uploadByteCount === 0) {
core.warning(`No data was uploaded to blob storage. Reported upload byte count is 0.`);
}
@@ -118358,6 +118710,7 @@ var Inputs;
Inputs["RunID"] = "run-id";
Inputs["Pattern"] = "pattern";
Inputs["MergeMultiple"] = "merge-multiple";
Inputs["ArtifactIds"] = "artifact-ids";
})(Inputs || (exports.Inputs = Inputs = {}));
var Outputs;
(function (Outputs) {
@@ -118408,7 +118761,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.chunk = void 0;
exports.run = exports.chunk = void 0;
const os = __importStar(__nccwpck_require__(22037));
const path = __importStar(__nccwpck_require__(71017));
const core = __importStar(__nccwpck_require__(42186));
@@ -118431,7 +118784,10 @@ function run() {
repository: core.getInput(constants_1.Inputs.Repository, { required: false }),
runID: parseInt(core.getInput(constants_1.Inputs.RunID, { required: false })),
pattern: core.getInput(constants_1.Inputs.Pattern, { required: false }),
mergeMultiple: core.getBooleanInput(constants_1.Inputs.MergeMultiple, { required: false })
mergeMultiple: core.getBooleanInput(constants_1.Inputs.MergeMultiple, {
required: false
}),
artifactIds: core.getInput(constants_1.Inputs.ArtifactIds, { required: false })
};
if (!inputs.path) {
inputs.path = process.env['GITHUB_WORKSPACE'] || process.cwd();
@@ -118439,7 +118795,12 @@
if (inputs.path.startsWith(`~`)) {
inputs.path = inputs.path.replace('~', os.homedir());
}
// Check for mutually exclusive inputs
if (inputs.name && inputs.artifactIds) {
throw new Error(`Inputs 'name' and 'artifact-ids' cannot be used together. Please specify only one.`);
}
const isSingleArtifactDownload = !!inputs.name;
const isDownloadByIds = !!inputs.artifactIds;
const resolvedPath = path.resolve(inputs.path);
core.debug(`Resolved path is ${resolvedPath}`);
const options = {};
@@ -118456,6 +118817,7 @@
};
}
let artifacts = [];
let artifactIds = [];
if (isSingleArtifactDownload) {
core.info(`Downloading single artifact`);
const { artifact: targetArtifact } = yield artifact_1.default.getArtifact(inputs.name, options);
@@ -118465,6 +118827,37 @@
core.debug(`Found named artifact '${inputs.name}' (ID: ${targetArtifact.id}, Size: ${targetArtifact.size})`);
artifacts = [targetArtifact];
}
else if (isDownloadByIds) {
core.info(`Downloading artifacts by ID`);
const artifactIdList = inputs.artifactIds
.split(',')
.map(id => id.trim())
.filter(id => id !== '');
if (artifactIdList.length === 0) {
throw new Error(`No valid artifact IDs provided in 'artifact-ids' input`);
}
core.debug(`Parsed artifact IDs: ${JSON.stringify(artifactIdList)}`);
// Parse the artifact IDs
artifactIds = artifactIdList.map(id => {
const numericId = parseInt(id, 10);
if (isNaN(numericId)) {
throw new Error(`Invalid artifact ID: '${id}'. Must be a number.`);
}
return numericId;
});
// We need to fetch all artifacts to get metadata for the specified IDs
const listArtifactResponse = yield artifact_1.default.listArtifacts(Object.assign({ latest: true }, options));
artifacts = listArtifactResponse.artifacts.filter(artifact => artifactIds.includes(artifact.id));
if (artifacts.length === 0) {
throw new Error(`None of the provided artifact IDs were found`);
}
if (artifacts.length < artifactIds.length) {
const foundIds = artifacts.map(a => a.id);
const missingIds = artifactIds.filter(id => !foundIds.includes(id));
core.warning(`Could not find the following artifact IDs: ${missingIds.join(', ')}`);
}
core.debug(`Found ${artifacts.length} artifacts by ID`);
}
else {
const listArtifactResponse = yield artifact_1.default.listArtifacts(Object.assign({ latest: true }, options));
artifacts = listArtifactResponse.artifacts;
@@ -118476,7 +118869,7 @@
core.debug(`Filtered from ${listArtifactResponse.artifacts.length} to ${artifacts.length} artifacts`);
}
else {
core.info('No input name or pattern filtered specified, downloading all artifacts');
core.info('No input name, artifact-ids or pattern filtered specified, downloading all artifacts');
if (!inputs.mergeMultiple) {
core.info('An extra directory with the artifact name will be created for each download');
}
@@ -118485,21 +118878,33 @@
if (artifacts.length) {
core.info(`Preparing to download the following artifacts:`);
artifacts.forEach(artifact => {
core.info(`- ${artifact.name} (ID: ${artifact.id}, Size: ${artifact.size})`);
core.info(`- ${artifact.name} (ID: ${artifact.id}, Size: ${artifact.size}, Expected Digest: ${artifact.digest})`);
});
}
const downloadPromises = artifacts.map(artifact => artifact_1.default.downloadArtifact(artifact.id, Object.assign(Object.assign({}, options), { path: isSingleArtifactDownload || inputs.mergeMultiple
? resolvedPath
: path.join(resolvedPath, artifact.name) })));
const downloadPromises = artifacts.map(artifact => ({
name: artifact.name,
promise: artifact_1.default.downloadArtifact(artifact.id, Object.assign(Object.assign({}, options), { path: isSingleArtifactDownload || inputs.mergeMultiple
? resolvedPath
: path.join(resolvedPath, artifact.name), expectedHash: artifact.digest }))
}));
const chunkedPromises = (0, exports.chunk)(downloadPromises, PARALLEL_DOWNLOADS);
for (const chunk of chunkedPromises) {
yield Promise.all(chunk);
const chunkPromises = chunk.map(item => item.promise);
const results = yield Promise.all(chunkPromises);
for (let i = 0; i < results.length; i++) {
const outcome = results[i];
const artifactName = chunk[i].name;
if (outcome.digestMismatch) {
core.warning(`Artifact '${artifactName}' digest validation failed. Please verify the integrity of the artifact.`);
}
}
}
core.info(`Total of ${artifacts.length} artifact(s) downloaded`);
core.setOutput(constants_1.Outputs.DownloadPath, resolvedPath);
core.info('Download artifact has finished successfully');
});
}
exports.run = run;
run().catch(err => core.setFailed(`Unable to download artifact(s): ${err.message}`));
@@ -128475,7 +128880,7 @@ module.exports = index;
/***/ ((module) => {
"use strict";
module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.2.2","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^7.0.1","jwt-decode":"^3.1.2","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.3.2","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^7.0.1","jwt-decode":"^3.1.2","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
/***/ }),
@@ -128553,4 +128958,4 @@ module.exports = JSON.parse('[[[0,44],"disallowed_STD3_valid"],[[45,46],"valid"]
/******/ module.exports = __webpack_exports__;
/******/
/******/ })()
;
;

docs/MIGRATION.md

@@ -4,6 +4,7 @@
- [Multiple uploads to the same named Artifact](#multiple-uploads-to-the-same-named-artifact)
- [Overwriting an Artifact](#overwriting-an-artifact)
- [Merging multiple artifacts](#merging-multiple-artifacts)
- [Working with Immutable Artifacts](#working-with-immutable-artifacts)
Several behavioral differences exist between Artifact actions `v3` and below vs `v4`. This document outlines common scenarios in `v3`, and how they would be handled in `v4`.
@@ -207,3 +208,46 @@ jobs:
```
Note that this will download all artifacts to a temporary directory and reupload them as a single artifact. For more information on inputs and other use cases for `actions/upload-artifact/merge@v4`, see [the action documentation](https://github.com/actions/upload-artifact/blob/main/merge/README.md).
## Working with Immutable Artifacts
In `v4`, artifacts are immutable by default and each artifact gets a unique ID when uploaded. When an artifact with the same name is uploaded again (with or without `overwrite: true`), it gets a new artifact ID.
To take advantage of this immutability for security purposes, and to avoid potential time-of-check to time-of-use (TOCTOU) issues where an artifact might be replaced between upload and download, the new `artifact-ids` input allows you to download artifacts by their specific ID rather than by name:
```yaml
jobs:
upload:
runs-on: ubuntu-latest
# Make the artifact ID available to the download job
outputs:
artifact-id: ${{ steps.upload-step.outputs.artifact-id }}
steps:
- name: Create a file
run: echo "hello world" > my-file.txt
- name: Upload Artifact
id: upload-step
uses: actions/upload-artifact@v4
with:
name: my-artifact
path: my-file.txt
# The upload step outputs the artifact ID
- name: Print Artifact ID
run: echo "Artifact ID is ${{ steps.upload-step.outputs.artifact-id }}"
download:
needs: upload
runs-on: ubuntu-latest
steps:
- name: Download Artifact by ID
uses: actions/download-artifact@v4
with:
# Use the artifact ID directly, not the name, to ensure you get exactly the artifact you expect
artifact-ids: ${{ needs.upload.outputs.artifact-id }}
```
This approach provides stronger guarantees about which artifact version you're downloading compared to using just the artifact name.
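Because downloads by ID go through the same `listArtifacts`/`downloadArtifact` path as name and pattern downloads (see the `src/download-artifact.ts` diff later in this comparison), the existing `github-token`, `repository`, and `run-id` inputs should combine with `artifact-ids`, letting a separate workflow pin an artifact produced by an earlier run. A sketch with placeholder values:
```yaml
steps:
  - name: Download a pinned artifact from another run
    uses: actions/download-artifact@v4
    with:
      artifact-ids: 12345                  # hypothetical ID recorded by the producing run
      github-token: ${{ secrets.GH_PAT }}  # assumed token with read access to the source repo
      repository: octo-org/example-repo    # placeholder owner/repo
      run-id: 987654321                    # placeholder workflow run ID
```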

jest.config.ts (new file, 12 additions)

@@ -0,0 +1,12 @@
module.exports = {
clearMocks: true,
moduleFileExtensions: ['js', 'ts'],
roots: ['<rootDir>'],
testEnvironment: 'node',
testMatch: ['**/*.test.ts'],
testRunner: 'jest-circus/runner',
transform: {
'^.+\\.ts$': 'ts-jest'
},
verbose: true
}

package-lock.json (generated, 8323 changes)
File diff suppressed because it is too large.

package.json

@@ -1,6 +1,6 @@
{
"name": "download-artifact",
"version": "4.1.9",
"version": "4.3.0",
"description": "Download an Actions Artifact from a workflow run",
"main": "dist/index.js",
"scripts": {
@@ -9,7 +9,8 @@
"check-all": "concurrently \"npm:format-check\" \"npm:lint\" \"npm:build\"",
"format": "prettier --write **/*.ts",
"format-check": "prettier --check **/*.ts",
"lint": "eslint **/*.ts"
"lint": "eslint **/*.ts",
"test": "jest"
},
"repository": {
"type": "git",
@@ -28,12 +29,13 @@
},
"homepage": "https://github.com/actions/download-artifact#readme",
"dependencies": {
"@actions/artifact": "^2.2.2",
"@actions/artifact": "^2.3.2",
"@actions/core": "^1.10.1",
"@actions/github": "^5.1.1",
"minimatch": "^9.0.3"
},
"devDependencies": {
"@types/jest": "^29.5.14",
"@types/node": "^12.12.6",
"@typescript-eslint/eslint-plugin": "^6.14.0",
"@vercel/ncc": "^0.33.4",
@@ -41,7 +43,10 @@
"eslint": "^8.55.0",
"eslint-plugin-github": "^4.10.1",
"eslint-plugin-prettier": "^5.0.1",
"jest": "^29.7.0",
"prettier": "^3.1.1",
"ts-jest": "^29.2.6",
"ts-node": "^10.9.2",
"typescript": "^5.3.3"
}
}

src/constants.ts

@@ -5,7 +5,8 @@ export enum Inputs {
Repository = 'repository',
RunID = 'run-id',
Pattern = 'pattern',
MergeMultiple = 'merge-multiple'
MergeMultiple = 'merge-multiple',
ArtifactIds = 'artifact-ids'
}
export enum Outputs {

src/download-artifact.ts

@@ -15,7 +15,7 @@ export const chunk = <T>(arr: T[], n: number): T[][] =>
return acc
}, [] as T[][])
async function run(): Promise<void> {
export async function run(): Promise<void> {
const inputs = {
name: core.getInput(Inputs.Name, {required: false}),
path: core.getInput(Inputs.Path, {required: false}),
@@ -23,7 +23,10 @@ async function run(): Promise<void> {
repository: core.getInput(Inputs.Repository, {required: false}),
runID: parseInt(core.getInput(Inputs.RunID, {required: false})),
pattern: core.getInput(Inputs.Pattern, {required: false}),
mergeMultiple: core.getBooleanInput(Inputs.MergeMultiple, {required: false})
mergeMultiple: core.getBooleanInput(Inputs.MergeMultiple, {
required: false
}),
artifactIds: core.getInput(Inputs.ArtifactIds, {required: false})
}
if (!inputs.path) {
@@ -34,7 +37,15 @@
inputs.path = inputs.path.replace('~', os.homedir())
}
// Check for mutually exclusive inputs
if (inputs.name && inputs.artifactIds) {
throw new Error(
`Inputs 'name' and 'artifact-ids' cannot be used together. Please specify only one.`
)
}
const isSingleArtifactDownload = !!inputs.name
const isDownloadByIds = !!inputs.artifactIds
const resolvedPath = path.resolve(inputs.path)
core.debug(`Resolved path is ${resolvedPath}`)
@@ -56,6 +67,7 @@
}
let artifacts: Artifact[] = []
let artifactIds: number[] = []
if (isSingleArtifactDownload) {
core.info(`Downloading single artifact`)
@@ -74,6 +86,52 @@
)
artifacts = [targetArtifact]
} else if (isDownloadByIds) {
core.info(`Downloading artifacts by ID`)
const artifactIdList = inputs.artifactIds
.split(',')
.map(id => id.trim())
.filter(id => id !== '')
if (artifactIdList.length === 0) {
throw new Error(`No valid artifact IDs provided in 'artifact-ids' input`)
}
core.debug(`Parsed artifact IDs: ${JSON.stringify(artifactIdList)}`)
// Parse the artifact IDs
artifactIds = artifactIdList.map(id => {
const numericId = parseInt(id, 10)
if (isNaN(numericId)) {
throw new Error(`Invalid artifact ID: '${id}'. Must be a number.`)
}
return numericId
})
// We need to fetch all artifacts to get metadata for the specified IDs
const listArtifactResponse = await artifactClient.listArtifacts({
latest: true,
...options
})
artifacts = listArtifactResponse.artifacts.filter(artifact =>
artifactIds.includes(artifact.id)
)
if (artifacts.length === 0) {
throw new Error(`None of the provided artifact IDs were found`)
}
if (artifacts.length < artifactIds.length) {
const foundIds = artifacts.map(a => a.id)
const missingIds = artifactIds.filter(id => !foundIds.includes(id))
core.warning(
`Could not find the following artifact IDs: ${missingIds.join(', ')}`
)
}
core.debug(`Found ${artifacts.length} artifacts by ID`)
} else {
const listArtifactResponse = await artifactClient.listArtifacts({
latest: true,
@@ -92,7 +150,7 @@
)
} else {
core.info(
'No input name or pattern filtered specified, downloading all artifacts'
'No input name, artifact-ids or pattern filtered specified, downloading all artifacts'
)
if (!inputs.mergeMultiple) {
core.info(
@@ -106,26 +164,39 @@
core.info(`Preparing to download the following artifacts:`)
artifacts.forEach(artifact => {
core.info(
`- ${artifact.name} (ID: ${artifact.id}, Size: ${artifact.size})`
`- ${artifact.name} (ID: ${artifact.id}, Size: ${artifact.size}, Expected Digest: ${artifact.digest})`
)
})
}
const downloadPromises = artifacts.map(artifact =>
artifactClient.downloadArtifact(artifact.id, {
const downloadPromises = artifacts.map(artifact => ({
name: artifact.name,
promise: artifactClient.downloadArtifact(artifact.id, {
...options,
path:
isSingleArtifactDownload || inputs.mergeMultiple
? resolvedPath
: path.join(resolvedPath, artifact.name)
: path.join(resolvedPath, artifact.name),
expectedHash: artifact.digest
})
)
}))
const chunkedPromises = chunk(downloadPromises, PARALLEL_DOWNLOADS)
for (const chunk of chunkedPromises) {
await Promise.all(chunk)
}
const chunkPromises = chunk.map(item => item.promise)
const results = await Promise.all(chunkPromises)
for (let i = 0; i < results.length; i++) {
const outcome = results[i]
const artifactName = chunk[i].name
if (outcome.digestMismatch) {
core.warning(
`Artifact '${artifactName}' digest validation failed. Please verify the integrity of the artifact.`
)
}
}
}
core.info(`Total of ${artifacts.length} artifact(s) downloaded`)
core.setOutput(Outputs.DownloadPath, resolvedPath)
core.info('Download artifact has finished successfully')

tsconfig.json

@@ -9,5 +9,5 @@
"moduleResolution": "node",
"esModuleInterop": true
},
"exclude": ["node_modules", "**/*.test.ts"]
"exclude": ["node_modules", "**/*.test.ts", "jest.config.ts", "__tests__"]
}