
add new overwrite input & docs

Rob Herley, 2024-01-18 13:31:03 -05:00
parent 1eb3cb2b3e
commit 11ff42c7b1
No known key found for this signature in database; GPG key ID: D1602042C3543B06
10 changed files with 536 additions and 26 deletions

View file

@ -24,6 +24,7 @@ See also [download-artifact](https://github.com/actions/download-artifact).
- [Using Outputs](#using-outputs)
- [Example output between steps](#example-output-between-steps)
- [Example output between jobs](#example-output-between-jobs)
- [Overwriting an Artifact](#overwriting-an-artifact)
- [Limitations](#limitations)
- [Number of Artifacts](#number-of-artifacts)
- [Zip archives](#zip-archives)
@ -44,7 +45,7 @@ For more information, see the [`@actions/artifact`](https://github.com/actions/t
1. Uploads are significantly faster, upwards of 90% improvement in worst case scenarios.
2. Once uploaded, an Artifact ID is returned and Artifacts are immediately available in the UI and [REST API](https://docs.github.com/en/rest/actions/artifacts). Previously, you would have to wait for the run to be completed before an ID was available or any APIs could be utilized.
3. The contents of an Artifact are uploaded together into an _immutable_ archive. They cannot be altered by subsequent jobs unless the Artifacts are deleted and recreated (where they will have a new ID). Both of these factors help reduce the possibility of accidentally corrupting Artifact files.
4. The compression level of an Artifact can be manually tweaked for speed or size reduction.
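As an illustration of item 4, a minimal sketch of an upload step that disables compression for a large, already-compressed file (the artifact name and path below are hypothetical):

```yaml
- name: Upload large binary without compression
  uses: actions/upload-artifact@v4
  with:
    name: my-large-artifact    # hypothetical artifact name
    path: data.bin             # hypothetical path to a large, incompressible file
    compression-level: 0       # no compression, significantly faster upload
```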
### Breaking Changes
@ -365,6 +366,36 @@ jobs:
run: echo "Artifact ID from previous job is $OUTPUT1"
```
### Overwriting an Artifact
Although it's not possible to mutate an Artifact, you can completely overwrite one. Note that this gives the Artifact a new ID; the previous one will no longer exist:
```yaml
jobs:
upload:
runs-on: ubuntu-latest
steps:
- name: Create a file
run: echo "hello world" > my-file.txt
- name: Upload Artifact
uses: actions/upload-artifact@v4
with:
name: my-artifact # NOTE: same artifact name
path: my-file.txt
upload-again:
needs: upload
runs-on: ubuntu-latest
steps:
- name: Create a different file
run: echo "goodbye world" > my-file.txt
- name: Upload Artifact
uses: actions/upload-artifact@v4
with:
name: my-artifact # NOTE: same artifact name
path: my-file.txt
overwrite: true
```
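Because the overwritten Artifact is recreated under a new ID, the `artifact-id` output of the overwriting upload step reflects the replacement. A minimal sketch, assuming a hypothetical step id `upload-again`:

```yaml
- name: Upload Artifact
  id: upload-again
  uses: actions/upload-artifact@v4
  with:
    name: my-artifact
    path: my-file.txt
    overwrite: true
- name: Print the new Artifact ID
  run: echo "New artifact ID is ${{ steps.upload-again.outputs.artifact-id }}"
```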
## Limitations
### Number of Artifacts

View file

@ -34,6 +34,12 @@ inputs:
Higher levels will result in better compression, but will take longer to complete.
For large files that are not easily compressed, a value of 0 is recommended for significantly faster uploads.
default: '6'
overwrite:
description: >
If true, an artifact with a matching name will be deleted before a new one is uploaded.
If false, the action will fail if an artifact for the given name already exists.
Does not fail if the artifact does not exist.
default: 'false'
outputs:
artifact-id:

dist/index.js (vendored, 406 lines changed)
View file

@ -824,7 +824,7 @@ __exportStar(__nccwpck_require__(63077), exports);
"use strict"; "use strict";
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.ArtifactService = exports.GetSignedArtifactURLResponse = exports.GetSignedArtifactURLRequest = exports.ListArtifactsResponse_MonolithArtifact = exports.ListArtifactsResponse = exports.ListArtifactsRequest = exports.FinalizeArtifactResponse = exports.FinalizeArtifactRequest = exports.CreateArtifactResponse = exports.CreateArtifactRequest = void 0; exports.ArtifactService = exports.DeleteArtifactResponse = exports.DeleteArtifactRequest = exports.GetSignedArtifactURLResponse = exports.GetSignedArtifactURLRequest = exports.ListArtifactsResponse_MonolithArtifact = exports.ListArtifactsResponse = exports.ListArtifactsRequest = exports.FinalizeArtifactResponse = exports.FinalizeArtifactRequest = exports.CreateArtifactResponse = exports.CreateArtifactRequest = void 0;
// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies // @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
// @generated from protobuf file "results/api/v1/artifact.proto" (package "github.actions.results.api.v1", syntax proto3) // @generated from protobuf file "results/api/v1/artifact.proto" (package "github.actions.results.api.v1", syntax proto3)
// tslint:disable // tslint:disable
@ -1400,6 +1400,121 @@ class GetSignedArtifactURLResponse$Type extends runtime_5.MessageType {
 * @generated MessageType for protobuf message github.actions.results.api.v1.GetSignedArtifactURLResponse
 */
exports.GetSignedArtifactURLResponse = new GetSignedArtifactURLResponse$Type();
// @generated message type with reflection information, may provide speed optimized methods
class DeleteArtifactRequest$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.DeleteArtifactRequest", [
{ no: 1, name: "workflow_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 2, name: "workflow_job_run_backend_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value) {
const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", name: "" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* string workflow_run_backend_id */ 1:
message.workflowRunBackendId = reader.string();
break;
case /* string workflow_job_run_backend_id */ 2:
message.workflowJobRunBackendId = reader.string();
break;
case /* string name */ 3:
message.name = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* string workflow_run_backend_id = 1; */
if (message.workflowRunBackendId !== "")
writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId);
/* string workflow_job_run_backend_id = 2; */
if (message.workflowJobRunBackendId !== "")
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.workflowJobRunBackendId);
/* string name = 3; */
if (message.name !== "")
writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.name);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.DeleteArtifactRequest
*/
exports.DeleteArtifactRequest = new DeleteArtifactRequest$Type();
// @generated message type with reflection information, may provide speed optimized methods
class DeleteArtifactResponse$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.DeleteArtifactResponse", [
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
{ no: 2, name: "artifact_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
]);
}
create(value) {
const message = { ok: false, artifactId: "0" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
return message;
}
internalBinaryRead(reader, length, options, target) {
let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length;
while (reader.pos < end) {
let [fieldNo, wireType] = reader.tag();
switch (fieldNo) {
case /* bool ok */ 1:
message.ok = reader.bool();
break;
case /* int64 artifact_id */ 2:
message.artifactId = reader.int64().toString();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
let d = reader.skip(wireType);
if (u !== false)
(u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
}
}
return message;
}
internalBinaryWrite(message, writer, options) {
/* bool ok = 1; */
if (message.ok !== false)
writer.tag(1, runtime_1.WireType.Varint).bool(message.ok);
/* int64 artifact_id = 2; */
if (message.artifactId !== "0")
writer.tag(2, runtime_1.WireType.Varint).int64(message.artifactId);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
return writer;
}
}
/**
* @generated MessageType for protobuf message github.actions.results.api.v1.DeleteArtifactResponse
*/
exports.DeleteArtifactResponse = new DeleteArtifactResponse$Type();
/**
 * @generated ServiceType for protobuf service github.actions.results.api.v1.ArtifactService
 */
@ -1407,7 +1522,8 @@ exports.ArtifactService = new runtime_rpc_1.ServiceType("github.actions.results.
{ name: "CreateArtifact", options: {}, I: exports.CreateArtifactRequest, O: exports.CreateArtifactResponse },
{ name: "FinalizeArtifact", options: {}, I: exports.FinalizeArtifactRequest, O: exports.FinalizeArtifactResponse },
{ name: "ListArtifacts", options: {}, I: exports.ListArtifactsRequest, O: exports.ListArtifactsResponse },
{ name: "GetSignedArtifactURL", options: {}, I: exports.GetSignedArtifactURLRequest, O: exports.GetSignedArtifactURLResponse },
{ name: "DeleteArtifact", options: {}, I: exports.DeleteArtifactRequest, O: exports.DeleteArtifactResponse }
]);
//# sourceMappingURL=artifact.js.map
@ -1438,6 +1554,7 @@ class ArtifactServiceClientJSON {
this.FinalizeArtifact.bind(this);
this.ListArtifacts.bind(this);
this.GetSignedArtifactURL.bind(this);
this.DeleteArtifact.bind(this);
}
CreateArtifact(request) {
const data = artifact_1.CreateArtifactRequest.toJson(request, {
@ -1477,6 +1594,16 @@ class ArtifactServiceClientJSON {
ignoreUnknownFields: true,
}));
}
DeleteArtifact(request) {
const data = artifact_1.DeleteArtifactRequest.toJson(request, {
useProtoFieldName: true,
emitDefaultValues: false,
});
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "DeleteArtifact", "application/json", data);
return promise.then((data) => artifact_1.DeleteArtifactResponse.fromJson(data, {
ignoreUnknownFields: true,
}));
}
}
exports.ArtifactServiceClientJSON = ArtifactServiceClientJSON;
class ArtifactServiceClientProtobuf {
@ -1486,6 +1613,7 @@ class ArtifactServiceClientProtobuf {
this.FinalizeArtifact.bind(this);
this.ListArtifacts.bind(this);
this.GetSignedArtifactURL.bind(this);
this.DeleteArtifact.bind(this);
}
CreateArtifact(request) {
const data = artifact_1.CreateArtifactRequest.toBinary(request);
@ -1507,6 +1635,11 @@ class ArtifactServiceClientProtobuf {
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "GetSignedArtifactURL", "application/protobuf", data);
return promise.then((data) => artifact_1.GetSignedArtifactURLResponse.fromBinary(data));
}
DeleteArtifact(request) {
const data = artifact_1.DeleteArtifactRequest.toBinary(request);
const promise = this.rpc.request("github.actions.results.api.v1.ArtifactService", "DeleteArtifact", "application/protobuf", data);
return promise.then((data) => artifact_1.DeleteArtifactResponse.fromBinary(data));
}
}
exports.ArtifactServiceClientProtobuf = ArtifactServiceClientProtobuf;
var ArtifactServiceMethod;
@ -1515,12 +1648,14 @@ var ArtifactServiceMethod;
ArtifactServiceMethod["FinalizeArtifact"] = "FinalizeArtifact";
ArtifactServiceMethod["ListArtifacts"] = "ListArtifacts";
ArtifactServiceMethod["GetSignedArtifactURL"] = "GetSignedArtifactURL";
ArtifactServiceMethod["DeleteArtifact"] = "DeleteArtifact";
})(ArtifactServiceMethod || (exports.ArtifactServiceMethod = ArtifactServiceMethod = {}));
exports.ArtifactServiceMethodList = [
ArtifactServiceMethod.CreateArtifact,
ArtifactServiceMethod.FinalizeArtifact,
ArtifactServiceMethod.ListArtifacts,
ArtifactServiceMethod.GetSignedArtifactURL,
ArtifactServiceMethod.DeleteArtifact,
];
function createArtifactServiceServer(service) {
return new twirp_ts_1.TwirpServer({
@ -1558,6 +1693,12 @@ function matchArtifactServiceRoute(method, events) {
yield events.onMatch(ctx);
return handleArtifactServiceGetSignedArtifactURLRequest(ctx, service, data, interceptors);
});
case "DeleteArtifact":
return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () {
ctx = Object.assign(Object.assign({}, ctx), { methodName: "DeleteArtifact" });
yield events.onMatch(ctx);
return handleArtifactServiceDeleteArtifactRequest(ctx, service, data, interceptors);
});
default:
events.onNotFound();
const msg = `no handler found`;
@ -1608,6 +1749,17 @@ function handleArtifactServiceGetSignedArtifactURLRequest(ctx, service, data, in
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
}
}
function handleArtifactServiceDeleteArtifactRequest(ctx, service, data, interceptors) {
switch (ctx.contentType) {
case twirp_ts_1.TwirpContentType.JSON:
return handleArtifactServiceDeleteArtifactJSON(ctx, service, data, interceptors);
case twirp_ts_1.TwirpContentType.Protobuf:
return handleArtifactServiceDeleteArtifactProtobuf(ctx, service, data, interceptors);
default:
const msg = "unexpected Content-Type";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg);
}
}
function handleArtifactServiceCreateArtifactJSON(ctx, service, data, interceptors) {
return __awaiter(this, void 0, void 0, function* () {
let request;
@ -1732,6 +1884,37 @@ function handleArtifactServiceGetSignedArtifactURLJSON(ctx, service, data, inter
}));
});
}
function handleArtifactServiceDeleteArtifactJSON(ctx, service, data, interceptors) {
return __awaiter(this, void 0, void 0, function* () {
let request;
let response;
try {
const body = JSON.parse(data.toString() || "{}");
request = artifact_1.DeleteArtifactRequest.fromJson(body, {
ignoreUnknownFields: true,
});
}
catch (e) {
if (e instanceof Error) {
const msg = "the json request could not be decoded";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
response = yield interceptor(ctx, request, (ctx, inputReq) => {
return service.DeleteArtifact(ctx, inputReq);
});
}
else {
response = yield service.DeleteArtifact(ctx, request);
}
return JSON.stringify(artifact_1.DeleteArtifactResponse.toJson(response, {
useProtoFieldName: true,
emitDefaultValues: false,
}));
});
}
function handleArtifactServiceCreateArtifactProtobuf(ctx, service, data, interceptors) {
return __awaiter(this, void 0, void 0, function* () {
let request;
@ -1832,6 +2015,31 @@ function handleArtifactServiceGetSignedArtifactURLProtobuf(ctx, service, data, i
return Buffer.from(artifact_1.GetSignedArtifactURLResponse.toBinary(response));
});
}
function handleArtifactServiceDeleteArtifactProtobuf(ctx, service, data, interceptors) {
return __awaiter(this, void 0, void 0, function* () {
let request;
let response;
try {
request = artifact_1.DeleteArtifactRequest.fromBinary(data);
}
catch (e) {
if (e instanceof Error) {
const msg = "the protobuf request could not be decoded";
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true);
}
}
if (interceptors && interceptors.length > 0) {
const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors);
response = yield interceptor(ctx, request, (ctx, inputReq) => {
return service.DeleteArtifact(ctx, inputReq);
});
}
else {
response = yield service.DeleteArtifact(ctx, request);
}
return Buffer.from(artifact_1.DeleteArtifactResponse.toBinary(response));
});
}
//# sourceMappingURL=artifact.twirp.js.map
/***/ }),
@ -1867,6 +2075,7 @@ const core_1 = __nccwpck_require__(42186);
const config_1 = __nccwpck_require__(74610);
const upload_artifact_1 = __nccwpck_require__(42578);
const download_artifact_1 = __nccwpck_require__(73555);
const delete_artifact_1 = __nccwpck_require__(70071);
const get_artifact_1 = __nccwpck_require__(29491);
const list_artifacts_1 = __nccwpck_require__(44141);
const errors_1 = __nccwpck_require__(38182);
@ -1953,6 +2162,28 @@ If the error persists, please check whether Actions and API requests are operati
Errors can be temporary, so please try again and optionally run the action with debug mode enabled for more information.
If the error persists, please check whether Actions and API requests are operating normally at [https://githubstatus.com](https://www.githubstatus.com).`);
throw error;
}
});
}
deleteArtifact(artifactName, options) {
return __awaiter(this, void 0, void 0, function* () {
try {
if ((0, config_1.isGhes)()) {
throw new errors_1.GHESNotSupportedError();
}
if (options === null || options === void 0 ? void 0 : options.findBy) {
const { findBy: { repositoryOwner, repositoryName, workflowRunId, token } } = options;
return (0, delete_artifact_1.deleteArtifactPublic)(artifactName, workflowRunId, repositoryOwner, repositoryName, token);
}
return (0, delete_artifact_1.deleteArtifactInternal)(artifactName);
}
catch (error) {
(0, core_1.warning)(`Delete Artifact failed with error: ${error}.
Errors can be temporary, so please try again and optionally run the action with debug mode enabled for more information.
If the error persists, please check whether Actions and API requests are operating normally at [https://githubstatus.com](https://www.githubstatus.com).`);
throw error;
}
@ -1964,6 +2195,96 @@ exports.DefaultArtifactClient = DefaultArtifactClient;
/***/ }),
/***/ 70071:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.deleteArtifactInternal = exports.deleteArtifactPublic = void 0;
const core_1 = __nccwpck_require__(42186);
const github_1 = __nccwpck_require__(21260);
const user_agent_1 = __nccwpck_require__(85164);
const retry_options_1 = __nccwpck_require__(64597);
const utils_1 = __nccwpck_require__(58154);
const plugin_request_log_1 = __nccwpck_require__(68883);
const plugin_retry_1 = __nccwpck_require__(86298);
const artifact_twirp_client_1 = __nccwpck_require__(12312);
const util_1 = __nccwpck_require__(63062);
const generated_1 = __nccwpck_require__(49960);
const get_artifact_1 = __nccwpck_require__(29491);
const errors_1 = __nccwpck_require__(38182);
function deleteArtifactPublic(artifactName, workflowRunId, repositoryOwner, repositoryName, token) {
var _a;
return __awaiter(this, void 0, void 0, function* () {
const [retryOpts, requestOpts] = (0, retry_options_1.getRetryOptions)(utils_1.defaults);
const opts = {
log: undefined,
userAgent: (0, user_agent_1.getUserAgentString)(),
previews: undefined,
retry: retryOpts,
request: requestOpts
};
const github = (0, github_1.getOctokit)(token, opts, plugin_retry_1.retry, plugin_request_log_1.requestLog);
const getArtifactResp = yield (0, get_artifact_1.getArtifactPublic)(artifactName, workflowRunId, repositoryOwner, repositoryName, token);
const deleteArtifactResp = yield github.rest.actions.deleteArtifact({
owner: repositoryOwner,
repo: repositoryName,
artifact_id: getArtifactResp.artifact.id
});
if (deleteArtifactResp.status !== 204) {
throw new errors_1.InvalidResponseError(`Invalid response from GitHub API: ${deleteArtifactResp.status} (${(_a = deleteArtifactResp === null || deleteArtifactResp === void 0 ? void 0 : deleteArtifactResp.headers) === null || _a === void 0 ? void 0 : _a['x-github-request-id']})`);
}
return {
id: getArtifactResp.artifact.id
};
});
}
exports.deleteArtifactPublic = deleteArtifactPublic;
function deleteArtifactInternal(artifactName) {
return __awaiter(this, void 0, void 0, function* () {
const artifactClient = (0, artifact_twirp_client_1.internalArtifactTwirpClient)();
const { workflowRunBackendId, workflowJobRunBackendId } = (0, util_1.getBackendIdsFromToken)();
const listReq = {
workflowRunBackendId,
workflowJobRunBackendId,
nameFilter: generated_1.StringValue.create({ value: artifactName })
};
const listRes = yield artifactClient.ListArtifacts(listReq);
if (listRes.artifacts.length === 0) {
throw new errors_1.ArtifactNotFoundError(`Artifact not found for name: ${artifactName}`);
}
let artifact = listRes.artifacts[0];
if (listRes.artifacts.length > 1) {
artifact = listRes.artifacts.sort((a, b) => Number(b.databaseId) - Number(a.databaseId))[0];
(0, core_1.debug)(`More than one artifact found for a single name, returning newest (id: ${artifact.databaseId})`);
}
const req = {
workflowRunBackendId: artifact.workflowRunBackendId,
workflowJobRunBackendId: artifact.workflowJobRunBackendId,
name: artifact.name
};
const res = yield artifactClient.DeleteArtifact(req);
(0, core_1.info)(`Artifact '${artifactName}' (ID: ${res.artifactId}) deleted`);
return {
id: Number(res.artifactId)
};
});
}
exports.deleteArtifactInternal = deleteArtifactInternal;
//# sourceMappingURL=delete-artifact.js.map
/***/ }),
/***/ 73555:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
@ -2005,7 +2326,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.downloadArtifactInternal = exports.downloadArtifactPublic = exports.streamExtractExternal = void 0;
const promises_1 = __importDefault(__nccwpck_require__(73292));
const github = __importStar(__nccwpck_require__(21260));
const core = __importStar(__nccwpck_require__(42186));
@ -2039,20 +2360,57 @@ function exists(path) {
});
}
function streamExtract(url, directory) {
return __awaiter(this, void 0, void 0, function* () {
let retryCount = 0;
while (retryCount < 5) {
try {
yield streamExtractExternal(url, directory);
return;
}
catch (error) {
retryCount++;
core.debug(`Failed to download artifact after ${retryCount} retries due to ${error.message}. Retrying in 5 seconds...`);
// wait 5 seconds before retrying
yield new Promise(resolve => setTimeout(resolve, 5000));
}
}
throw new Error(`Artifact download failed after ${retryCount} retries.`);
});
}
function streamExtractExternal(url, directory) {
return __awaiter(this, void 0, void 0, function* () {
const client = new httpClient.HttpClient((0, user_agent_1.getUserAgentString)());
const response = yield client.get(url);
if (response.message.statusCode !== 200) {
throw new Error(`Unexpected HTTP response from blob storage: ${response.message.statusCode} ${response.message.statusMessage}`);
}
const timeout = 30 * 1000; // 30 seconds
return new Promise((resolve, reject) => {
const timerFn = () => {
response.message.destroy(new Error(`Blob storage chunk did not respond in ${timeout}ms`));
};
const timer = setTimeout(timerFn, timeout);
response.message
.on('data', () => {
timer.refresh();
})
.on('error', (error) => {
core.debug(`response.message: Artifact download failed: ${error.message}`);
clearTimeout(timer);
reject(error);
})
.pipe(unzip_stream_1.default.Extract({ path: directory }))
.on('close', () => {
clearTimeout(timer);
resolve();
})
.on('error', (error) => {
reject(error);
});
});
});
}
exports.streamExtractExternal = streamExtractExternal;
function downloadArtifactPublic(artifactId, repositoryOwner, repositoryName, token, options) {
return __awaiter(this, void 0, void 0, function* () {
const downloadPath = yield resolveOrCreateDirectory(options === null || options === void 0 ? void 0 : options.path);
@ -2211,7 +2569,9 @@ function getArtifactPublic(artifactName, workflowRunId, repositoryOwner, reposit
throw new errors_1.InvalidResponseError(`Invalid response from GitHub API: ${getArtifactResp.status} (${(_a = getArtifactResp === null || getArtifactResp === void 0 ? void 0 : getArtifactResp.headers) === null || _a === void 0 ? void 0 : _a['x-github-request-id']})`);
}
if (getArtifactResp.data.artifacts.length === 0) {
throw new errors_1.ArtifactNotFoundError(`Artifact not found for name: ${artifactName}
Please ensure that your artifact is not expired and the artifact was uploaded using a compatible version of toolkit/upload-artifact.
For more information, visit the GitHub Artifacts FAQ: https://github.com/actions/toolkit/blob/main/packages/artifact/docs/faq.md`);
}
let artifact = getArtifactResp.data.artifacts[0];
if (getArtifactResp.data.artifacts.length > 1) {
@ -2240,7 +2600,9 @@ function getArtifactInternal(artifactName) {
};
const res = yield artifactClient.ListArtifacts(req);
if (res.artifacts.length === 0) {
throw new errors_1.ArtifactNotFoundError(`Artifact not found for name: ${artifactName}
Please ensure that your artifact is not expired and the artifact was uploaded using a compatible version of toolkit/upload-artifact.
For more information, visit the GitHub Artifacts FAQ: https://github.com/actions/toolkit/blob/main/packages/artifact/docs/faq.md`);
}
let artifact = res.artifacts[0];
if (res.artifacts.length > 1) {
@ -126721,6 +127083,7 @@ var Inputs;
Inputs["IfNoFilesFound"] = "if-no-files-found";
Inputs["RetentionDays"] = "retention-days";
Inputs["CompressionLevel"] = "compression-level";
Inputs["Overwrite"] = "overwrite";
})(Inputs = exports.Inputs || (exports.Inputs = {}));
var NoFileOptions;
(function (NoFileOptions) {
@ -126779,6 +127142,7 @@ const constants_1 = __nccwpck_require__(69042);
function getInputs() {
const name = core.getInput(constants_1.Inputs.Name);
const path = core.getInput(constants_1.Inputs.Path, { required: true });
const overwrite = core.getBooleanInput(constants_1.Inputs.Overwrite);
const ifNoFilesFound = core.getInput(constants_1.Inputs.IfNoFilesFound);
const noFileBehavior = constants_1.NoFileOptions[ifNoFilesFound];
if (!noFileBehavior) {
@ -126787,7 +127151,8 @@ function getInputs() {
const inputs = {
artifactName: name,
searchPath: path,
ifNoFilesFound: noFileBehavior,
overwrite: overwrite
};
const retentionDaysStr = core.getInput(constants_1.Inputs.RetentionDays);
if (retentionDaysStr) {
@ -127018,16 +127383,28 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
const core = __importStar(__nccwpck_require__(42186));
const github = __importStar(__nccwpck_require__(95438));
const artifact_1 = __importStar(__nccwpck_require__(79450));
const search_1 = __nccwpck_require__(13930);
const input_helper_1 = __nccwpck_require__(46455);
const constants_1 = __nccwpck_require__(69042);
function deleteArtifactIfExists(artifactName) {
return __awaiter(this, void 0, void 0, function* () {
try {
yield artifact_1.default.deleteArtifact(artifactName);
}
catch (error) {
if (error instanceof artifact_1.ArtifactNotFoundError) {
core.debug(`Skipping deletion of '${artifactName}', it does not exist`);
return;
}
// Best effort, we don't want to fail the action if this fails
core.debug(`Unable to delete artifact: ${error.message}`);
}
});
}
function run() {
return __awaiter(this, void 0, void 0, function* () {
try {
@ -127054,6 +127431,9 @@ function run() {
const s = searchResult.filesToUpload.length === 1 ? '' : 's';
core.info(`With the provided path, there will be ${searchResult.filesToUpload.length} file${s} uploaded`);
core.debug(`Root artifact directory is ${searchResult.rootDirectory}`);
if (inputs.overwrite) {
yield deleteArtifactIfExists(inputs.artifactName);
}
const options = {};
if (inputs.retentionDays) {
options.retentionDays = inputs.retentionDays;
@ -128967,7 +129347,7 @@ module.exports = parseParams
/***/ ((module) => {
"use strict";
module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.0.0","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^5.3.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}'); module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.0","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^5.3.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
/***/ }),

View file

@ -2,6 +2,7 @@
- [Migration](#migration)
- [Multiple uploads to the same named Artifact](#multiple-uploads-to-the-same-named-artifact)
- [Overwriting an Artifact](#overwriting-an-artifact)
Several behavioral differences exist between Artifact actions `v3` and below vs `v4`. This document outlines common scenarios in `v3`, and how they would be handled in `v4`.
@ -78,3 +79,66 @@ jobs:
```
In `v4`, the new `pattern:` input will filter the downloaded Artifacts to match the name specified. The new `merge-multiple:` input will support downloading multiple Artifacts to the same directory. If the files within the Artifacts have the same name, the last writer wins.
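A minimal sketch of that `v4` download pattern, assuming Artifacts were uploaded under a shared `my-artifact-` name prefix and a hypothetical target directory:

```yaml
- name: Download and merge matching Artifacts
  uses: actions/download-artifact@v4
  with:
    pattern: my-artifact-*     # filter Artifacts by name
    merge-multiple: true       # extract all matches into the same directory
    path: merged-artifacts     # hypothetical download directory
```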
## Overwriting an Artifact
In `v3`, the contents of an Artifact were mutable, so something like the following was possible:
```yaml
jobs:
upload:
runs-on: ubuntu-latest
steps:
- name: Create a file
run: echo "hello world" > my-file.txt
- name: Upload Artifact
uses: actions/upload-artifact@v3
with:
name: my-artifact # NOTE: same artifact name
path: my-file.txt
upload-again:
needs: upload
runs-on: ubuntu-latest
steps:
- name: Create a different file
run: echo "goodbye world" > my-file.txt
- name: Upload Artifact
uses: actions/upload-artifact@v3
with:
name: my-artifact # NOTE: same artifact name
path: my-file.txt
```
The resulting `my-file.txt` in `my-artifact` will have "goodbye world" as the content.
In `v4`, Artifacts are immutable unless deleted. To achieve this same behavior, you can use `overwrite: true` to delete the Artifact before a new one is created:
```diff
jobs:
upload:
runs-on: ubuntu-latest
steps:
- name: Create a file
run: echo "hello world" > my-file.txt
- name: Upload Artifact
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
name: my-artifact # NOTE: same artifact name
path: my-file.txt
upload-again:
needs: upload
runs-on: ubuntu-latest
steps:
- name: Create a different file
run: echo "goodbye world" > my-file.txt
- name: Upload Artifact
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
name: my-artifact # NOTE: same artifact name
path: my-file.txt
+ overwrite: true
```
Note that this will create an _entirely_ new Artifact, with a different ID from the previous one.

package-lock.json (generated, 14 lines changed)
View file

@ -9,7 +9,7 @@
"version": "4.0.0", "version": "4.0.0",
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"@actions/artifact": "^2.0.0", "@actions/artifact": "^2.1.0",
"@actions/core": "^1.10.0", "@actions/core": "^1.10.0",
"@actions/github": "^6.0.0", "@actions/github": "^6.0.0",
"@actions/glob": "^0.3.0", "@actions/glob": "^0.3.0",
@ -33,9 +33,9 @@
}
},
"node_modules/@actions/artifact": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-2.1.0.tgz",
"integrity": "sha512-R7RI/5tnbOAWPrkKisB0DK58K5jMiizk1weoQQSN9m20dhCmRhGsQtOzNCVeAbOBXewn//1ddbGKKrtlSEvIUg==",
"dependencies": {
"@actions/core": "^1.10.0",
"@actions/github": "^5.1.1",
@ -7518,9 +7518,9 @@
},
"dependencies": {
"@actions/artifact": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-2.1.0.tgz",
"integrity": "sha512-R7RI/5tnbOAWPrkKisB0DK58K5jMiizk1weoQQSN9m20dhCmRhGsQtOzNCVeAbOBXewn//1ddbGKKrtlSEvIUg==",
"requires": {
"@actions/core": "^1.10.0",
"@actions/github": "^5.1.1",

View file

@ -1,6 +1,6 @@
{
"name": "upload-artifact",
"version": "4.2.0",
"description": "Upload an Actions Artifact in a workflow run",
"main": "dist/index.js",
"scripts": {
@ -29,7 +29,7 @@
},
"homepage": "https://github.com/actions/upload-artifact#readme",
"dependencies": {
"@actions/artifact": "^2.1.0",
"@actions/core": "^1.10.0",
"@actions/github": "^6.0.0",
"@actions/glob": "^0.3.0",

View file

@ -4,7 +4,8 @@ export enum Inputs {
Path = 'path',
IfNoFilesFound = 'if-no-files-found',
RetentionDays = 'retention-days',
CompressionLevel = 'compression-level',
Overwrite = 'overwrite'
}
export enum NoFileOptions {

View file

@ -8,6 +8,7 @@ import {UploadInputs} from './upload-inputs'
export function getInputs(): UploadInputs {
const name = core.getInput(Inputs.Name)
const path = core.getInput(Inputs.Path, {required: true})
const overwrite = core.getBooleanInput(Inputs.Overwrite)
const ifNoFilesFound = core.getInput(Inputs.IfNoFilesFound)
const noFileBehavior: NoFileOptions = NoFileOptions[ifNoFilesFound]
@ -25,7 +26,8 @@ export function getInputs(): UploadInputs {
const inputs = {
artifactName: name,
searchPath: path,
ifNoFilesFound: noFileBehavior,
overwrite: overwrite
} as UploadInputs
const retentionDaysStr = core.getInput(Inputs.RetentionDays)

View file

@ -1,10 +1,27 @@
import * as core from '@actions/core'
import * as github from '@actions/github'
import artifact, {
UploadArtifactOptions,
ArtifactNotFoundError
} from '@actions/artifact'
import {findFilesToUpload} from './search'
import {getInputs} from './input-helper'
import {NoFileOptions} from './constants'
async function deleteArtifactIfExists(artifactName: string): Promise<void> {
try {
await artifact.deleteArtifact(artifactName)
} catch (error) {
if (error instanceof ArtifactNotFoundError) {
core.debug(`Skipping deletion of '${artifactName}', it does not exist`)
return
}
// Best effort, we don't want to fail the action if this fails
core.debug(`Unable to delete artifact: ${(error as Error).message}`)
}
}
async function run(): Promise<void> {
try {
const inputs = getInputs()
@ -38,6 +55,10 @@ async function run(): Promise<void> {
)
core.debug(`Root artifact directory is ${searchResult.rootDirectory}`)
if (inputs.overwrite) {
await deleteArtifactIfExists(inputs.artifactName)
}
const options: UploadArtifactOptions = {}
if (inputs.retentionDays) {
options.retentionDays = inputs.retentionDays

View file

@ -25,4 +25,9 @@ export interface UploadInputs {
* The level of compression for Zlib to be applied to the artifact archive.
*/
compressionLevel?: number
/**
* Whether or not to replace an existing artifact with the same name
*/
overwrite: boolean
}