mirror of https://code.forgejo.org/actions/checkout.git
commit 093dbebc2e (parent d79ea53307)
2 changed files with 238 additions and 138 deletions
dist/index.js (vendored): 178 changed lines
@@ -8358,7 +8358,6 @@ const core = __importStar(__webpack_require__(470));
 const exec = __importStar(__webpack_require__(986));
 const fs = __importStar(__webpack_require__(747));
 const github = __importStar(__webpack_require__(469));
-const https = __importStar(__webpack_require__(211));
 const io = __importStar(__webpack_require__(1));
 const path = __importStar(__webpack_require__(622));
 const refHelper = __importStar(__webpack_require__(227));
@@ -8371,28 +8370,44 @@ function downloadRepository(accessToken, owner, repo, ref, commit, repositoryPat
 const runnerTemp = process.env['RUNNER_TEMP'];
 assert.ok(runnerTemp, 'RUNNER_TEMP not defined');
 const archivePath = path.join(runnerTemp, 'checkout.tar.gz');
-// await fs.promises.writeFile(archivePath, raw)
-// Get the archive URL using the REST API
-yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () {
-// Prepare the archive stream
-core.debug(`Preparing the archive stream: ${archivePath}`);
-yield io.rmRF(archivePath);
-const fileStream = fs.createWriteStream(archivePath);
-const fileStreamClosed = getFileClosedPromise(fileStream);
-try {
-// Get the archive URL
-core.info('Getting archive URL');
-const archiveUrl = yield getArchiveUrl(accessToken, owner, repo, ref, commit);
-// Download the archive
-core.info('Downloading the archive'); // Do not print the archive URL because it has an embedded token
-yield downloadFile(archiveUrl, fileStream);
-}
-finally {
-fileStream.end();
-yield fileStreamClosed;
-}
-// return Buffer.from(response.data) // response.data is ArrayBuffer
+// Ensure file does not exist
+core.debug(`Ensuring archive file does not exist: ${archivePath}`);
+yield io.rmRF(archivePath);
+// Download the archive
+let archiveData = yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () {
+core.info('Downloading the archive using the REST API');
+yield yield downloadArchive(accessToken, owner, repo, ref, commit);
 }));
+// Write archive to disk
+core.info('Writing archive to disk');
+yield fs.promises.writeFile(archivePath, archiveData);
+archiveData = undefined;
+// // Get the archive URL using the REST API
+// await retryHelper.execute(async () => {
+// // Prepare the archive stream
+// core.debug(`Preparing the archive stream: ${archivePath}`)
+// await io.rmRF(archivePath)
+// const fileStream = fs.createWriteStream(archivePath)
+// const fileStreamClosed = getFileClosedPromise(fileStream)
+// try {
+// // Get the archive URL
+// core.info('Getting archive URL')
+// const archiveUrl = await getArchiveUrl(
+// accessToken,
+// owner,
+// repo,
+// ref,
+// commit
+// )
+// // Download the archive
+// core.info('Downloading the archive') // Do not print the archive URL because it has an embedded token
+// await downloadFile(archiveUrl, fileStream)
+// } finally {
+// fileStream.end()
+// await fileStreamClosed
+// }
+// })
+// return Buffer.from(response.data) // response.data is ArrayBuffer
 // // Download the archive
 // core.info('Downloading the archive') // Do not print the URL since it contains a token to download the archive
 // await downloadFile(archiveUrl, archivePath)
@@ -8443,11 +8458,10 @@ function downloadRepository(accessToken, owner, repo, ref, commit, repositoryPat
 });
 }
 exports.downloadRepository = downloadRepository;
-function getArchiveUrl(accessToken, owner, repo, ref, commit) {
+function downloadArchive(accessToken, owner, repo, ref, commit) {
 return __awaiter(this, void 0, void 0, function* () {
 const octokit = new github.GitHub(accessToken);
 const params = {
-method: 'HEAD',
 owner: owner,
 repo: repo,
 archive_format: IS_WINDOWS ? 'zipball' : 'tarball',
@@ -8457,51 +8471,87 @@ function getArchiveUrl(accessToken, owner, repo, ref, commit) {
 console.log('GOT THE RESPONSE');
 console.log(`status=${response.status}`);
 console.log(`headers=${JSON.stringify(response.headers)}`);
-console.log(`headers=${JSON.stringify(response.data)}`);
+console.log(`data=${JSON.stringify(response.data)}`);
 if (response.status != 200) {
 throw new Error(`Unexpected response from GitHub API. Status: '${response.status}'`);
 }
-console.log('GETTING THE LOCATION');
-const archiveUrl = response.headers['Location']; // Do not print the archive URL because it has an embedded token
-assert.ok(archiveUrl, `Expected GitHub API response to contain 'Location' header`);
-return archiveUrl;
-});
-}
-function downloadFile(url, fileStream) {
-return new Promise((resolve, reject) => {
-try {
-https.get(url, (response) => {
-if (response.statusCode != 200) {
-reject(`Request failed with status '${response.statusCode}'`);
-response.resume(); // Consume response data to free up memory
-return;
-}
-response.on('data', chunk => {
-fileStream.write(chunk);
-});
-response.on('end', () => {
-resolve();
-});
-response.on('error', err => {
-reject(err);
-});
-});
-}
-catch (err) {
-reject(err);
-}
-});
-}
-function getFileClosedPromise(stream) {
-return new Promise((resolve, reject) => {
-stream.on('error', err => {
-reject(err);
-});
-stream.on('finish', () => {
-resolve();
-});
+return Buffer.from(response.data); // response.data is ArrayBuffer
+// console.log('GETTING THE LOCATION')
+// const archiveUrl = response.headers['Location'] // Do not print the archive URL because it has an embedded token
+// assert.ok(
+// archiveUrl,
+// `Expected GitHub API response to contain 'Location' header`
+// )
+// return archiveUrl
 });
 }
+// async function getArchiveUrl(
+// accessToken: string,
+// owner: string,
+// repo: string,
+// ref: string,
+// commit: string
+// ): Promise<string> {
+// const octokit = new github.GitHub(accessToken)
+// const params: RequestOptions & ReposGetArchiveLinkParams = {
+// method: 'HEAD',
+// owner: owner,
+// repo: repo,
+// archive_format: IS_WINDOWS ? 'zipball' : 'tarball',
+// ref: refHelper.getDownloadRef(ref, commit)
+// }
+// const response = await octokit.repos.getArchiveLink(params)
+// console.log('GOT THE RESPONSE')
+// console.log(`status=${response.status}`)
+// console.log(`headers=${JSON.stringify(response.headers)}`)
+// console.log(`data=${JSON.stringify(response.data)}`)
+// if (response.status != 200) {
+// throw new Error(
+// `Unexpected response from GitHub API. Status: '${response.status}'`
+// )
+// }
+// console.log('GETTING THE LOCATION')
+// const archiveUrl = response.headers['Location'] // Do not print the archive URL because it has an embedded token
+// assert.ok(
+// archiveUrl,
+// `Expected GitHub API response to contain 'Location' header`
+// )
+// return archiveUrl
+// }
+// function downloadFile(url: string, fileStream: WriteStream): Promise<void> {
+// return new Promise((resolve, reject) => {
+// try {
+// https.get(url, (response: IncomingMessage) => {
+// if (response.statusCode != 200) {
+// reject(`Request failed with status '${response.statusCode}'`)
+// response.resume() // Consume response data to free up memory
+// return
+// }
+// response.on('data', chunk => {
+// fileStream.write(chunk)
+// })
+// response.on('end', () => {
+// resolve()
+// })
+// response.on('error', err => {
+// reject(err)
+// })
+// })
+// } catch (err) {
+// reject(err)
+// }
+// })
+// }
+// function getFileClosedPromise(stream: WriteStream): Promise<void> {
+// return new Promise((resolve, reject) => {
+// stream.on('error', err => {
+// reject(err)
+// })
+// stream.on('finish', () => {
+// resolve()
+// })
+// })
+// }
 
 
 /***/ }),
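The dist bundle above is generated output; the substance of the change is that the old stream-based path (getArchiveUrl, downloadFile over https, getFileClosedPromise) is replaced by a single REST call that returns the archive as a buffer, which is then written to disk. A minimal TypeScript sketch of that new flow, assuming the same @actions/core, @actions/io and @actions/github packages the action already uses; the wrapper name downloadAndWriteArchive and the fixed 'tarball' format are illustrative, not part of the commit:

// Sketch only: the shape of the new download path, assuming the @actions/* packages
// and octokit client that the action vendors. downloadAndWriteArchive is a hypothetical
// name; in the commit the work is split between downloadRepository and downloadArchive.
import * as assert from 'assert'
import * as core from '@actions/core'
import * as fs from 'fs'
import * as github from '@actions/github'
import * as io from '@actions/io'
import * as path from 'path'

async function downloadAndWriteArchive(
  accessToken: string,
  owner: string,
  repo: string,
  ref: string
): Promise<string> {
  const runnerTemp = process.env['RUNNER_TEMP'] as string
  assert.ok(runnerTemp, 'RUNNER_TEMP not defined')
  const archivePath = path.join(runnerTemp, 'checkout.tar.gz')

  // Ensure the target file does not exist before downloading
  core.debug(`Ensuring archive file does not exist: ${archivePath}`)
  await io.rmRF(archivePath)

  // Ask the REST API for the archive itself; response.data is an ArrayBuffer
  const octokit = new github.GitHub(accessToken)
  const response = await octokit.repos.getArchiveLink({
    owner,
    repo,
    archive_format: 'tarball', // the commit picks zipball on Windows via IS_WINDOWS
    ref
  })
  if (response.status != 200) {
    throw new Error(
      `Unexpected response from GitHub API. Status: '${response.status}'`
    )
  }
  const archiveData = Buffer.from(response.data)

  // Write the buffered archive to disk in one step
  core.info('Writing archive to disk')
  await fs.promises.writeFile(archivePath, archiveData)
  return archivePath
}

The hunks that follow show the same change in the TypeScript source that the bundle is compiled from.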
@@ -28,38 +28,52 @@ export async function downloadRepository(
 const runnerTemp = process.env['RUNNER_TEMP'] as string
 assert.ok(runnerTemp, 'RUNNER_TEMP not defined')
 const archivePath = path.join(runnerTemp, 'checkout.tar.gz')
-// await fs.promises.writeFile(archivePath, raw)
 
-// Get the archive URL using the REST API
-await retryHelper.execute(async () => {
-// Prepare the archive stream
-core.debug(`Preparing the archive stream: ${archivePath}`)
-await io.rmRF(archivePath)
-const fileStream = fs.createWriteStream(archivePath)
-const fileStreamClosed = getFileClosedPromise(fileStream)
+// Ensure file does not exist
+core.debug(`Ensuring archive file does not exist: ${archivePath}`)
+await io.rmRF(archivePath)
 
-try {
-// Get the archive URL
-core.info('Getting archive URL')
-const archiveUrl = await getArchiveUrl(
-accessToken,
-owner,
-repo,
-ref,
-commit
-)
-
-// Download the archive
-core.info('Downloading the archive') // Do not print the archive URL because it has an embedded token
-await downloadFile(archiveUrl, fileStream)
-} finally {
-fileStream.end()
-await fileStreamClosed
-}
-
-// return Buffer.from(response.data) // response.data is ArrayBuffer
+// Download the archive
+let archiveData = await retryHelper.execute(async () => {
+core.info('Downloading the archive using the REST API')
+await await downloadArchive(accessToken, owner, repo, ref, commit)
 })
 
+// Write archive to disk
+core.info('Writing archive to disk')
+await fs.promises.writeFile(archivePath, archiveData)
+archiveData = undefined
+
+// // Get the archive URL using the REST API
+// await retryHelper.execute(async () => {
+// // Prepare the archive stream
+// core.debug(`Preparing the archive stream: ${archivePath}`)
+// await io.rmRF(archivePath)
+// const fileStream = fs.createWriteStream(archivePath)
+// const fileStreamClosed = getFileClosedPromise(fileStream)
+
+// try {
+// // Get the archive URL
+// core.info('Getting archive URL')
+// const archiveUrl = await getArchiveUrl(
+// accessToken,
+// owner,
+// repo,
+// ref,
+// commit
+// )
+
+// // Download the archive
+// core.info('Downloading the archive') // Do not print the archive URL because it has an embedded token
+// await downloadFile(archiveUrl, fileStream)
+// } finally {
+// fileStream.end()
+// await fileStreamClosed
+// }
+// })
+
+// return Buffer.from(response.data) // response.data is ArrayBuffer
+
 // // Download the archive
 // core.info('Downloading the archive') // Do not print the URL since it contains a token to download the archive
 // await downloadFile(archiveUrl, archivePath)
@@ -120,16 +134,15 @@ export async function downloadRepository(
 } as ExecOptions)
 }
 
-async function getArchiveUrl(
+async function downloadArchive(
 accessToken: string,
 owner: string,
 repo: string,
 ref: string,
 commit: string
-): Promise<string> {
+): Promise<Buffer> {
 const octokit = new github.GitHub(accessToken)
-const params: RequestOptions & ReposGetArchiveLinkParams = {
-method: 'HEAD',
+const params: ReposGetArchiveLinkParams = {
 owner: owner,
 repo: repo,
 archive_format: IS_WINDOWS ? 'zipball' : 'tarball',
@@ -139,54 +152,91 @@ async function getArchiveUrl(
 console.log('GOT THE RESPONSE')
 console.log(`status=${response.status}`)
 console.log(`headers=${JSON.stringify(response.headers)}`)
-console.log(`headers=${JSON.stringify(response.data)}`)
+console.log(`data=${JSON.stringify(response.data)}`)
 if (response.status != 200) {
 throw new Error(
 `Unexpected response from GitHub API. Status: '${response.status}'`
 )
 }
-console.log('GETTING THE LOCATION')
-const archiveUrl = response.headers['Location'] // Do not print the archive URL because it has an embedded token
-assert.ok(
-archiveUrl,
-`Expected GitHub API response to contain 'Location' header`
-)
-return archiveUrl
+return Buffer.from(response.data) // response.data is ArrayBuffer
+// console.log('GETTING THE LOCATION')
+// const archiveUrl = response.headers['Location'] // Do not print the archive URL because it has an embedded token
+// assert.ok(
+// archiveUrl,
+// `Expected GitHub API response to contain 'Location' header`
+// )
+// return archiveUrl
 }
 
-function downloadFile(url: string, fileStream: WriteStream): Promise<void> {
-return new Promise((resolve, reject) => {
-try {
-https.get(url, (response: IncomingMessage) => {
-if (response.statusCode != 200) {
-reject(`Request failed with status '${response.statusCode}'`)
-response.resume() // Consume response data to free up memory
-return
-}
+// async function getArchiveUrl(
+// accessToken: string,
+// owner: string,
+// repo: string,
+// ref: string,
+// commit: string
+// ): Promise<string> {
+// const octokit = new github.GitHub(accessToken)
+// const params: RequestOptions & ReposGetArchiveLinkParams = {
+// method: 'HEAD',
+// owner: owner,
+// repo: repo,
+// archive_format: IS_WINDOWS ? 'zipball' : 'tarball',
+// ref: refHelper.getDownloadRef(ref, commit)
+// }
+// const response = await octokit.repos.getArchiveLink(params)
+// console.log('GOT THE RESPONSE')
+// console.log(`status=${response.status}`)
+// console.log(`headers=${JSON.stringify(response.headers)}`)
+// console.log(`data=${JSON.stringify(response.data)}`)
+// if (response.status != 200) {
+// throw new Error(
+// `Unexpected response from GitHub API. Status: '${response.status}'`
+// )
+// }
+// console.log('GETTING THE LOCATION')
+// const archiveUrl = response.headers['Location'] // Do not print the archive URL because it has an embedded token
+// assert.ok(
+// archiveUrl,
+// `Expected GitHub API response to contain 'Location' header`
+// )
+// return archiveUrl
+// }
 
-response.on('data', chunk => {
-fileStream.write(chunk)
-})
-response.on('end', () => {
-resolve()
-})
-response.on('error', err => {
-reject(err)
-})
-})
-} catch (err) {
-reject(err)
-}
-})
-}
+// function downloadFile(url: string, fileStream: WriteStream): Promise<void> {
+// return new Promise((resolve, reject) => {
+// try {
+// https.get(url, (response: IncomingMessage) => {
+// if (response.statusCode != 200) {
+// reject(`Request failed with status '${response.statusCode}'`)
+// response.resume() // Consume response data to free up memory
+// return
+// }
 
-function getFileClosedPromise(stream: WriteStream): Promise<void> {
-return new Promise((resolve, reject) => {
-stream.on('error', err => {
-reject(err)
-})
-stream.on('finish', () => {
-resolve()
-})
-})
-}
+// response.on('data', chunk => {
+// fileStream.write(chunk)
+// })
+// response.on('end', () => {
+// resolve()
+// })
+// response.on('error', err => {
+// reject(err)
+// })
+// })
+// } catch (err) {
+// reject(err)
+// }
+// })
+// }
+
+// function getFileClosedPromise(stream: WriteStream): Promise<void> {
+// return new Promise((resolve, reject) => {
+// stream.on('error', err => {
+// reject(err)
+// })
+// stream.on('finish', () => {
+// resolve()
+// })
+// })
+// }
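One detail worth noting in the new layout: only the REST download runs inside retryHelper.execute, so a transient failure repeats the network call while the write to disk happens exactly once afterwards. A generic sketch of that retry-then-persist shape; this is not the action's own retry helper, and the attempt count and delay are assumptions:

// Illustrative retry wrapper; the action has its own retryHelper module.
async function executeWithRetry<T>(
  action: () => Promise<T>,
  maxAttempts = 3,
  delayMs = 1000
): Promise<T> {
  let lastError: Error | undefined
  for (let attempt = 1; attempt <= maxAttempts; attempt++) {
    try {
      // Only the wrapped action is retried; callers persist the result once.
      return await action()
    } catch (err) {
      lastError = err instanceof Error ? err : new Error(String(err))
      if (attempt < maxAttempts) {
        await new Promise(resolve => setTimeout(resolve, delayMs))
      }
    }
  }
  throw lastError
}

// Usage mirroring the new code path (names taken from the diff above):
// const archiveData = await executeWithRetry(() =>
//   downloadArchive(accessToken, owner, repo, ref, commit)
// )
// await fs.promises.writeFile(archivePath, archiveData)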