Compare commits


No commits in common. "096b500552f8ef398fe070364e7232be81bacad5" and "4a1fa615de57a7420d8c8dea5f6ec494b3e5bf41" have entirely different histories.

2 changed files with 188 additions and 255 deletions

dist/index.js vendored (206 changed lines)

@@ -8358,6 +8358,7 @@ const core = __importStar(__webpack_require__(470));
 const exec = __importStar(__webpack_require__(986));
 const fs = __importStar(__webpack_require__(747));
 const github = __importStar(__webpack_require__(469));
+const https = __importStar(__webpack_require__(211));
 const io = __importStar(__webpack_require__(1));
 const path = __importStar(__webpack_require__(622));
 const refHelper = __importStar(__webpack_require__(227));
@@ -8370,49 +8371,63 @@ function downloadRepository(accessToken, owner, repo, ref, commit, repositoryPath) {
         const runnerTemp = process.env['RUNNER_TEMP'];
         assert.ok(runnerTemp, 'RUNNER_TEMP not defined');
         const archivePath = path.join(runnerTemp, 'checkout.tar.gz');
-        // Ensure file does not exist
-        core.debug(`Ensuring archive file does not exist: ${archivePath}`);
-        yield io.rmRF(archivePath);
-        // Download the archive
-        let archiveData = yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () {
-            core.info('Downloading the archive using the REST API');
-            return yield downloadArchive(accessToken, owner, repo, ref, commit);
+        // await fs.promises.writeFile(archivePath, raw)
+        // Get the archive URL using the REST API
+        yield retryHelper.execute(() => __awaiter(this, void 0, void 0, function* () {
+            // Prepare the archive stream
+            core.debug(`Preparing the archive stream: ${archivePath}`);
+            yield io.rmRF(archivePath);
+            const fileStream = fs.createWriteStream(archivePath);
+            const fileStreamClosed = getFileClosedPromise(fileStream);
+            try {
+                // Get the archive URL using the GitHub REST API
+                core.info('Getting archive URL from GitHub REST API');
+                const octokit = new github.GitHub(accessToken);
+                const params = {
+                    method: 'HEAD',
+                    archive_format: IS_WINDOWS ? 'zipball' : 'tarball',
+                    owner: owner,
+                    repo: repo,
+                    ref: refHelper.getDownloadRef(ref, commit)
+                };
+                const response = yield octokit.repos.getArchiveLink(params);
+                console.log('GOT THE RESPONSE');
+                if (response.status != 302) {
+                    throw new Error(`Unexpected response from GitHub API. Status: '${response.status}'`);
+                }
+                console.log('GETTING THE LOCATION');
+                const archiveUrl = response.headers['Location']; // Do not print the archive URL because it has an embedded token
+                assert.ok(archiveUrl, `Expected GitHub API response to contain 'Location' header`);
+                // Download the archive
+                core.info('Downloading the archive'); // Do not print the archive URL because it has an embedded token
+                yield downloadFile(archiveUrl, fileStream);
+            }
+            finally {
+                yield fileStreamClosed;
+            }
+            // return Buffer.from(response.data) // response.data is ArrayBuffer
         }));
-        // Write archive to disk
-        core.info('Writing archive to disk');
-        yield fs.promises.writeFile(archivePath, archiveData);
-        archiveData = Buffer.from(''); // Free memory
-        // // Get the archive URL using the REST API
-        // await retryHelper.execute(async () => {
-        //   // Prepare the archive stream
-        //   core.debug(`Preparing the archive stream: ${archivePath}`)
-        //   await io.rmRF(archivePath)
-        //   const fileStream = fs.createWriteStream(archivePath)
-        //   const fileStreamClosed = getFileClosedPromise(fileStream)
-        //   try {
-        //     // Get the archive URL
-        //     core.info('Getting archive URL')
-        //     const archiveUrl = await getArchiveUrl(
-        //       accessToken,
-        //       owner,
-        //       repo,
-        //       ref,
-        //       commit
-        //     )
-        //     // Download the archive
-        //     core.info('Downloading the archive') // Do not print the archive URL because it has an embedded token
-        //     await downloadFile(archiveUrl, fileStream)
-        //   } finally {
-        //     fileStream.end()
-        //     await fileStreamClosed
-        //   }
-        // })
+        // // Download the archive
+        // core.info('Downloading the archive') // Do not print the URL since it contains a token to download the archive
+        // await downloadFile(archiveUrl, archivePath)
+        // // console.log(`status=${response.status}`)
+        // // console.log(`headers=${JSON.stringify(response.headers)}`)
+        // // console.log(`data=${response.data}`)
+        // // console.log(`data=${JSON.stringify(response.data)}`)
+        // // for (const key of Object.keys(response.data)) {
+        // //   console.log(`data['${key}']=${response.data[key]}`)
+        // // }
+        // // Write archive to file
+        // const runnerTemp = process.env['RUNNER_TEMP'] as string
+        // assert.ok(runnerTemp, 'RUNNER_TEMP not defined')
+        // const archivePath = path.join(runnerTemp, 'checkout.tar.gz')
+        // await io.rmRF(archivePath)
         // await fs.promises.writeFile(archivePath, raw)
         // // await exec.exec(`ls -la "${archiveFile}"`, [], {
         // //   cwd: repositoryPath
         // // } as ExecOptions)
         // Extract archive
-        const extractPath = path.join(runnerTemp, `checkout`);
+        const extractPath = path.join(runnerTemp, `checkout-archive${IS_WINDOWS ? '.zip' : '.tar.gz'}`);
         yield io.rmRF(extractPath);
         yield io.mkdirP(extractPath);
         if (IS_WINDOWS) {
@@ -8442,93 +8457,42 @@ function downloadRepository(accessToken, owner, repo, ref, commit, repositoryPath) {
     });
 }
 exports.downloadRepository = downloadRepository;
-function downloadArchive(accessToken, owner, repo, ref, commit) {
-    return __awaiter(this, void 0, void 0, function* () {
-        const octokit = new github.GitHub(accessToken);
-        const params = {
-            owner: owner,
-            repo: repo,
-            archive_format: IS_WINDOWS ? 'zipball' : 'tarball',
-            ref: refHelper.getDownloadRef(ref, commit)
-        };
-        const response = yield octokit.repos.getArchiveLink(params);
-        console.log('GOT THE RESPONSE');
-        console.log(`status=${response.status}`);
-        console.log(`headers=${JSON.stringify(response.headers)}`);
-        console.log(`data=${JSON.stringify(response.data)}`);
-        if (response.status != 200) {
-            throw new Error(`Unexpected response from GitHub API. Status: '${response.status}'`);
-        }
-        return Buffer.from(response.data); // response.data is ArrayBuffer
-    });
-}
-// async function getArchiveUrl(
-//   accessToken: string,
-//   owner: string,
-//   repo: string,
-//   ref: string,
-//   commit: string
-// ): Promise<string> {
-//   const octokit = new github.GitHub(accessToken)
-//   const params: RequestOptions & ReposGetArchiveLinkParams = {
-//     method: 'HEAD',
-//     owner: owner,
-//     repo: repo,
-//     archive_format: IS_WINDOWS ? 'zipball' : 'tarball',
-//     ref: refHelper.getDownloadRef(ref, commit)
-//   }
-//   const response = await octokit.repos.getArchiveLink(params)
-//   console.log('GOT THE RESPONSE')
-//   console.log(`status=${response.status}`)
-//   console.log(`headers=${JSON.stringify(response.headers)}`)
-//   console.log(`data=${JSON.stringify(response.data)}`)
-//   if (response.status != 200) {
-//     throw new Error(
-//       `Unexpected response from GitHub API. Status: '${response.status}'`
-//     )
-//   }
-//   console.log('GETTING THE LOCATION')
-//   const archiveUrl = response.headers['Location'] // Do not print the archive URL because it has an embedded token
-//   assert.ok(
-//     archiveUrl,
-//     `Expected GitHub API response to contain 'Location' header`
-//   )
-//   return archiveUrl
-// }
-// function downloadFile(url: string, fileStream: WriteStream): Promise<void> {
-//   return new Promise((resolve, reject) => {
-//     try {
-//       https.get(url, (response: IncomingMessage) => {
-//         if (response.statusCode != 200) {
-//           reject(`Request failed with status '${response.statusCode}'`)
-//           response.resume() // Consume response data to free up memory
-//           return
-//         }
-//         response.on('data', chunk => {
-//           fileStream.write(chunk)
-//         })
-//         response.on('end', () => {
-//           resolve()
-//         })
-//         response.on('error', err => {
-//           reject(err)
-//         })
-//       })
-//     } catch (err) {
-//       reject(err)
-//     }
-//   })
-// }
-// function getFileClosedPromise(stream: WriteStream): Promise<void> {
-//   return new Promise((resolve, reject) => {
-//     stream.on('error', err => {
-//       reject(err)
-//     })
-//     stream.on('finish', () => {
-//       resolve()
-//     })
-//   })
-// }
+function downloadFile(url, fileStream) {
+    return new Promise((resolve, reject) => {
+        try {
+            https.get(url, (response) => {
+                if (response.statusCode != 200) {
+                    reject(`Request failed with status '${response.statusCode}'`);
+                    response.resume(); // Consume response data to free up memory
+                    return;
+                }
+                response.on('data', chunk => {
+                    fileStream.write(chunk);
+                });
+                response.on('end', () => {
+                    resolve();
+                });
+                response.on('error', err => {
+                    reject(err);
+                });
+                // response.pipe(fileStream)
+            });
+        }
+        catch (err) {
+            reject(err);
+        }
+    });
+}
+function getFileClosedPromise(stream) {
+    return new Promise((resolve, reject) => {
+        stream.on('error', err => {
+            reject(err);
+        });
+        stream.on('finish', () => {
+            resolve();
+        });
+    });
+}
 /***/ }),


@@ -11,7 +11,8 @@ import * as retryHelper from './retry-helper'
 import * as toolCache from '@actions/tool-cache'
 import {ExecOptions} from '@actions/exec/lib/interfaces'
 import {IncomingMessage} from 'http'
-import {RequestOptions, ReposGetArchiveLinkParams} from '@octokit/rest'
+import {ReposGetArchiveLinkParams} from '@octokit/rest'
+import {RequestOptions} from 'https'
 import {WriteStream} from 'fs'

 const IS_WINDOWS = process.platform === 'win32'
@@ -28,52 +29,78 @@ export async function downloadRepository(
   const runnerTemp = process.env['RUNNER_TEMP'] as string
   assert.ok(runnerTemp, 'RUNNER_TEMP not defined')
   const archivePath = path.join(runnerTemp, 'checkout.tar.gz')
-  // Ensure file does not exist
-  core.debug(`Ensuring archive file does not exist: ${archivePath}`)
-  await io.rmRF(archivePath)
-  // Download the archive
-  let archiveData = await retryHelper.execute(async () => {
-    core.info('Downloading the archive using the REST API')
-    return await downloadArchive(accessToken, owner, repo, ref, commit)
+  // await fs.promises.writeFile(archivePath, raw)
+  // Get the archive URL using the REST API
+  await retryHelper.execute(async () => {
+    // Prepare the archive stream
+    core.debug(`Preparing the archive stream: ${archivePath}`)
+    await io.rmRF(archivePath)
+    const fileStream = fs.createWriteStream(archivePath)
+    const fileStreamClosed = getFileClosedPromise(fileStream)
+    try {
+      // Get the archive URL using the GitHub REST API
+      core.info('Getting archive URL from GitHub REST API')
+      const octokit = new github.GitHub(accessToken)
+      const params: RequestOptions & ReposGetArchiveLinkParams = {
+        method: 'HEAD',
+        archive_format: IS_WINDOWS ? 'zipball' : 'tarball',
+        owner: owner,
+        repo: repo,
+        ref: refHelper.getDownloadRef(ref, commit)
+      }
+      const response = await octokit.repos.getArchiveLink(params)
+      console.log('GOT THE RESPONSE')
+      if (response.status != 302) {
+        throw new Error(
+          `Unexpected response from GitHub API. Status: '${response.status}'`
+        )
+      }
+      console.log('GETTING THE LOCATION')
+      const archiveUrl = response.headers['Location'] // Do not print the archive URL because it has an embedded token
+      assert.ok(
+        archiveUrl,
+        `Expected GitHub API response to contain 'Location' header`
+      )
+      // Download the archive
+      core.info('Downloading the archive') // Do not print the archive URL because it has an embedded token
+      await downloadFile(archiveUrl, fileStream)
+    } finally {
+      await fileStreamClosed
+    }
+    // return Buffer.from(response.data) // response.data is ArrayBuffer
   })
-  // Write archive to disk
-  core.info('Writing archive to disk')
-  await fs.promises.writeFile(archivePath, archiveData)
-  archiveData = Buffer.from('') // Free memory
-  // // Get the archive URL using the REST API
-  // await retryHelper.execute(async () => {
-  //   // Prepare the archive stream
-  //   core.debug(`Preparing the archive stream: ${archivePath}`)
-  //   await io.rmRF(archivePath)
-  //   const fileStream = fs.createWriteStream(archivePath)
-  //   const fileStreamClosed = getFileClosedPromise(fileStream)
-  //   try {
-  //     // Get the archive URL
-  //     core.info('Getting archive URL')
-  //     const archiveUrl = await getArchiveUrl(
-  //       accessToken,
-  //       owner,
-  //       repo,
-  //       ref,
-  //       commit
-  //     )
-  //     // Download the archive
-  //     core.info('Downloading the archive') // Do not print the archive URL because it has an embedded token
-  //     await downloadFile(archiveUrl, fileStream)
-  //   } finally {
-  //     fileStream.end()
-  //     await fileStreamClosed
-  //   }
-  // })
+  // // Download the archive
+  // core.info('Downloading the archive') // Do not print the URL since it contains a token to download the archive
+  // await downloadFile(archiveUrl, archivePath)
+  // // console.log(`status=${response.status}`)
+  // // console.log(`headers=${JSON.stringify(response.headers)}`)
+  // // console.log(`data=${response.data}`)
+  // // console.log(`data=${JSON.stringify(response.data)}`)
+  // // for (const key of Object.keys(response.data)) {
+  // //   console.log(`data['${key}']=${response.data[key]}`)
+  // // }
+  // // Write archive to file
+  // const runnerTemp = process.env['RUNNER_TEMP'] as string
+  // assert.ok(runnerTemp, 'RUNNER_TEMP not defined')
+  // const archivePath = path.join(runnerTemp, 'checkout.tar.gz')
+  // await io.rmRF(archivePath)
+  // await fs.promises.writeFile(archivePath, raw)
+  // // await exec.exec(`ls -la "${archiveFile}"`, [], {
+  // //   cwd: repositoryPath
+  // // } as ExecOptions)

   // Extract archive
-  const extractPath = path.join(runnerTemp, `checkout`)
+  const extractPath = path.join(
+    runnerTemp,
+    `checkout-archive${IS_WINDOWS ? '.zip' : '.tar.gz'}`
+  )
   await io.rmRF(extractPath)
   await io.mkdirP(extractPath)
   if (IS_WINDOWS) {
@@ -81,6 +108,9 @@ export async function downloadRepository(
   } else {
     await toolCache.extractTar(archivePath, extractPath)
   }
+  // await exec.exec(`tar -xzf "${archiveFile}"`, [], {
+  //   cwd: extractPath
+  // } as ExecOptions)

   // Determine the real directory to copy (ignore extra dir at root of the archive)
   const archiveFileNames = await fs.promises.readdir(extractPath)
@@ -104,101 +134,40 @@ export async function downloadRepository(
   } as ExecOptions)
 }

-async function downloadArchive(
-  accessToken: string,
-  owner: string,
-  repo: string,
-  ref: string,
-  commit: string
-): Promise<Buffer> {
-  const octokit = new github.GitHub(accessToken)
-  const params: ReposGetArchiveLinkParams = {
-    owner: owner,
-    repo: repo,
-    archive_format: IS_WINDOWS ? 'zipball' : 'tarball',
-    ref: refHelper.getDownloadRef(ref, commit)
-  }
-  const response = await octokit.repos.getArchiveLink(params)
-  console.log('GOT THE RESPONSE')
-  console.log(`status=${response.status}`)
-  console.log(`headers=${JSON.stringify(response.headers)}`)
-  console.log(`data=${JSON.stringify(response.data)}`)
-  if (response.status != 200) {
-    throw new Error(
-      `Unexpected response from GitHub API. Status: '${response.status}'`
-    )
-  }
-  return Buffer.from(response.data) // response.data is ArrayBuffer
-}
-
-// async function getArchiveUrl(
-//   accessToken: string,
-//   owner: string,
-//   repo: string,
-//   ref: string,
-//   commit: string
-// ): Promise<string> {
-//   const octokit = new github.GitHub(accessToken)
-//   const params: RequestOptions & ReposGetArchiveLinkParams = {
-//     method: 'HEAD',
-//     owner: owner,
-//     repo: repo,
-//     archive_format: IS_WINDOWS ? 'zipball' : 'tarball',
-//     ref: refHelper.getDownloadRef(ref, commit)
-//   }
-//   const response = await octokit.repos.getArchiveLink(params)
-//   console.log('GOT THE RESPONSE')
-//   console.log(`status=${response.status}`)
-//   console.log(`headers=${JSON.stringify(response.headers)}`)
-//   console.log(`data=${JSON.stringify(response.data)}`)
-//   if (response.status != 200) {
-//     throw new Error(
-//       `Unexpected response from GitHub API. Status: '${response.status}'`
-//     )
-//   }
-//   console.log('GETTING THE LOCATION')
-//   const archiveUrl = response.headers['Location'] // Do not print the archive URL because it has an embedded token
-//   assert.ok(
-//     archiveUrl,
-//     `Expected GitHub API response to contain 'Location' header`
-//   )
-//   return archiveUrl
-// }
-
-// function downloadFile(url: string, fileStream: WriteStream): Promise<void> {
-//   return new Promise((resolve, reject) => {
-//     try {
-//       https.get(url, (response: IncomingMessage) => {
-//         if (response.statusCode != 200) {
-//           reject(`Request failed with status '${response.statusCode}'`)
-//           response.resume() // Consume response data to free up memory
-//           return
-//         }
-//         response.on('data', chunk => {
-//           fileStream.write(chunk)
-//         })
-//         response.on('end', () => {
-//           resolve()
-//         })
-//         response.on('error', err => {
-//           reject(err)
-//         })
-//       })
-//     } catch (err) {
-//       reject(err)
-//     }
-//   })
-// }
-
-// function getFileClosedPromise(stream: WriteStream): Promise<void> {
-//   return new Promise((resolve, reject) => {
-//     stream.on('error', err => {
-//       reject(err)
-//     })
-//     stream.on('finish', () => {
-//       resolve()
-//     })
-//   })
-// }
+function downloadFile(url: string, fileStream: WriteStream): Promise<void> {
+  return new Promise((resolve, reject) => {
+    try {
+      https.get(url, (response: IncomingMessage) => {
+        if (response.statusCode != 200) {
+          reject(`Request failed with status '${response.statusCode}'`)
+          response.resume() // Consume response data to free up memory
+          return
+        }
+        response.on('data', chunk => {
+          fileStream.write(chunk)
+        })
+        response.on('end', () => {
+          resolve()
+        })
+        response.on('error', err => {
+          reject(err)
+        })
+        // response.pipe(fileStream)
+      })
+    } catch (err) {
+      reject(err)
+    }
+  })
+}
+
+function getFileClosedPromise(stream: WriteStream): Promise<void> {
+  return new Promise((resolve, reject) => {
+    stream.on('error', err => {
+      reject(err)
+    })
+    stream.on('finish', () => {
+      resolve()
+    })
+  })
+}
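
For reference, a minimal standalone sketch of the streaming approach the new downloadFile and getFileClosedPromise helpers take, using only Node's built-in https and fs modules. The names downloadToFile and destPath are illustrative and not part of the diff; it also uses response.pipe, the variant the new code leaves as a commented hint, instead of manual 'data'/'end' handlers.

import * as fs from 'fs'
import * as https from 'https'
import {IncomingMessage} from 'http'

// Hypothetical helper: stream the body of `url` into the file at `destPath`.
// Assumes any auth token is already embedded in the URL, as it is in the
// archive URL returned by the GitHub API's 'Location' header.
function downloadToFile(url: string, destPath: string): Promise<void> {
  return new Promise((resolve, reject) => {
    const fileStream = fs.createWriteStream(destPath)
    fileStream.on('error', err => reject(err))
    https
      .get(url, (response: IncomingMessage) => {
        if (response.statusCode !== 200) {
          response.resume() // drain the response so the socket is released
          fileStream.destroy()
          reject(new Error(`Request failed with status '${response.statusCode}'`))
          return
        }
        // pipe() handles backpressure and ends the file stream when done
        response.pipe(fileStream)
        fileStream.on('finish', () => resolve())
        response.on('error', err => reject(err))
      })
      .on('error', err => reject(err))
  })
}

// Example usage (archiveUrl and archivePath as in the diff):
//   await downloadToFile(archiveUrl, archivePath)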