commit 64fcc0c59a
parent 35bb830cfd
eric sciple, 2019-12-10 01:32:25 -05:00
8 changed files with 3285 additions and 145 deletions

dist/index.js vendored (3118 changes)
File diff suppressed because one or more lines are too long

package-lock.json generated (42 changes)

@@ -28,6 +28,26 @@
"resolved": "https://registry.npmjs.org/@actions/io/-/io-1.0.1.tgz",
"integrity": "sha512-rhq+tfZukbtaus7xyUtwKfuiCRXd1hWSfmJNEpFgBQJ4woqPEpsBw04awicjwz9tyG2/MVhAEMfVn664Cri5zA=="
},
"@actions/tool-cache": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/@actions/tool-cache/-/tool-cache-1.1.2.tgz",
"integrity": "sha512-IJczPaZr02ECa3Lgws/TJEVco9tjOujiQSZbO3dHuXXjhd5vrUtfOgGwhmz3/f97L910OraPZ8SknofUk6RvOQ==",
"requires": {
"@actions/core": "^1.1.0",
"@actions/exec": "^1.0.1",
"@actions/io": "^1.0.1",
"semver": "^6.1.0",
"typed-rest-client": "^1.4.0",
"uuid": "^3.3.2"
},
"dependencies": {
"semver": {
"version": "6.3.0",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
"integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw=="
}
}
},
"@babel/code-frame": { "@babel/code-frame": {
"version": "7.5.5", "version": "7.5.5",
"resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.5.5.tgz", "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.5.5.tgz",
@@ -6584,6 +6604,11 @@
"tslib": "^1.8.1"
}
},
"tunnel": {
"version": "0.0.4",
"resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.4.tgz",
"integrity": "sha1-LTeFoVjBdMmhbcLARuxfxfF0IhM="
},
"tunnel-agent": { "tunnel-agent": {
"version": "0.6.0", "version": "0.6.0",
"resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz",
@@ -6608,6 +6633,15 @@
"prelude-ls": "~1.1.2"
}
},
"typed-rest-client": {
"version": "1.5.0",
"resolved": "https://registry.npmjs.org/typed-rest-client/-/typed-rest-client-1.5.0.tgz",
"integrity": "sha512-DVZRlmsfnTjp6ZJaatcdyvvwYwbWvR4YDNFDqb+qdTxpvaVP99YCpBkA8rxsLtAPjBVoDe4fNsnMIdZTiPuKWg==",
"requires": {
"tunnel": "0.0.4",
"underscore": "1.8.3"
}
},
"typescript": { "typescript": {
"version": "3.6.4", "version": "3.6.4",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-3.6.4.tgz", "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.6.4.tgz",
@@ -6634,6 +6668,11 @@
}
}
},
"underscore": {
"version": "1.8.3",
"resolved": "https://registry.npmjs.org/underscore/-/underscore-1.8.3.tgz",
"integrity": "sha1-Tz+1OxBuYJf8+ctBCfKl6b36UCI="
},
"union-value": { "union-value": {
"version": "1.0.1", "version": "1.0.1",
"resolved": "https://registry.npmjs.org/union-value/-/union-value-1.0.1.tgz", "resolved": "https://registry.npmjs.org/union-value/-/union-value-1.0.1.tgz",
@@ -6734,8 +6773,7 @@
"uuid": {
"version": "3.3.3",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.3.tgz",
- "integrity": "sha512-pW0No1RGHgzlpHJO1nsVrHKpOEIxkGg1xB+v0ZmdNH5OAeAwzAVrCnI2/6Mtx+Uys6iaylxa+D3g4j63IKKjSQ==",
- "dev": true
+ "integrity": "sha512-pW0No1RGHgzlpHJO1nsVrHKpOEIxkGg1xB+v0ZmdNH5OAeAwzAVrCnI2/6Mtx+Uys6iaylxa+D3g4j63IKKjSQ=="
},
"validate-npm-package-license": {
"version": "3.0.4",

package.json

@@ -32,7 +32,8 @@
"@actions/core": "^1.1.3",
"@actions/exec": "^1.0.1",
"@actions/github": "^2.0.0",
- "@actions/io": "^1.0.1"
+ "@actions/io": "^1.0.1",
+ "@actions/tool-cache": "^1.1.2"
},
"devDependencies": {
"@types/jest": "^24.0.23",

src/git-command-manager.ts

@@ -3,6 +3,7 @@ import * as exec from '@actions/exec'
import * as fshelper from './fs-helper'
import * as io from '@actions/io'
import * as path from 'path'
+ import * as retryHelper from './retry-helper'
import {GitVersion} from './git-version'

// Auth header not supported before 2.9

@@ -154,22 +155,10 @@ class GitCommandManager {
args.push(arg)
}
- let attempt = 1
- const maxAttempts = 3
- while (attempt <= maxAttempts) {
- const allowAllExitCodes = attempt < maxAttempts
- const output = await this.execGit(args, allowAllExitCodes)
- if (output.exitCode === 0) {
- break
- }
- const seconds = this.getRandomIntInclusive(1, 10)
- core.warning(
- `Git fetch failed with exit code ${output.exitCode}. Waiting ${seconds} seconds before trying again.`
- )
- await this.sleep(seconds * 1000)
- attempt++
- }
+ const that = this
+ await retryHelper.execute(async () => {
+ await that.execGit(args)
+ })
}

getWorkingDirectory(): string {

@@ -192,22 +181,10 @@ class GitCommandManager {
async lfsFetch(ref: string): Promise<void> {
const args = ['lfs', 'fetch', 'origin', ref]
- let attempt = 1
- const maxAttempts = 3
- while (attempt <= maxAttempts) {
- const allowAllExitCodes = attempt < maxAttempts
- const output = await this.execGit(args, allowAllExitCodes)
- if (output.exitCode === 0) {
- break
- }
- const seconds = this.getRandomIntInclusive(1, 10)
- core.warning(
- `Git lfs fetch failed with exit code ${output.exitCode}. Waiting ${seconds} seconds before trying again.`
- )
- await this.sleep(seconds * 1000)
- attempt++
- }
+ const that = this
+ await retryHelper.execute(async () => {
+ await that.execGit(args)
+ })
}

async lfsInstall(): Promise<void> {

@@ -381,16 +358,6 @@ class GitCommandManager {
core.debug(`Set git useragent to: ${gitHttpUserAgent}`)
this.gitEnv['GIT_HTTP_USER_AGENT'] = gitHttpUserAgent
}
- private getRandomIntInclusive(minimum: number, maximum: number): number {
- minimum = Math.floor(minimum)
- maximum = Math.floor(maximum)
- return Math.floor(Math.random() * (maximum - minimum + 1)) + minimum
- }
- private async sleep(milliseconds): Promise<void> {
- return new Promise(resolve => setTimeout(resolve, milliseconds))
- }
}

class GitOutput {
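
Both fetch and lfsFetch now delegate their retry loops to the shared retry-helper module added later in this commit; inside the class they capture this as that before entering the callback. A standalone sketch of the same pattern, with a hypothetical fetchWithRetry wrapper standing in for the class methods:

import * as retryHelper from './retry-helper'

// Hypothetical wrapper: run a git invocation inside the retry helper and let a thrown
// error (non-zero exit) trigger the helper's wait-and-retry logic.
async function fetchWithRetry(
  execGit: (args: string[]) => Promise<unknown>,
  args: string[]
): Promise<void> {
  await retryHelper.execute(async () => {
    await execGit(args)
  })
}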

src/git-source-provider.ts

@@ -90,6 +90,7 @@ export async function getSource(settings: ISourceSettings): Promise<void> {
settings.repositoryOwner,
settings.repositoryName,
settings.ref,
+ settings.commit,
settings.repositoryPath
)
} else {

src/github-api-helper.ts

@@ -3,11 +3,17 @@ import * as core from '@actions/core'
import * as exec from '@actions/exec'
import * as fs from 'fs'
import * as github from '@actions/github'
+ import * as https from 'https'
import * as io from '@actions/io'
import * as path from 'path'
- import {ReposGetArchiveLinkParams} from '@octokit/rest'
- import {defaultCoreCipherList} from 'constants'
+ import * as refHelper from './ref-helper'
+ import * as retryHelper from './retry-helper'
+ import * as toolCache from '@actions/tool-cache'
import {ExecOptions} from '@actions/exec/lib/interfaces'
+ import {IncomingMessage} from 'http'
+ import {ReposGetArchiveLinkParams} from '@octokit/rest'
+ import {RequestOptions} from 'https'
+ import {WriteStream} from 'fs'

const IS_WINDOWS = process.platform === 'win32'
@@ -16,44 +22,93 @@ export async function downloadRepository(
owner: string,
repo: string,
ref: string,
+ commit: string,
repositoryPath: string
): Promise<void> {
+ // Determine archive path
+ const runnerTemp = process.env['RUNNER_TEMP'] as string
+ assert.ok(runnerTemp, 'RUNNER_TEMP not defined')
+ const archivePath = path.join(runnerTemp, 'checkout.tar.gz')
+ // await fs.promises.writeFile(archivePath, raw)
+ // Get the archive URL using the REST API
+ let archiveUrl = retryHelper.execute(async () => {
+ // Prepare the archive stream
+ core.debug(`Preparing the archive stream: ${archivePath}`)
+ await io.rmRF(archivePath)
+ const fileStream = fs.createWriteStream(archivePath)
+ const fileStreamClosed = getFileClosedPromise(fileStream)
+ try {
+ // Get the archive URL using the GitHub REST API
+ core.info('Getting archive URL from GitHub REST API')
const octokit = new github.GitHub(accessToken)
- const params: ReposGetArchiveLinkParams = {
+ const params: RequestOptions & ReposGetArchiveLinkParams = {
+ method: 'HEAD',
archive_format: IS_WINDOWS ? 'zipball' : 'tarball',
owner: owner,
repo: repo,
- ref: ref
+ ref: refHelper.getDownloadRef(ref, commit)
}
- // todo: retry
const response = await octokit.repos.getArchiveLink(params)
- if (response.status != 200) {
+ if (response.status != 302) {
throw new Error(
- `Unexpected response from GitHub API. Status: '${response.status}'; Data: '${response.data}'`
+ `Unexpected response from GitHub API. Status: '${response.status}'`
)
}
- console.log(`status=${response.status}`)
- console.log(`headers=${JSON.stringify(response.headers)}`)
- // console.log(`data=${response.data}`)
- // console.log(`data=${JSON.stringify(response.data)}`)
- // for (const key of Object.keys(response.data)) {
- // console.log(`data['${key}']=${response.data[key]}`)
- // }
- const runnerTemp = process.env['RUNNER_TEMP'] as string
- assert.ok(runnerTemp, 'RUNNER_TEMP not defined')
- const archiveFile = path.join(runnerTemp, 'checkout-archive.tar.gz')
- await io.rmRF(archiveFile)
- await fs.promises.writeFile(archiveFile, Buffer.from(response.data))
- await exec.exec(`ls -la "${archiveFile}"`, [], {
- cwd: repositoryPath
- } as ExecOptions)
- const extractPath = path.join(runnerTemp, 'checkout-archive')
+ const archiveUrl = response.headers['Location'] // Do not print the archive URL because it has an embedded token
+ assert.ok(
+ archiveUrl,
+ `Expected GitHub API response to contain 'Location' header`
+ )
+ // Download the archive
+ core.info('Downloading the archive') // Do not print the archive URL because it has an embedded token
+ downloadFile(archiveUrl, fileStream)
+ } finally {
+ await fileStreamClosed
+ }
+ // return Buffer.from(response.data) // response.data is ArrayBuffer
+ })
+ // // Download the archive
+ // core.info('Downloading the archive') // Do not print the URL since it contains a token to download the archive
+ // await downloadFile(archiveUrl, archivePath)
+ // // console.log(`status=${response.status}`)
+ // // console.log(`headers=${JSON.stringify(response.headers)}`)
+ // // console.log(`data=${response.data}`)
+ // // console.log(`data=${JSON.stringify(response.data)}`)
+ // // for (const key of Object.keys(response.data)) {
+ // // console.log(`data['${key}']=${response.data[key]}`)
+ // // }
+ // // Write archive to file
+ // const runnerTemp = process.env['RUNNER_TEMP'] as string
+ // assert.ok(runnerTemp, 'RUNNER_TEMP not defined')
+ // const archivePath = path.join(runnerTemp, 'checkout.tar.gz')
+ // await io.rmRF(archivePath)
+ // await fs.promises.writeFile(archivePath, raw)
+ // // await exec.exec(`ls -la "${archiveFile}"`, [], {
+ // // cwd: repositoryPath
+ // // } as ExecOptions)
+ // Extract archive
+ const extractPath = path.join(
+ runnerTemp,
+ `checkout-archive${IS_WINDOWS ? '.zip' : '.tar.gz'}`
+ )
await io.rmRF(extractPath)
await io.mkdirP(extractPath)
- await exec.exec(`tar -xzf "${archiveFile}"`, [], {
- cwd: extractPath
- } as ExecOptions)
+ if (IS_WINDOWS) {
+ await toolCache.extractZip(archivePath, extractPath)
+ } else {
+ await toolCache.extractTar(archivePath, extractPath)
+ }
+ // await exec.exec(`tar -xzf "${archiveFile}"`, [], {
+ // cwd: extractPath
+ // } as ExecOptions)

// Determine the real directory to copy (ignore extra dir at root of the archive)
const archiveFileNames = await fs.promises.readdir(extractPath)
@@ -76,3 +131,40 @@ export async function downloadRepository(
cwd: repositoryPath
} as ExecOptions)
}
function downloadFile(url: string, fileStream: WriteStream): Promise<void> {
return new Promise((resolve, reject) => {
try {
https.get(url, (response: IncomingMessage) => {
if (response.statusCode != 200) {
reject(`Request failed with status '${response.statusCode}'`)
response.resume() // Consume response data to free up memory
return
}
response.on('data', chunk => {
fileStream.write(chunk)
})
response.on('end', () => {
resolve()
})
response.on('error', err => {
reject(err)
})
})
} catch (err) {
reject(err)
}
})
}
function getFileClosedPromise(stream: WriteStream): Promise<void> {
return new Promise((resolve, reject) => {
stream.on('error', err => {
reject(err)
})
stream.on('finish', () => {
resolve()
})
})
}
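
downloadFile streams the response body into an already-open WriteStream, and getFileClosedPromise lets the caller wait until that stream has finished flushing. A minimal usage sketch of the pair, assuming the two helpers above are in scope; saveArchive, the URL/path arguments, and the explicit end() call are additions for illustration:

import * as fs from 'fs'

// Hypothetical caller: stream an archive URL to disk and wait for the file to be flushed.
async function saveArchive(archiveUrl: string, archivePath: string): Promise<void> {
  // Open the destination stream first so downloadFile can write chunks as they arrive
  const fileStream = fs.createWriteStream(archivePath)
  const fileStreamClosed = getFileClosedPromise(fileStream)
  try {
    await downloadFile(archiveUrl, fileStream)
  } finally {
    // end() triggers the stream's 'finish' event, which resolves fileStreamClosed
    fileStream.end()
    await fileStreamClosed
  }
}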

src/ref-helper.ts

@@ -5,6 +5,15 @@ export interface ICheckoutInfo {
startPoint: string
}
export function getDownloadRef(ref: string, commit: string): string {
if (commit) {
return commit
}
// todo fix this to work with refs/pull etc
return ref
}
export async function getCheckoutInfo(
git: IGitCommandManager,
ref: string,
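
getDownloadRef prefers the resolved commit SHA and falls back to the ref (the refs/pull case is still flagged as a todo above). Two illustrative calls with made-up values:

import * as refHelper from './ref-helper'

// The commit wins whenever one was resolved
refHelper.getDownloadRef('refs/heads/master', '1234567890abcdef1234567890abcdef12345678')
// -> '1234567890abcdef1234567890abcdef12345678'

// Falls back to the ref when no commit is available
refHelper.getDownloadRef('v1.2.3', '')
// -> 'v1.2.3'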

src/retry-helper.ts Normal file (36 changes)

@@ -0,0 +1,36 @@
import * as core from '@actions/core'
const maxAttempts = 3
const minSeconds = 10
const maxSeconds = 20
export async function execute<T>(action: () => Promise<T>): Promise<T> {
let attempt = 1
while (attempt < maxAttempts) {
// Try
try {
return await action()
} catch (err) {
core.info(err.message)
}
// Sleep
const seconds = getRandomIntInclusive(minSeconds, maxSeconds)
core.info(`Waiting ${seconds} before trying again`)
await sleep(seconds * 1000)
attempt++
}
// Last attempt
return await action()
}
function getRandomIntInclusive(minimum: number, maximum: number): number {
minimum = Math.floor(minimum)
maximum = Math.floor(maximum)
return Math.floor(Math.random() * (maximum - minimum + 1)) + minimum
}
async function sleep(milliseconds): Promise<void> {
return new Promise(resolve => setTimeout(resolve, milliseconds))
}
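
execute runs the supplied async action up to three times, logging the error and sleeping a random 10 to 20 seconds between failed attempts, and lets the final attempt's error propagate. A minimal usage sketch; the git command shown is illustrative:

import * as exec from '@actions/exec'
import * as retryHelper from './retry-helper'

async function run(): Promise<void> {
  await retryHelper.execute(async () => {
    // exec.exec throws on a non-zero exit code, which is what triggers a retry
    await exec.exec('git', ['fetch', '--tags', 'origin'])
  })
}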