import * as os from 'os'
import * as path from 'path'
import * as core from '@actions/core'
import artifactClient from '@actions/artifact'
import type {Artifact, FindOptions} from '@actions/artifact'
import {Minimatch} from 'minimatch'
import {Inputs, Outputs} from './constants'
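
// Maximum number of artifact downloads allowed to run at the same time.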
const PARALLEL_DOWNLOADS = 5
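
// Split an array into sub-arrays of at most `n` items, e.g.
// chunk([1, 2, 3, 4, 5], 2) => [[1, 2], [3, 4], [5]].
// Used below to cap how many downloads are in flight at once.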
export const chunk = <T>(arr: T[], n: number): T[][] =>
  arr.reduce((acc, cur, i) => {
    const index = Math.floor(i / n)
    acc[index] = [...(acc[index] || []), cur]
    return acc
  }, [] as T[][])

async function run(): Promise<void> {
  const inputs = {
    name: core.getInput(Inputs.Name, {required: false}),
    path: core.getInput(Inputs.Path, {required: false}),
    token: core.getInput(Inputs.GitHubToken, {required: false}),
    repository: core.getInput(Inputs.Repository, {required: false}),
    runID: parseInt(core.getInput(Inputs.RunID, {required: false})),
    pattern: core.getInput(Inputs.Pattern, {required: false}),
    mergeMultiple: core.getBooleanInput(Inputs.MergeMultiple, {required: false})
  }
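
  // Default to the workflow workspace, falling back to the current working directory.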
  if (!inputs.path) {
    inputs.path = process.env['GITHUB_WORKSPACE'] || process.cwd()
  }
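
  // Expand a leading `~` to the runner's home directory.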
  if (inputs.path.startsWith(`~`)) {
    inputs.path = inputs.path.replace('~', os.homedir())
  }

  const isSingleArtifactDownload = !!inputs.name
  const resolvedPath = path.resolve(inputs.path)
  core.debug(`Resolved path is ${resolvedPath}`)
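
  // When a token is provided, build `findBy` options so the artifact client
  // targets the given repository and workflow run instead of the current one.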
  const options: FindOptions = {}
  if (inputs.token) {
    const [repositoryOwner, repositoryName] = inputs.repository.split('/')
    if (!repositoryOwner || !repositoryName) {
      throw new Error(
        `Invalid repository: '${inputs.repository}'. Must be in format owner/repo`
      )
    }

    options.findBy = {
      token: inputs.token,
      workflowRunId: inputs.runID,
      repositoryName,
      repositoryOwner
    }
  }

  let artifacts: Artifact[] = []

  if (isSingleArtifactDownload) {
    core.info(`Downloading single artifact`)

    const {artifact: targetArtifact} = await artifactClient.getArtifact(
      inputs.name,
      options
    )

    if (!targetArtifact) {
      throw new Error(`Artifact '${inputs.name}' not found`)
    }

    core.debug(
      `Found named artifact '${inputs.name}' (ID: ${targetArtifact.id}, Size: ${targetArtifact.size})`
    )

    artifacts = [targetArtifact]
  } else {
    core.info(
      `No input name specified, downloading all artifacts. Extra directory with the artifact name will be created for each download`
    )

    const listArtifactResponse = await artifactClient.listArtifacts({
      latest: true,
      ...options
    })
    artifacts = listArtifactResponse.artifacts

    core.debug(`Found ${artifacts.length} artifacts in run`)
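
    // Optionally narrow the artifact list with a minimatch glob pattern.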
    if (inputs.pattern) {
      core.info(`Filtering artifacts by pattern '${inputs.pattern}'`)
      const matcher = new Minimatch(inputs.pattern)
      artifacts = artifacts.filter(artifact => matcher.match(artifact.name))
      core.debug(
        `Filtered from ${listArtifactResponse.artifacts.length} to ${artifacts.length} artifacts`
      )
    }
  }

  if (artifacts.length) {
    core.info(`Preparing to download the following artifacts:`)
    artifacts.forEach(artifact => {
      core.info(
        `- ${artifact.name} (ID: ${artifact.id}, Size: ${artifact.size})`
      )
    })
  }
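
  // Single-artifact and merged downloads go straight into the resolved path;
  // otherwise each artifact is placed in a sub-directory named after it.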
  const downloadPromises = artifacts.map(artifact =>
    artifactClient.downloadArtifact(artifact.id, {
      ...options,
      path:
        isSingleArtifactDownload || inputs.mergeMultiple
          ? resolvedPath
          : path.join(resolvedPath, artifact.name)
    })
  )
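
  // Await the downloads in batches of PARALLEL_DOWNLOADS to limit concurrency.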
  const chunkedPromises = chunk(downloadPromises, PARALLEL_DOWNLOADS)
  for (const chunk of chunkedPromises) {
    await Promise.all(chunk)
  }

  core.info(`Total of ${artifacts.length} artifact(s) downloaded`)
  core.setOutput(Outputs.DownloadPath, resolvedPath)
  core.info('Download artifact has finished successfully')
}

run().catch(err =>
  core.setFailed(`Unable to download artifact(s): ${err.message}`)
)