
switch to actions-toolkit implementation

Signed-off-by: CrazyMax <crazy-max@users.noreply.github.com>
CrazyMax committed 2023-02-20 22:32:55 +01:00
parent 766400ca14
commit b5c378621c
No known key found for this signature in database
GPG key ID: 3248E46B6BB8C7F7

15 changed files with 260 additions and 349 deletions
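In short, the commit drops the action's own helpers (the local getInputList, asyncForEach, the src/github.ts Octokit wrapper and the tmpDir helper) in favor of their @docker/actions-toolkit equivalents. A minimal sketch of the new call pattern, assembled from the changes below rather than copied verbatim from the repository:

import {GitHub} from '@docker/actions-toolkit/lib/github';
import {Toolkit} from '@docker/actions-toolkit/lib/toolkit';
import {Util} from '@docker/actions-toolkit/lib/util';

async function example(): Promise<void> {
  // Util.getInputList replaces the local getInputList(name, true) helper
  const images = Util.getInputList('images', {ignoreComma: true});
  // Toolkit#github.repoData() replaces the local github.repo(token) helper
  const toolkit = new Toolkit({githubToken: process.env.GITHUB_TOKEN ?? ''});
  const repo = await toolkit.github.repoData();
  // GitHub.context replaces the local github.context() wrapper
  const context = GitHub.context;
  console.log(images.length, repo.name, context.eventName);
}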

__tests__/context.test.ts
@@ -1,168 +1,63 @@
-import {describe, expect, it, jest} from '@jest/globals';
-import * as fs from 'fs';
-import * as path from 'path';
+import {beforeEach, describe, expect, test} from '@jest/globals';

 import * as context from '../src/context';

-jest.spyOn(context, 'tmpDir').mockImplementation((): string => {
-  const tmpDir = path.join('/tmp/.docker-metadata-action-jest').split(path.sep).join(path.posix.sep);
-  if (!fs.existsSync(tmpDir)) {
-    fs.mkdirSync(tmpDir, {recursive: true});
-  }
-  return tmpDir;
-});
-
-describe('getInputList', () => {
-  it('single line correctly', async () => {
-    await setInput('foo', 'bar');
-    const res = context.getInputList('foo');
-    expect(res).toEqual(['bar']);
-  });
-
-  it('multiline correctly', async () => {
-    setInput('foo', 'bar\nbaz');
-    const res = context.getInputList('foo');
-    expect(res).toEqual(['bar', 'baz']);
-  });
-
-  it('empty lines correctly', async () => {
-    setInput('foo', 'bar\n\nbaz');
-    const res = context.getInputList('foo');
-    expect(res).toEqual(['bar', 'baz']);
-  });
-
-  it('comment correctly', async () => {
-    setInput('foo', 'bar\n#com\n"#taken"\nhello#comment\nbaz');
-    const res = context.getInputList('foo');
-    expect(res).toEqual(['bar', '#taken', 'hello', 'baz']);
-  });
-
-  it('comma correctly', async () => {
-    setInput('foo', 'bar,baz');
-    const res = context.getInputList('foo');
-    expect(res).toEqual(['bar', 'baz']);
-  });
-
-  it('empty result correctly', async () => {
-    setInput('foo', 'bar,baz,');
-    const res = context.getInputList('foo');
-    expect(res).toEqual(['bar', 'baz']);
-  });
-
-  it('different new lines correctly', async () => {
-    setInput('foo', 'bar\r\nbaz');
-    const res = context.getInputList('foo');
-    expect(res).toEqual(['bar', 'baz']);
-  });
-
-  it('different new lines and comma correctly', async () => {
-    setInput('foo', 'bar\r\nbaz,bat');
-    const res = context.getInputList('foo');
-    expect(res).toEqual(['bar', 'baz', 'bat']);
-  });
-
-  it('multiline and ignoring comma correctly', async () => {
-    setInput('cache-from', 'user/app:cache\ntype=local,src=path/to/dir');
-    const res = context.getInputList('cache-from', true);
-    expect(res).toEqual(['user/app:cache', 'type=local,src=path/to/dir']);
-  });
-
-  it('different new lines and ignoring comma correctly', async () => {
-    setInput('cache-from', 'user/app:cache\r\ntype=local,src=path/to/dir');
-    const res = context.getInputList('cache-from', true);
-    expect(res).toEqual(['user/app:cache', 'type=local,src=path/to/dir']);
-  });
-
-  it('multiline values', async () => {
-    setInput(
-      'secrets',
-      `GIT_AUTH_TOKEN=abcdefgh,ijklmno=0123456789
-"MYSECRET=aaaaaaaa
-bbbbbbb
-ccccccccc"
-FOO=bar`
-    );
-    const res = context.getInputList('secrets', true);
-    expect(res).toEqual([
-      'GIT_AUTH_TOKEN=abcdefgh,ijklmno=0123456789',
-      `MYSECRET=aaaaaaaa
-bbbbbbb
-ccccccccc`,
-      'FOO=bar'
-    ]);
-  });
-
-  it('multiline values with empty lines', async () => {
-    setInput(
-      'secrets',
-      `GIT_AUTH_TOKEN=abcdefgh,ijklmno=0123456789
-"MYSECRET=aaaaaaaa
-bbbbbbb
-ccccccccc"
-FOO=bar
-"EMPTYLINE=aaaa
-bbbb
-ccc"`
-    );
-    const res = context.getInputList('secrets', true);
-    expect(res).toEqual([
-      'GIT_AUTH_TOKEN=abcdefgh,ijklmno=0123456789',
-      `MYSECRET=aaaaaaaa
-bbbbbbb
-ccccccccc`,
-      'FOO=bar',
-      `EMPTYLINE=aaaa
-bbbb
-ccc`
-    ]);
-  });
-
-  it('multiline values without quotes', async () => {
-    setInput(
-      'secrets',
-      `GIT_AUTH_TOKEN=abcdefgh,ijklmno=0123456789
-MYSECRET=aaaaaaaa
-bbbbbbb
-ccccccccc
-FOO=bar`
-    );
-    const res = context.getInputList('secrets', true);
-    expect(res).toEqual(['GIT_AUTH_TOKEN=abcdefgh,ijklmno=0123456789', 'MYSECRET=aaaaaaaa', 'bbbbbbb', 'ccccccccc', 'FOO=bar']);
-  });
-
-  it('multiline values escape quotes', async () => {
-    setInput(
-      'secrets',
-      `GIT_AUTH_TOKEN=abcdefgh,ijklmno=0123456789
-"MYSECRET=aaaaaaaa
-bbbb""bbb
-ccccccccc"
-FOO=bar`
-    );
-    const res = context.getInputList('secrets', true);
-    expect(res).toEqual([
-      'GIT_AUTH_TOKEN=abcdefgh,ijklmno=0123456789',
-      `MYSECRET=aaaaaaaa
-bbbb"bbb
-ccccccccc`,
-      'FOO=bar'
-    ]);
-  });
-});
-
-describe('asyncForEach', () => {
-  it('executes async tasks sequentially', async () => {
-    const testValues = [1, 2, 3, 4, 5];
-    const results: number[] = [];
-
-    await context.asyncForEach(testValues, async value => {
-      results.push(value);
-    });
-
-    expect(results).toEqual(testValues);
-  });
-});
+describe('getInputs', () => {
+  beforeEach(() => {
+    process.env = Object.keys(process.env).reduce((object, key) => {
+      if (!key.startsWith('INPUT_')) {
+        object[key] = process.env[key];
+      }
+      return object;
+    }, {});
+  });
+
+  // prettier-ignore
+  test.each([
+    [
+      0,
+      new Map<string, string>([
+        ['images', 'moby/buildkit\nghcr.io/moby/mbuildkit'],
+      ]),
+      {
+        bakeTarget: 'docker-metadata-action',
+        flavor: [],
+        githubToken: '',
+        images: ['moby/buildkit', 'ghcr.io/moby/mbuildkit'],
+        labels: [],
+        sepLabels: '\n',
+        sepTags: '\n',
+        tags: [],
+      } as context.Inputs
+    ],
+    [
+      1,
+      new Map<string, string>([
+        ['bake-target', 'metadata'],
+        ['images', 'moby/buildkit'],
+        ['sep-labels', ','],
+        ['sep-tags', ','],
+      ]),
+      {
+        bakeTarget: 'metadata',
+        flavor: [],
+        githubToken: '',
+        images: ['moby/buildkit'],
+        labels: [],
+        sepLabels: ',',
+        sepTags: ',',
+        tags: [],
+      } as context.Inputs
+    ]
+  ])(
+    '[%d] given %p as inputs, returns %p',
+    async (num: number, inputs: Map<string, string>, expected: context.Inputs) => {
+      inputs.forEach((value: string, name: string) => {
+        setInput(name, value);
+      });
+      expect(await context.getInputs()).toEqual(expected);
+    }
+  );
+});

 // See: https://github.com/actions/toolkit/blob/master/packages/core/src/core.ts#L67

__tests__/flavor.test.ts
@@ -1,4 +1,5 @@
 import {describe, expect, test} from '@jest/globals';
+
 import {Flavor, Transform} from '../src/flavor';

 describe('transform', () => {

__tests__/github.test.ts
@@ -1,14 +0,0 @@
-import {describe, expect, jest, it} from '@jest/globals';
-import * as github from '../src/github';
-import * as repoFixture from './fixtures/repo.json';
-
-jest.spyOn(github, 'repo').mockImplementation((): Promise<github.ReposGetResponseData> => {
-  return <Promise<github.ReposGetResponseData>>(repoFixture as unknown);
-});
-
-describe('repo', () => {
-  it('returns GitHub repository', async () => {
-    const repo = await github.repo(process.env.GITHUB_TOKEN || '');
-    expect(repo.name).not.toBeNull();
-  });
-});

__tests__/image.test.ts
@@ -1,4 +1,5 @@
 import {describe, expect, test} from '@jest/globals';
+
 import {Transform, Image} from '../src/image';

 describe('transform', () => {

__tests__/meta.test.ts
@@ -2,19 +2,17 @@ import {beforeEach, describe, expect, jest, test} from '@jest/globals';
 import * as fs from 'fs';
 import * as path from 'path';
 import * as dotenv from 'dotenv';
-import moment from 'moment-timezone';
-import {getInputs, Inputs} from '../src/context';
-import * as github from '../src/github';
-import {Meta, Version} from '../src/meta';
 import {Context} from '@actions/github/lib/context';
+import {GitHub} from '@docker/actions-toolkit/lib/github';
+import {Toolkit} from '@docker/actions-toolkit/lib/toolkit';
+import {GitHubRepo} from '@docker/actions-toolkit/lib/types/github';

-import * as repoFixture from './fixtures/repo.json';
+import {getInputs, Inputs} from '../src/context';
+import {Meta, Version} from '../src/meta';

-jest.spyOn(github, 'repo').mockImplementation((): Promise<github.ReposGetResponseData> => {
-  return <Promise<github.ReposGetResponseData>>(repoFixture as unknown);
-});
-
-jest.spyOn(github, 'context').mockImplementation((): Context => {
-  return new Context();
+import repoFixture from './fixtures/repo.json';
+
+jest.spyOn(GitHub.prototype, 'repoData').mockImplementation((): Promise<GitHubRepo> => {
+  return <Promise<GitHubRepo>>(repoFixture as unknown);
 });

 jest.spyOn(global.Date.prototype, 'toISOString').mockImplementation(() => {
@@ -26,6 +24,7 @@ jest.mock('moment-timezone', () => {
 });

 beforeEach(() => {
+  jest.clearAllMocks();
   Object.keys(process.env).forEach(function (key) {
     if (key !== 'GITHUB_TOKEN' && key.startsWith('GITHUB_')) {
       delete process.env[key];
@@ -48,10 +47,9 @@ describe('isRawStatement', () => {
 const tagsLabelsTest = async (name: string, envFile: string, inputs: Inputs, exVersion: Version, exTags: Array<string>, exLabels: Array<string>) => {
   process.env = dotenv.parse(fs.readFileSync(path.join(__dirname, 'fixtures', envFile)));

-  const context = github.context();
-
-  const repo = await github.repo(process.env.GITHUB_TOKEN || '');
-  const meta = new Meta({...getInputs(), ...inputs}, context, repo);
+  const toolkit = new Toolkit();
+  const repo = await toolkit.github.repoData();
+  const meta = new Meta({...getInputs(), ...inputs}, new Context(), repo);

   const version = meta.version;
   expect(version).toEqual(exVersion);
@@ -2765,10 +2763,10 @@ describe('pr-head-sha', () => {
 ])('given %p with %p event', async (name: string, envFile: string, inputs: Inputs, exVersion: Version, exTags: Array<string>, exLabels: Array<string>) => {
   process.env = dotenv.parse(fs.readFileSync(path.join(__dirname, 'fixtures', envFile)));
   process.env.DOCKER_METADATA_PR_HEAD_SHA = 'true';

-  const context = github.context();
-  const repo = await github.repo(process.env.GITHUB_TOKEN || '');
-  const meta = new Meta({...getInputs(), ...inputs}, context, repo);
+  const toolkit = new Toolkit();
+  const repo = await toolkit.github.repoData();
+  const meta = new Meta({...getInputs(), ...inputs}, new Context(), repo);

   const version = meta.version;
   expect(version).toEqual(exVersion);
@@ -3707,10 +3705,10 @@ describe('json', () => {
   ]
 ])('given %p with %p event', async (name: string, envFile: string, inputs: Inputs, exJSON: unknown) => {
   process.env = dotenv.parse(fs.readFileSync(path.join(__dirname, 'fixtures', envFile)));

-  const context = github.context();
-  const repo = await github.repo(process.env.GITHUB_TOKEN || '');
-  const meta = new Meta({...getInputs(), ...inputs}, context, repo);
+  const toolkit = new Toolkit();
+  const repo = await toolkit.github.repoData();
+  const meta = new Meta({...getInputs(), ...inputs}, new Context(), repo);

   const jsonOutput = meta.getJSON();
   expect(jsonOutput).toEqual(exJSON);
@@ -4013,10 +4011,10 @@ describe('bake', () => {
   ]
 ])('given %p with %p event', async (name: string, envFile: string, inputs: Inputs, exBakeDefinition: unknown) => {
   process.env = dotenv.parse(fs.readFileSync(path.join(__dirname, 'fixtures', envFile)));

-  const context = github.context();
-  const repo = await github.repo(process.env.GITHUB_TOKEN || '');
-  const meta = new Meta({...getInputs(), ...inputs}, context, repo);
+  const toolkit = new Toolkit();
+  const repo = await toolkit.github.repoData();
+  const meta = new Meta({...getInputs(), ...inputs}, new Context(), repo);

   const bakeFile = meta.getBakeFile();
   expect(JSON.parse(fs.readFileSync(bakeFile, 'utf8'))).toEqual(exBakeDefinition);
@@ -4059,10 +4057,10 @@ describe('sepTags', () => {
   ]
 ])('given %p with %p event', async (name: string, envFile: string, inputs: Inputs, expTags: string) => {
   process.env = dotenv.parse(fs.readFileSync(path.join(__dirname, 'fixtures', envFile)));

-  const context = github.context();
-  const repo = await github.repo(process.env.GITHUB_TOKEN || '');
-  const meta = new Meta({...getInputs(), ...inputs}, context, repo);
+  const toolkit = new Toolkit();
+  const repo = await toolkit.github.repoData();
+  const meta = new Meta({...getInputs(), ...inputs}, new Context(), repo);

   expect(meta.getTags().join(inputs.sepTags)).toEqual(expTags);
 });

__tests__/tag.test.ts
@@ -1,4 +1,5 @@
 import {describe, expect, test} from '@jest/globals';
+
 import {Transform, Parse, Tag, Type, RefEvent, ShaFormat, DefaultPriorities} from '../src/tag';

 describe('transform', () => {

dev.Dockerfile
@@ -16,14 +16,14 @@ COPY --from=deps /vendor /

 FROM deps AS vendor-validate
 RUN --mount=type=bind,target=.,rw <<EOT
   set -e
   git add -A
   cp -rf /vendor/* .
   if [ -n "$(git status --porcelain -- yarn.lock)" ]; then
     echo >&2 'ERROR: Vendor result differs. Please vendor your package with "docker buildx bake vendor-update"'
     git status --porcelain -- yarn.lock
     exit 1
   fi
 EOT

 FROM deps AS build
@@ -36,14 +36,14 @@ COPY --from=build /out /

 FROM build AS build-validate
 RUN --mount=type=bind,target=.,rw <<EOT
   set -e
   git add -A
   cp -rf /out/* .
   if [ -n "$(git status --porcelain -- dist)" ]; then
     echo >&2 'ERROR: Build result differs. Please build first with "docker buildx bake build"'
     git status --porcelain -- dist
     exit 1
   fi
 EOT

 FROM deps AS format

jest.config.ts
@@ -1,5 +1,21 @@
+import fs from 'fs';
+import os from 'os';
+import path from 'path';
+
+const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'docker-metadata-action-'));
+
+process.env = Object.assign({}, process.env, {
+  TEMP: tmpDir,
+  GITHUB_REPOSITORY: 'docker/metadata-action',
+  RUNNER_TEMP: path.join(tmpDir, 'runner-temp'),
+  RUNNER_TOOL_CACHE: path.join(tmpDir, 'runner-tool-cache')
+}) as {
+  [key: string]: string;
+};
+
 module.exports = {
   clearMocks: true,
+  testEnvironment: 'node',
   moduleFileExtensions: ['js', 'ts'],
   setupFiles: ['dotenv/config'],
   testMatch: ['**/*.test.ts'],
@@ -9,5 +25,7 @@ module.exports = {
   moduleNameMapper: {
     '^csv-parse/sync': '<rootDir>/node_modules/csv-parse/dist/cjs/sync.cjs'
   },
+  collectCoverageFrom: ['src/**/{!(main.ts),}.ts'],
+  coveragePathIgnorePatterns: ['lib/', 'node_modules/', '__mocks__/', '__tests__/'],
   verbose: true
 };

package.json
@@ -31,6 +31,7 @@
   "dependencies": {
     "@actions/core": "^1.10.0",
     "@actions/github": "^5.1.1",
+    "@docker/actions-toolkit": "^0.1.0-beta.14",
     "@renovate/pep440": "^1.0.0",
     "csv-parse": "^5.3.3",
     "handlebars": "^4.7.7",

src/context.ts
@@ -1,10 +1,5 @@
-import * as fs from 'fs';
-import * as os from 'os';
-import * as path from 'path';
 import * as core from '@actions/core';
-import {parse} from 'csv-parse/sync';
-
-let _tmpDir: string;
+import {Util} from '@docker/actions-toolkit/lib/util';

 export interface Inputs {
   images: string[];
@@ -17,58 +12,15 @@ export interface Inputs {
   githubToken: string;
 }

-export function tmpDir(): string {
-  if (!_tmpDir) {
-    _tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'docker-metadata-action-')).split(path.sep).join(path.posix.sep);
-  }
-  return _tmpDir;
-}
-
 export function getInputs(): Inputs {
   return {
-    images: getInputList('images', true),
-    tags: getInputList('tags', true),
-    flavor: getInputList('flavor', true),
-    labels: getInputList('labels', true),
+    images: Util.getInputList('images', {ignoreComma: true}),
+    tags: Util.getInputList('tags', {ignoreComma: true}),
+    flavor: Util.getInputList('flavor', {ignoreComma: true}),
+    labels: Util.getInputList('labels', {ignoreComma: true}),
     sepTags: core.getInput('sep-tags', {trimWhitespace: false}) || `\n`,
     sepLabels: core.getInput('sep-labels', {trimWhitespace: false}) || `\n`,
     bakeTarget: core.getInput('bake-target') || `docker-metadata-action`,
     githubToken: core.getInput('github-token')
   };
 }
-
-export function getInputList(name: string, ignoreComma?: boolean): string[] {
-  const res: Array<string> = [];
-
-  const items = core.getInput(name);
-  if (items == '') {
-    return res;
-  }
-
-  const records = parse(items, {
-    columns: false,
-    relaxQuotes: true,
-    comment: '#',
-    relaxColumnCount: true,
-    skipEmptyLines: true
-  });
-
-  for (const record of records as Array<string[]>) {
-    if (record.length == 1) {
-      res.push(record[0]);
-      continue;
-    } else if (!ignoreComma) {
-      res.push(...record);
-      continue;
-    }
-    res.push(record.join(','));
-  }
-
-  return res.filter(item => item).map(pat => pat.trim());
-}
-
-export const asyncForEach = async (array, callback) => {
-  for (let index = 0; index < array.length; index++) {
-    await callback(array[index], index, array);
-  }
-};
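The removed tests at the top of this diff pin down the comma/newline semantics the action relies on. Assuming Util.getInputList keeps the same behaviour as the removed local helper (which is what swapping it in here implies), the expectations look roughly like this; input names follow the GitHub Actions INPUT_ convention and the values are only illustrative:

import {Util} from '@docker/actions-toolkit/lib/util';

// Inputs arrive as INPUT_<NAME> environment variables, which is also how the tests set them up.
process.env['INPUT_IMAGES'] = 'user/app\nghcr.io/user/app';
process.env['INPUT_TAGS'] = 'type=ref,event=branch\ntype=semver,pattern={{version}}';

// ignoreComma keeps comma-separated key=value pairs together within a single line,
// while newlines still split the list.
console.log(Util.getInputList('images', {ignoreComma: true})); // ['user/app', 'ghcr.io/user/app']
console.log(Util.getInputList('tags', {ignoreComma: true}));   // ['type=ref,event=branch', 'type=semver,pattern={{version}}']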

src/github.ts
@@ -1,16 +0,0 @@
-import * as github from '@actions/github';
-import {Context} from '@actions/github/lib/context';
-import {components as OctoOpenApiTypes} from '@octokit/openapi-types';
-
-export type ReposGetResponseData = OctoOpenApiTypes['schemas']['repository'];
-
-export function context(): Context {
-  return github.context;
-}
-
-export async function repo(token: string): Promise<ReposGetResponseData> {
-  return github
-    .getOctokit(token)
-    .rest.repos.get({...github.context.repo})
-    .then(response => response.data as ReposGetResponseData);
-}

src/main.ts
@@ -1,34 +1,45 @@
 import * as fs from 'fs';
-import {getInputs, Inputs} from './context';
-import * as github from './github';
-import {Meta, Version} from './meta';
 import * as core from '@actions/core';
+import * as actionsToolkit from '@docker/actions-toolkit';
 import {Context} from '@actions/github/lib/context';
+import {GitHub} from '@docker/actions-toolkit/lib/github';
+import {Toolkit} from '@docker/actions-toolkit/lib/toolkit';
+
+import {getInputs, Inputs} from './context';
+import {Meta, Version} from './meta';
+
+function setOutput(name: string, value: string) {
+  core.setOutput(name, value);
+  core.exportVariable(`DOCKER_METADATA_OUTPUT_${name.replace(/\W/g, '_').toUpperCase()}`, value);
+}

-async function run() {
-  try {
+actionsToolkit.run(
+  // main
+  async () => {
     const inputs: Inputs = await getInputs();
     if (inputs.images.length == 0) {
       throw new Error(`images input required`);
     }

-    const context: Context = github.context();
-    const repo: github.ReposGetResponseData = await github.repo(inputs.githubToken);
+    const toolkit = new Toolkit({githubToken: inputs.githubToken});
+    const context: Context = GitHub.context;
+    const repo = await toolkit.github.repoData();

-    core.startGroup(`Context info`);
-    core.info(`eventName: ${context.eventName}`);
-    core.info(`sha: ${context.sha}`);
-    core.info(`ref: ${context.ref}`);
-    core.info(`workflow: ${context.workflow}`);
-    core.info(`action: ${context.action}`);
-    core.info(`actor: ${context.actor}`);
-    core.info(`runNumber: ${context.runNumber}`);
-    core.info(`runId: ${context.runId}`);
-    core.endGroup();
+    await core.group(`Context info`, async () => {
+      core.info(`eventName: ${context.eventName}`);
+      core.info(`sha: ${context.sha}`);
+      core.info(`ref: ${context.ref}`);
+      core.info(`workflow: ${context.workflow}`);
+      core.info(`action: ${context.action}`);
+      core.info(`actor: ${context.actor}`);
+      core.info(`runNumber: ${context.runNumber}`);
+      core.info(`runId: ${context.runId}`);
+    });

     if (core.isDebug()) {
-      core.startGroup(`Webhook payload`);
-      core.info(JSON.stringify(context.payload, null, 2));
-      core.endGroup();
+      await core.group(`Webhook payload`, async () => {
+        core.info(JSON.stringify(context.payload, null, 2));
+      });
     }

     const meta: Meta = new Meta(inputs, context, repo);
@@ -37,9 +48,9 @@ async function run() {
     if (meta.version.main == undefined || meta.version.main.length == 0) {
       core.warning(`No Docker image version has been generated. Check tags input.`);
     } else {
-      core.startGroup(`Docker image version`);
-      core.info(version.main || '');
-      core.endGroup();
+      await core.group(`Docker image version`, async () => {
+        core.info(version.main || '');
+      });
     }
     setOutput('version', version.main || '');
@@ -48,44 +59,35 @@ async function run() {
     if (tags.length == 0) {
       core.warning('No Docker tag has been generated. Check tags input.');
     } else {
-      core.startGroup(`Docker tags`);
-      for (const tag of tags) {
-        core.info(tag);
-      }
-      core.endGroup();
+      await core.group(`Docker tags`, async () => {
+        for (const tag of tags) {
+          core.info(tag);
+        }
+      });
     }
     setOutput('tags', tags.join(inputs.sepTags));

     // Docker labels
     const labels: Array<string> = meta.getLabels();
-    core.startGroup(`Docker labels`);
-    for (const label of labels) {
-      core.info(label);
-    }
-    core.endGroup();
-    setOutput('labels', labels.join(inputs.sepLabels));
+    await core.group(`Docker labels`, async () => {
+      for (const label of labels) {
+        core.info(label);
+      }
+      setOutput('labels', labels.join(inputs.sepLabels));
+    });

     // JSON
     const jsonOutput = meta.getJSON();
-    core.startGroup(`JSON output`);
-    core.info(JSON.stringify(jsonOutput, null, 2));
-    core.endGroup();
-    setOutput('json', JSON.stringify(jsonOutput));
+    await core.group(`JSON output`, async () => {
+      core.info(JSON.stringify(jsonOutput, null, 2));
+      setOutput('json', JSON.stringify(jsonOutput));
+    });

     // Bake file definition
     const bakeFile: string = meta.getBakeFile();
-    core.startGroup(`Bake file definition`);
-    core.info(fs.readFileSync(bakeFile, 'utf8'));
-    core.endGroup();
-    setOutput('bake-file', bakeFile);
-  } catch (error) {
-    core.setFailed(error.message);
+    await core.group(`Bake file definition`, async () => {
+      core.info(fs.readFileSync(bakeFile, 'utf8'));
+      setOutput('bake-file', bakeFile);
+    });
   }
-}
-
-function setOutput(name: string, value: string) {
-  core.setOutput(name, value);
-  core.exportVariable(`DOCKER_METADATA_OUTPUT_${name.replace(/\W/g, '_').toUpperCase()}`, value);
-}
-
-run();
+);
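Two behavioural details worth noting from the new main.ts: the top-level try/catch plus core.setFailed is now handled by actionsToolkit.run(), and setOutput still mirrors every output into a DOCKER_METADATA_OUTPUT_* environment variable. A small sketch of that naming rule, using a hypothetical helper that is not part of the codebase and purely for illustration:

// Same transform as the setOutput helper above: non-word characters become '_', then uppercase.
function outputEnvName(name: string): string {
  return `DOCKER_METADATA_OUTPUT_${name.replace(/\W/g, '_').toUpperCase()}`;
}

console.log(outputEnvName('version'));   // DOCKER_METADATA_OUTPUT_VERSION
console.log(outputEnvName('bake-file')); // DOCKER_METADATA_OUTPUT_BAKE_FILE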

src/meta.ts
@@ -4,13 +4,15 @@ import * as path from 'path';
 import moment from 'moment-timezone';
 import * as pep440 from '@renovate/pep440';
 import * as semver from 'semver';
-import {Inputs, tmpDir} from './context';
-import {ReposGetResponseData} from './github';
+import * as core from '@actions/core';
+import {Context} from '@actions/github/lib/context';
+import {Context as ToolkitContext} from '@docker/actions-toolkit/lib/context';
+import {GitHubRepo} from '@docker/actions-toolkit/lib/types/github';
+
+import {Inputs} from './context';
 import * as icl from './image';
 import * as tcl from './tag';
 import * as fcl from './flavor';
-import * as core from '@actions/core';
-import {Context} from '@actions/github/lib/context';

 export interface Version {
   main: string | undefined;
@@ -23,13 +25,13 @@ export class Meta {
   private readonly inputs: Inputs;
   private readonly context: Context;
-  private readonly repo: ReposGetResponseData;
+  private readonly repo: GitHubRepo;
   private readonly images: icl.Image[];
   private readonly tags: tcl.Tag[];
   private readonly flavor: fcl.Flavor;
   private readonly date: Date;

-  constructor(inputs: Inputs, context: Context, repo: ReposGetResponseData) {
+  constructor(inputs: Inputs, context: Context, repo: GitHubRepo) {
     // Needs to override Git reference with pr ref instead of upstream branch ref
     // for pull_request_target event
     // https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target
@@ -498,7 +500,7 @@
   }

   public getBakeFile(): string {
-    const bakeFile = path.join(tmpDir(), 'docker-metadata-action-bake.json').split(path.sep).join(path.posix.sep);
+    const bakeFile = path.join(ToolkitContext.tmpDir(), 'docker-metadata-action-bake.json');
     fs.writeFileSync(
       bakeFile,
       JSON.stringify(

tsconfig.json
@@ -1,20 +1,21 @@
 {
   "compilerOptions": {
+    "esModuleInterop": true,
     "target": "es6",
     "module": "commonjs",
+    "strict": true,
     "newLine": "lf",
     "outDir": "./lib",
     "rootDir": "./src",
-    "esModuleInterop": true,
     "forceConsistentCasingInFileNames": true,
-    "strict": true,
     "noImplicitAny": false,
     "resolveJsonModule": true,
     "useUnknownInCatchVariables": false,
   },
   "exclude": [
-    "./__tests__/**/*",
-    "./lib/**/*",
     "node_modules",
+    "**/*.test.ts",
     "jest.config.ts"
   ]
 }

yarn.lock
@@ -2,7 +2,7 @@
 # yarn lockfile v1


-"@actions/core@^1.10.0":
+"@actions/core@^1.10.0", "@actions/core@^1.2.6":
   version "1.10.0"
   resolved "https://registry.yarnpkg.com/@actions/core/-/core-1.10.0.tgz#44551c3c71163949a2f06e94d9ca2157a0cfac4f"
   integrity sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==
@@ -10,6 +10,13 @@
     "@actions/http-client" "^2.0.1"
     uuid "^8.3.2"

+"@actions/exec@^1.0.0", "@actions/exec@^1.1.1":
+  version "1.1.1"
+  resolved "https://registry.yarnpkg.com/@actions/exec/-/exec-1.1.1.tgz#2e43f28c54022537172819a7cf886c844221a611"
+  integrity sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w==
+  dependencies:
+    "@actions/io" "^1.0.1"
+
 "@actions/github@^5.1.1":
   version "5.1.1"
   resolved "https://registry.yarnpkg.com/@actions/github/-/github-5.1.1.tgz#40b9b9e1323a5efcf4ff7dadd33d8ea51651bbcb"
@@ -27,6 +34,23 @@
   dependencies:
     tunnel "^0.0.6"

+"@actions/io@^1.0.1", "@actions/io@^1.1.1", "@actions/io@^1.1.2":
+  version "1.1.2"
+  resolved "https://registry.yarnpkg.com/@actions/io/-/io-1.1.2.tgz#766ac09674a289ce0f1550ffe0a6eac9261a8ea9"
+  integrity sha512-d+RwPlMp+2qmBfeLYPLXuSRykDIFEwdTA0MMxzS9kh4kvP1ftrc/9fzy6pX6qAjthdXruHQ6/6kjT/DNo5ALuw==
+
+"@actions/tool-cache@^2.0.1":
+  version "2.0.1"
+  resolved "https://registry.yarnpkg.com/@actions/tool-cache/-/tool-cache-2.0.1.tgz#8a649b9c07838d9d750c9864814e66a7660ab720"
+  integrity sha512-iPU+mNwrbA8jodY8eyo/0S/QqCKDajiR8OxWTnSk/SnYg0sj8Hp4QcUEVC1YFpHWXtrfbQrE13Jz4k4HXJQKcA==
+  dependencies:
+    "@actions/core" "^1.2.6"
+    "@actions/exec" "^1.0.0"
+    "@actions/http-client" "^2.0.1"
+    "@actions/io" "^1.1.1"
+    semver "^6.1.0"
+    uuid "^3.3.2"
+
 "@ampproject/remapping@^2.1.0":
   version "2.1.2"
   resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.1.2.tgz#4edca94973ded9630d20101cd8559cedb8d8bd34"
@@ -542,6 +566,22 @@
   dependencies:
     "@cspotcode/source-map-consumer" "0.8.0"

+"@docker/actions-toolkit@^0.1.0-beta.14":
+  version "0.1.0-beta.14"
+  resolved "https://registry.yarnpkg.com/@docker/actions-toolkit/-/actions-toolkit-0.1.0-beta.14.tgz#82fa8a6b9802a7f770fde3ddcef1cf591739a80b"
+  integrity sha512-N+aqiO0E2ygoaBORN8fx4K7j/CzJ2nCSgOewtDm0gdzrch8qZmTU14e3oNAbZlP8Q34Lk45KKefm5wDfLipRqg==
+  dependencies:
+    "@actions/core" "^1.10.0"
+    "@actions/exec" "^1.1.1"
+    "@actions/github" "^5.1.1"
+    "@actions/http-client" "^2.0.1"
+    "@actions/io" "^1.1.2"
+    "@actions/tool-cache" "^2.0.1"
+    csv-parse "^5.3.5"
+    jwt-decode "^3.1.2"
+    semver "^7.3.8"
+    tmp "^0.2.1"
+
 "@eslint/eslintrc@^1.2.1":
   version "1.2.1"
   resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-1.2.1.tgz#8b5e1c49f4077235516bc9ec7d41378c0f69b8c6"
@@ -1610,6 +1650,11 @@ csv-parse@*, csv-parse@^5.3.3:
   resolved "https://registry.yarnpkg.com/csv-parse/-/csv-parse-5.3.3.tgz#3b75d2279e2edb550cbc54c65b25cbbf3d0033ad"
   integrity sha512-kEWkAPleNEdhFNkHQpFHu9RYPogsFj3dx6bCxL847fsiLgidzWg0z/O0B1kVWMJUc5ky64zGp18LX2T3DQrOfw==

+csv-parse@^5.3.5:
+  version "5.3.5"
+  resolved "https://registry.yarnpkg.com/csv-parse/-/csv-parse-5.3.5.tgz#9924bbba9f7056122f06b7af18edc1a7f022ce99"
+  integrity sha512-8O5KTIRtwmtD3+EVfW6BCgbwZqJbhTYsQZry12F1TP5RUp0sD9tp1UnCWic3n0mLOhzeocYaCZNYxOGSg3dmmQ==
+
 data-urls@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/data-urls/-/data-urls-2.0.0.tgz#156485a72963a970f5d5821aaf642bef2bf2db9b"
@@ -2826,6 +2871,11 @@ json5@2.x, json5@^2.1.2:
   resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.3.tgz#78cd6f1a19bdc12b73db5ad0c61efd66c1e29283"
   integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==

+jwt-decode@^3.1.2:
+  version "3.1.2"
+  resolved "https://registry.yarnpkg.com/jwt-decode/-/jwt-decode-3.1.2.tgz#3fb319f3675a2df0c2895c8f5e9fa4b67b04ed59"
+  integrity sha512-UfpWE/VZn0iP50d8cz9NrZLM9lSWhcJ+0Gt/nm4by88UL+J1SiKN8/5dkjMmbEzwL2CAe+67GsegCbIKtbp75A==
+
 kleur@^3.0.3:
   version "3.0.3"
   resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e"
@@ -3292,11 +3342,18 @@ semver@7.x, semver@^7.3.2, semver@^7.3.5, semver@^7.3.7:
   dependencies:
     lru-cache "^6.0.0"

-semver@^6.0.0, semver@^6.3.0:
+semver@^6.0.0, semver@^6.1.0, semver@^6.3.0:
   version "6.3.0"
   resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d"
   integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==

+semver@^7.3.8:
+  version "7.3.8"
+  resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.8.tgz#07a78feafb3f7b32347d725e33de7e2a2df67798"
+  integrity sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==
+  dependencies:
+    lru-cache "^6.0.0"
+
 shebang-command@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea"
@@ -3476,6 +3533,13 @@ throat@^6.0.1:
   resolved "https://registry.yarnpkg.com/throat/-/throat-6.0.1.tgz#d514fedad95740c12c2d7fc70ea863eb51ade375"
   integrity sha512-8hmiGIJMDlwjg7dlJ4yKGLK8EsYqKgPWbG3b4wjJddKNwc7N7Dpn08Df4szr/sZdMVeOstrdYSsqzX6BYbcB+w==

+tmp@^0.2.1:
+  version "0.2.1"
+  resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.2.1.tgz#8457fc3037dcf4719c251367a1af6500ee1ccf14"
+  integrity sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==
+  dependencies:
+    rimraf "^3.0.0"
+
 tmpl@1.0.x:
   version "1.0.5"
   resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc"
@@ -3627,6 +3691,11 @@ uri-js@^4.2.2:
   dependencies:
     punycode "^2.1.0"

+uuid@^3.3.2:
+  version "3.4.0"
+  resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee"
+  integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==
+
 uuid@^8.3.2:
   version "8.3.2"
   resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2"