feat(atlassian): implement milestone M2 - jira command surface
This commit is contained in:
92
skills/atlassian/shared/scripts/src/adf.ts
Normal file
92
skills/atlassian/shared/scripts/src/adf.ts
Normal file
@@ -0,0 +1,92 @@
|
||||
const TEXT_NODE = "text";
|
||||
|
||||
function textNode(text: string) {
|
||||
return {
|
||||
type: TEXT_NODE,
|
||||
text,
|
||||
};
|
||||
}
|
||||
|
||||
function paragraphNode(lines: string[]) {
|
||||
const content: Array<{ type: string; text?: string }> = [];
|
||||
|
||||
lines.forEach((line, index) => {
|
||||
if (index > 0) {
|
||||
content.push({ type: "hardBreak" });
|
||||
}
|
||||
|
||||
if (line.length > 0) {
|
||||
content.push(textNode(line));
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
type: "paragraph",
|
||||
...(content.length > 0 ? { content } : {}),
|
||||
};
|
||||
}
|
||||
|
||||
export function markdownToAdf(input: string) {
|
||||
const lines = input.replace(/\r\n/g, "\n").split("\n");
|
||||
const content: Array<Record<string, unknown>> = [];
|
||||
let index = 0;
|
||||
|
||||
while (index < lines.length) {
|
||||
const current = lines[index]?.trimEnd() ?? "";
|
||||
|
||||
if (current.trim().length === 0) {
|
||||
index += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
const heading = current.match(/^(#{1,6})\s+(.*)$/);
|
||||
|
||||
if (heading) {
|
||||
content.push({
|
||||
type: "heading",
|
||||
attrs: { level: heading[1].length },
|
||||
content: [textNode(heading[2])],
|
||||
});
|
||||
index += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (/^[-*]\s+/.test(current)) {
|
||||
const items: Array<Record<string, unknown>> = [];
|
||||
|
||||
while (index < lines.length && /^[-*]\s+/.test(lines[index] ?? "")) {
|
||||
items.push({
|
||||
type: "listItem",
|
||||
content: [
|
||||
{
|
||||
type: "paragraph",
|
||||
content: [textNode((lines[index] ?? "").replace(/^[-*]\s+/, ""))],
|
||||
},
|
||||
],
|
||||
});
|
||||
index += 1;
|
||||
}
|
||||
|
||||
content.push({
|
||||
type: "bulletList",
|
||||
content: items,
|
||||
});
|
||||
continue;
|
||||
}
|
||||
|
||||
const paragraphLines: string[] = [];
|
||||
|
||||
while (index < lines.length && (lines[index]?.trim().length ?? 0) > 0) {
|
||||
paragraphLines.push(lines[index] ?? "");
|
||||
index += 1;
|
||||
}
|
||||
|
||||
content.push(paragraphNode(paragraphLines));
|
||||
}
|
||||
|
||||
return {
|
||||
type: "doc",
|
||||
version: 1,
|
||||
content,
|
||||
};
|
||||
}
|
||||
@@ -1,5 +1,67 @@
|
||||
import process from "node:process";
|
||||
import { pathToFileURL } from "node:url";
|
||||
|
||||
import { Command } from "commander";
|
||||
|
||||
import { loadConfig } from "./config.js";
|
||||
import { readWorkspaceFile } from "./files.js";
|
||||
import { createJiraClient } from "./jira.js";
|
||||
import { writeOutput } from "./output.js";
|
||||
import type { FetchLike, OutputFormat, Writer } from "./types.js";
|
||||
|
||||
type CliContext = {
|
||||
cwd?: string;
|
||||
env?: NodeJS.ProcessEnv;
|
||||
fetchImpl?: FetchLike;
|
||||
stdout?: Writer;
|
||||
stderr?: Writer;
|
||||
};
|
||||
|
||||
function resolveFormat(format: string | undefined): OutputFormat {
|
||||
return format === "text" ? "text" : "json";
|
||||
}
|
||||
|
||||
function createRuntime(context: CliContext) {
|
||||
const cwd = context.cwd ?? process.cwd();
|
||||
const env = context.env ?? process.env;
|
||||
const stdout = context.stdout ?? process.stdout;
|
||||
const stderr = context.stderr ?? process.stderr;
|
||||
let configCache: ReturnType<typeof loadConfig> | undefined;
|
||||
let jiraCache: ReturnType<typeof createJiraClient> | undefined;
|
||||
|
||||
function getConfig() {
|
||||
configCache ??= loadConfig(env, { cwd });
|
||||
return configCache;
|
||||
}
|
||||
|
||||
function getJiraClient() {
|
||||
jiraCache ??= createJiraClient({
|
||||
config: getConfig(),
|
||||
fetchImpl: context.fetchImpl,
|
||||
});
|
||||
return jiraCache;
|
||||
}
|
||||
|
||||
async function readBodyFile(filePath: string | undefined) {
|
||||
if (!filePath) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return readWorkspaceFile(filePath, cwd);
|
||||
}
|
||||
|
||||
return {
|
||||
cwd,
|
||||
stdout,
|
||||
stderr,
|
||||
readBodyFile,
|
||||
getConfig,
|
||||
getJiraClient,
|
||||
};
|
||||
}
|
||||
|
||||
export function buildProgram(context: CliContext = {}) {
|
||||
const runtime = createRuntime(context);
|
||||
const program = new Command()
|
||||
.name("atlassian")
|
||||
.description("Portable Atlassian CLI for multi-agent skills")
|
||||
@@ -10,13 +72,141 @@ program
|
||||
.description("Validate configuration and Atlassian connectivity")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action((options) => {
|
||||
const payload = {
|
||||
ok: false,
|
||||
message: "health is not implemented yet",
|
||||
format: options.format,
|
||||
};
|
||||
|
||||
console.log(JSON.stringify(payload, null, 2));
|
||||
writeOutput(
|
||||
runtime.stdout,
|
||||
{
|
||||
ok: true,
|
||||
data: {
|
||||
baseUrl: runtime.getConfig().baseUrl,
|
||||
jiraBaseUrl: runtime.getConfig().jiraBaseUrl,
|
||||
confluenceBaseUrl: runtime.getConfig().confluenceBaseUrl,
|
||||
defaultProject: runtime.getConfig().defaultProject,
|
||||
defaultSpace: runtime.getConfig().defaultSpace,
|
||||
},
|
||||
},
|
||||
resolveFormat(options.format),
|
||||
);
|
||||
});
|
||||
|
||||
program.parse(process.argv);
|
||||
program
|
||||
.command("jira-search")
|
||||
.requiredOption("--jql <jql>", "JQL expression to execute")
|
||||
.option("--max-results <number>", "Maximum results to return", "50")
|
||||
.option("--start-at <number>", "Result offset", "0")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action(async (options) => {
|
||||
const payload = await runtime.getJiraClient().searchIssues({
|
||||
jql: options.jql,
|
||||
maxResults: Number(options.maxResults),
|
||||
startAt: Number(options.startAt),
|
||||
});
|
||||
|
||||
writeOutput(runtime.stdout, payload, resolveFormat(options.format));
|
||||
});
|
||||
|
||||
program
|
||||
.command("jira-get")
|
||||
.requiredOption("--issue <issue>", "Issue key")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action(async (options) => {
|
||||
const payload = await runtime.getJiraClient().getIssue(options.issue);
|
||||
writeOutput(runtime.stdout, payload, resolveFormat(options.format));
|
||||
});
|
||||
|
||||
program
|
||||
.command("jira-create")
|
||||
.requiredOption("--type <type>", "Issue type name")
|
||||
.requiredOption("--summary <summary>", "Issue summary")
|
||||
.option("--project <project>", "Project key")
|
||||
.option("--description-file <path>", "Workspace-relative markdown/text file")
|
||||
.option("--dry-run", "Print the request without sending it")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action(async (options) => {
|
||||
const payload = await runtime.getJiraClient().createIssue({
|
||||
project: options.project,
|
||||
type: options.type,
|
||||
summary: options.summary,
|
||||
description: await runtime.readBodyFile(options.descriptionFile),
|
||||
dryRun: Boolean(options.dryRun),
|
||||
});
|
||||
|
||||
writeOutput(runtime.stdout, payload, resolveFormat(options.format));
|
||||
});
|
||||
|
||||
program
|
||||
.command("jira-update")
|
||||
.requiredOption("--issue <issue>", "Issue key")
|
||||
.option("--summary <summary>", "Updated summary")
|
||||
.option("--description-file <path>", "Workspace-relative markdown/text file")
|
||||
.option("--dry-run", "Print the request without sending it")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action(async (options) => {
|
||||
const payload = await runtime.getJiraClient().updateIssue({
|
||||
issue: options.issue,
|
||||
summary: options.summary,
|
||||
description: await runtime.readBodyFile(options.descriptionFile),
|
||||
dryRun: Boolean(options.dryRun),
|
||||
});
|
||||
|
||||
writeOutput(runtime.stdout, payload, resolveFormat(options.format));
|
||||
});
|
||||
|
||||
program
|
||||
.command("jira-comment")
|
||||
.requiredOption("--issue <issue>", "Issue key")
|
||||
.requiredOption("--body-file <path>", "Workspace-relative markdown/text file")
|
||||
.option("--dry-run", "Print the request without sending it")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action(async (options) => {
|
||||
const payload = await runtime.getJiraClient().commentIssue({
|
||||
issue: options.issue,
|
||||
body: (await runtime.readBodyFile(options.bodyFile)) as string,
|
||||
dryRun: Boolean(options.dryRun),
|
||||
});
|
||||
|
||||
writeOutput(runtime.stdout, payload, resolveFormat(options.format));
|
||||
});
|
||||
|
||||
program
|
||||
.command("jira-transitions")
|
||||
.requiredOption("--issue <issue>", "Issue key")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action(async (options) => {
|
||||
const payload = await runtime.getJiraClient().getTransitions(options.issue);
|
||||
writeOutput(runtime.stdout, payload, resolveFormat(options.format));
|
||||
});
|
||||
|
||||
program
|
||||
.command("jira-transition")
|
||||
.requiredOption("--issue <issue>", "Issue key")
|
||||
.requiredOption("--transition <transition>", "Transition ID")
|
||||
.option("--dry-run", "Print the request without sending it")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action(async (options) => {
|
||||
const payload = await runtime.getJiraClient().transitionIssue({
|
||||
issue: options.issue,
|
||||
transition: options.transition,
|
||||
dryRun: Boolean(options.dryRun),
|
||||
});
|
||||
|
||||
writeOutput(runtime.stdout, payload, resolveFormat(options.format));
|
||||
});
|
||||
|
||||
return program;
|
||||
}
|
||||
|
||||
export async function runCli(argv = process.argv, context: CliContext = {}) {
|
||||
const program = buildProgram(context);
|
||||
await program.parseAsync(argv);
|
||||
}
|
||||
|
||||
const isDirectExecution =
|
||||
Boolean(process.argv[1]) && import.meta.url === pathToFileURL(process.argv[1]).href;
|
||||
|
||||
if (isDirectExecution) {
|
||||
runCli().catch((error: unknown) => {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
process.stderr.write(`${message}\n`);
|
||||
process.exitCode = 1;
|
||||
});
|
||||
}
|
||||
|
||||
52
skills/atlassian/shared/scripts/src/config.ts
Normal file
52
skills/atlassian/shared/scripts/src/config.ts
Normal file
@@ -0,0 +1,52 @@
|
||||
import path from "node:path";
|
||||
|
||||
import { config as loadDotEnv } from "dotenv";
|
||||
|
||||
import type { AtlassianConfig } from "./types.js";
|
||||
|
||||
function normalizeBaseUrl(value: string) {
|
||||
return value.replace(/\/+$/, "");
|
||||
}
|
||||
|
||||
function readRequired(env: NodeJS.ProcessEnv, key: string) {
|
||||
const value = env[key]?.trim();
|
||||
|
||||
if (!value) {
|
||||
throw new Error(`Missing required environment variable: ${key}`);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
export function loadConfig(
|
||||
env: NodeJS.ProcessEnv = process.env,
|
||||
options?: {
|
||||
cwd?: string;
|
||||
},
|
||||
): AtlassianConfig {
|
||||
loadDotEnv({
|
||||
path: path.resolve(options?.cwd ?? process.cwd(), ".env"),
|
||||
processEnv: env as Record<string, string>,
|
||||
override: false,
|
||||
});
|
||||
|
||||
const baseUrl = normalizeBaseUrl(readRequired(env, "ATLASSIAN_BASE_URL"));
|
||||
|
||||
return {
|
||||
baseUrl,
|
||||
jiraBaseUrl: normalizeBaseUrl(env.ATLASSIAN_JIRA_BASE_URL?.trim() || baseUrl),
|
||||
confluenceBaseUrl: normalizeBaseUrl(env.ATLASSIAN_CONFLUENCE_BASE_URL?.trim() || baseUrl),
|
||||
email: readRequired(env, "ATLASSIAN_EMAIL"),
|
||||
apiToken: readRequired(env, "ATLASSIAN_API_TOKEN"),
|
||||
defaultProject: env.ATLASSIAN_DEFAULT_PROJECT?.trim() || undefined,
|
||||
defaultSpace: env.ATLASSIAN_DEFAULT_SPACE?.trim() || undefined,
|
||||
};
|
||||
}
|
||||
|
||||
export function createBasicAuthHeader(config: {
|
||||
email: string;
|
||||
apiToken: string;
|
||||
[key: string]: unknown;
|
||||
}) {
|
||||
return `Basic ${Buffer.from(`${config.email}:${config.apiToken}`).toString("base64")}`;
|
||||
}
|
||||
13
skills/atlassian/shared/scripts/src/files.ts
Normal file
13
skills/atlassian/shared/scripts/src/files.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import { readFile } from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
|
||||
export async function readWorkspaceFile(filePath: string, cwd: string) {
|
||||
const resolved = path.resolve(cwd, filePath);
|
||||
const relative = path.relative(cwd, resolved);
|
||||
|
||||
if (relative.startsWith("..") || path.isAbsolute(relative)) {
|
||||
throw new Error(`--body-file must stay within the active workspace: ${filePath}`);
|
||||
}
|
||||
|
||||
return readFile(resolved, "utf8");
|
||||
}
|
||||
294
skills/atlassian/shared/scripts/src/jira.ts
Normal file
294
skills/atlassian/shared/scripts/src/jira.ts
Normal file
@@ -0,0 +1,294 @@
|
||||
import { markdownToAdf } from "./adf.js";
|
||||
import { createBasicAuthHeader } from "./config.js";
|
||||
import type { AtlassianConfig, CommandOutput, FetchLike, JiraIssueSummary } from "./types.js";
|
||||
|
||||
// Jira fields requested on every search/get so normalizeIssue has everything
// it needs to build a JiraIssueSummary.
const ISSUE_FIELDS = ["summary", "issuetype", "status", "assignee", "created", "updated"] as const;

// Constructor options for createJiraClient; fetchImpl is injectable for tests
// and falls back to the global Fetch API.
type JiraClientOptions = {
  config: AtlassianConfig;
  fetchImpl?: FetchLike;
};

// searchIssues input: a JQL expression plus pagination controls.
type SearchInput = {
  jql: string;
  maxResults: number;
  startAt: number;
};

// createIssue input; project falls back to the configured default project,
// description is markdown that gets converted to ADF before sending.
type CreateInput = {
  project?: string;
  type: string;
  summary: string;
  description?: string;
  dryRun?: boolean;
};

// updateIssue input; at least one of summary/description must be provided.
type UpdateInput = {
  issue: string;
  summary?: string;
  description?: string;
  dryRun?: boolean;
};

// commentIssue input; body is markdown converted to ADF.
type CommentInput = {
  issue: string;
  body: string;
  dryRun?: boolean;
};

// transitionIssue input; transition is a Jira transition ID (not a name).
type TransitionInput = {
  issue: string;
  transition: string;
  dryRun?: boolean;
};
|
||||
|
||||
function normalizeIssue(config: AtlassianConfig, issue: Record<string, unknown>): JiraIssueSummary {
|
||||
const fields = (issue.fields ?? {}) as Record<string, unknown>;
|
||||
const issueType = (fields.issuetype ?? {}) as Record<string, unknown>;
|
||||
const status = (fields.status ?? {}) as Record<string, unknown>;
|
||||
const assignee = (fields.assignee ?? {}) as Record<string, unknown>;
|
||||
|
||||
return {
|
||||
key: String(issue.key ?? ""),
|
||||
summary: String(fields.summary ?? ""),
|
||||
issueType: String(issueType.name ?? ""),
|
||||
status: String(status.name ?? ""),
|
||||
assignee: assignee.displayName ? String(assignee.displayName) : undefined,
|
||||
created: String(fields.created ?? ""),
|
||||
updated: String(fields.updated ?? ""),
|
||||
url: `${config.baseUrl}/browse/${issue.key ?? ""}`,
|
||||
};
|
||||
}
|
||||
|
||||
function createHeaders(config: AtlassianConfig, includeJsonBody: boolean) {
|
||||
const headers: Array<[string, string]> = [
|
||||
["Accept", "application/json"],
|
||||
["Authorization", createBasicAuthHeader(config)],
|
||||
];
|
||||
|
||||
if (includeJsonBody) {
|
||||
headers.push(["Content-Type", "application/json"]);
|
||||
}
|
||||
|
||||
return headers;
|
||||
}
|
||||
|
||||
async function parseResponse(response: Response) {
|
||||
if (response.status === 204) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const contentType = response.headers.get("content-type") ?? "";
|
||||
|
||||
if (contentType.includes("application/json")) {
|
||||
return response.json();
|
||||
}
|
||||
|
||||
return response.text();
|
||||
}
|
||||
|
||||
function createRequest(config: AtlassianConfig, method: "GET" | "POST" | "PUT", path: string, body?: unknown) {
|
||||
const url = new URL(path, `${config.jiraBaseUrl}/`);
|
||||
|
||||
return {
|
||||
method,
|
||||
url: url.toString(),
|
||||
...(body === undefined ? {} : { body }),
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Build a Jira REST v3 client bound to the given configuration.
 *
 * Every method resolves to the CommandOutput envelope. Mutating methods
 * honor `dryRun` by returning the would-be request (method/url/body) instead
 * of sending it. `fetchImpl` is injectable for tests and falls back to the
 * global Fetch API; throws immediately when neither is available.
 */
export function createJiraClient(options: JiraClientOptions) {
  const fetchImpl = options.fetchImpl ?? globalThis.fetch;

  if (!fetchImpl) {
    throw new Error("Fetch API is not available in this runtime");
  }

  // Shared transport: resolve the URL against the Jira base, attach auth
  // headers (JSON Content-Type only when a body exists), serialize the body,
  // and fail loudly on any non-2xx status.
  async function send(method: "GET" | "POST" | "PUT", path: string, body?: unknown) {
    const request = createRequest(options.config, method, path, body);
    const response = await fetchImpl(request.url, {
      method,
      headers: createHeaders(options.config, body !== undefined),
      ...(body === undefined ? {} : { body: JSON.stringify(body) }),
    });

    if (!response.ok) {
      throw new Error(`Jira request failed: ${response.status} ${response.statusText}`);
    }

    return parseResponse(response);
  }

  return {
    // POST /search with JQL + pagination; normalizes each returned issue and
    // echoes pagination figures (falling back to the request's own values).
    async searchIssues(input: SearchInput): Promise<CommandOutput<unknown>> {
      const raw = (await send("POST", "/rest/api/3/search", {
        jql: input.jql,
        maxResults: input.maxResults,
        startAt: input.startAt,
        fields: [...ISSUE_FIELDS],
      })) as Record<string, unknown>;

      const issues = Array.isArray(raw.issues) ? raw.issues : [];

      return {
        ok: true,
        data: {
          issues: issues.map((issue) => normalizeIssue(options.config, issue as Record<string, unknown>)),
          startAt: Number(raw.startAt ?? input.startAt),
          maxResults: Number(raw.maxResults ?? input.maxResults),
          total: Number(raw.total ?? issues.length),
        },
      };
    },

    // GET a single issue (restricted to ISSUE_FIELDS); returns both the
    // normalized summary and the raw Jira payload.
    async getIssue(issue: string): Promise<CommandOutput<unknown>> {
      const url = new URL(`/rest/api/3/issue/${issue}`, `${options.config.jiraBaseUrl}/`);
      url.searchParams.set("fields", ISSUE_FIELDS.join(","));

      const raw = (await send("GET", `${url.pathname}${url.search}`)) as Record<string, unknown>;

      return {
        ok: true,
        data: {
          issue: normalizeIssue(options.config, raw),
        },
        raw,
      };
    },

    // List the transitions currently available on an issue, flattened to
    // id/name/target-status/hasScreen.
    async getTransitions(issue: string): Promise<CommandOutput<unknown>> {
      const raw = (await send(
        "GET",
        `/rest/api/3/issue/${issue}/transitions`,
      )) as { transitions?: Array<Record<string, unknown>> };

      return {
        ok: true,
        data: {
          transitions: (raw.transitions ?? []).map((transition) => ({
            id: String(transition.id ?? ""),
            name: String(transition.name ?? ""),
            toStatus: String(((transition.to ?? {}) as Record<string, unknown>).name ?? ""),
            hasScreen: Boolean(transition.hasScreen),
          })),
        },
      };
    },

    // Create an issue; project falls back to the configured default, and a
    // markdown description is converted to ADF. dryRun echoes the request.
    async createIssue(input: CreateInput): Promise<CommandOutput<unknown>> {
      const project = input.project || options.config.defaultProject;

      if (!project) {
        throw new Error("jira-create requires --project or ATLASSIAN_DEFAULT_PROJECT");
      }

      const request = createRequest(options.config, "POST", "/rest/api/3/issue", {
        fields: {
          project: { key: project },
          issuetype: { name: input.type },
          summary: input.summary,
          ...(input.description ? { description: markdownToAdf(input.description) } : {}),
        },
      });

      if (input.dryRun) {
        return {
          ok: true,
          dryRun: true,
          data: request,
        };
      }

      const raw = await send("POST", "/rest/api/3/issue", request.body);
      return { ok: true, data: raw };
    },

    // Update summary and/or description (markdown -> ADF); rejects an empty
    // update up front. dryRun echoes the request.
    async updateIssue(input: UpdateInput): Promise<CommandOutput<unknown>> {
      const fields: Record<string, unknown> = {};

      if (input.summary) {
        fields.summary = input.summary;
      }

      if (input.description) {
        fields.description = markdownToAdf(input.description);
      }

      if (Object.keys(fields).length === 0) {
        throw new Error("jira-update requires --summary and/or --description-file");
      }

      const request = createRequest(options.config, "PUT", `/rest/api/3/issue/${input.issue}`, {
        fields,
      });

      if (input.dryRun) {
        return {
          ok: true,
          dryRun: true,
          data: request,
        };
      }

      // Jira's PUT returns no useful body; report a synthetic confirmation.
      await send("PUT", `/rest/api/3/issue/${input.issue}`, request.body);
      return {
        ok: true,
        data: {
          issue: input.issue,
          updated: true,
        },
      };
    },

    // Add a comment whose markdown body is converted to ADF. dryRun echoes
    // the request; otherwise the raw created-comment payload is returned.
    async commentIssue(input: CommentInput): Promise<CommandOutput<unknown>> {
      const request = createRequest(options.config, "POST", `/rest/api/3/issue/${input.issue}/comment`, {
        body: markdownToAdf(input.body),
      });

      if (input.dryRun) {
        return {
          ok: true,
          dryRun: true,
          data: request,
        };
      }

      const raw = await send("POST", `/rest/api/3/issue/${input.issue}/comment`, request.body);
      return {
        ok: true,
        data: raw,
      };
    },

    // Execute a workflow transition by ID. dryRun echoes the request;
    // the real call returns a synthetic confirmation (Jira replies 204).
    async transitionIssue(input: TransitionInput): Promise<CommandOutput<unknown>> {
      const request = createRequest(
        options.config,
        "POST",
        `/rest/api/3/issue/${input.issue}/transitions`,
        {
          transition: {
            id: input.transition,
          },
        },
      );

      if (input.dryRun) {
        return {
          ok: true,
          dryRun: true,
          data: request,
        };
      }

      await send("POST", `/rest/api/3/issue/${input.issue}/transitions`, request.body);
      return {
        ok: true,
        data: {
          issue: input.issue,
          transitioned: true,
          transition: input.transition,
        },
      };
    },
  };
}
|
||||
44
skills/atlassian/shared/scripts/src/output.ts
Normal file
44
skills/atlassian/shared/scripts/src/output.ts
Normal file
@@ -0,0 +1,44 @@
|
||||
import type { CommandOutput, OutputFormat, Writer } from "./types.js";
|
||||
|
||||
function renderText(payload: CommandOutput<unknown>) {
|
||||
const data = payload.data as Record<string, unknown>;
|
||||
|
||||
if (Array.isArray(data?.issues)) {
|
||||
return data.issues
|
||||
.map((issue) => {
|
||||
const item = issue as Record<string, string>;
|
||||
return `${item.key} [${item.status}] ${item.issueType} - ${item.summary}`;
|
||||
})
|
||||
.join("\n");
|
||||
}
|
||||
|
||||
if (data?.issue && typeof data.issue === "object") {
|
||||
const issue = data.issue as Record<string, string>;
|
||||
return [
|
||||
issue.key,
|
||||
`${issue.issueType} | ${issue.status}`,
|
||||
issue.summary,
|
||||
issue.url,
|
||||
].join("\n");
|
||||
}
|
||||
|
||||
if (Array.isArray(data?.transitions)) {
|
||||
return data.transitions
|
||||
.map((transition) => {
|
||||
const item = transition as Record<string, string>;
|
||||
return `${item.id} ${item.name} -> ${item.toStatus}`;
|
||||
})
|
||||
.join("\n");
|
||||
}
|
||||
|
||||
return JSON.stringify(payload, null, 2);
|
||||
}
|
||||
|
||||
export function writeOutput(
|
||||
writer: Writer,
|
||||
payload: CommandOutput<unknown>,
|
||||
format: OutputFormat = "json",
|
||||
) {
|
||||
const body = format === "text" ? renderText(payload) : JSON.stringify(payload, null, 2);
|
||||
writer.write(`${body}\n`);
|
||||
}
|
||||
35
skills/atlassian/shared/scripts/src/types.ts
Normal file
35
skills/atlassian/shared/scripts/src/types.ts
Normal file
@@ -0,0 +1,35 @@
|
||||
// Resolved Atlassian connection settings produced by loadConfig.
export type AtlassianConfig = {
  baseUrl: string;
  jiraBaseUrl: string;        // falls back to baseUrl when not overridden
  confluenceBaseUrl: string;  // falls back to baseUrl when not overridden
  email: string;
  apiToken: string;
  defaultProject?: string;
  defaultSpace?: string;
};

// Uniform success envelope every CLI command prints.
export type CommandOutput<T> = {
  ok: true;
  data: T;
  dryRun?: boolean;  // set when the command echoed a request instead of sending it
  raw?: unknown;     // original API payload, when preserved alongside normalized data
};

// Normalized view of a Jira issue used by search/get output.
export type JiraIssueSummary = {
  key: string;
  summary: string;
  issueType: string;
  status: string;
  assignee?: string;  // display name; absent when unassigned
  created: string;
  updated: string;
  url: string;        // browse link derived from AtlassianConfig.baseUrl
};

// Minimal stream interface satisfied by process.stdout/stderr and test writers.
export type Writer = {
  write(chunk: string | Uint8Array): unknown;
};

// Injectable fetch matching the global Fetch API signature.
export type FetchLike = typeof fetch;

// Supported values for the CLI --format option.
export type OutputFormat = "json" | "text";
|
||||
38
skills/atlassian/shared/scripts/tests/config.test.ts
Normal file
38
skills/atlassian/shared/scripts/tests/config.test.ts
Normal file
@@ -0,0 +1,38 @@
|
||||
import test from "node:test";
|
||||
import assert from "node:assert/strict";
|
||||
|
||||
import { createBasicAuthHeader, loadConfig } from "../src/config.js";
|
||||
|
||||
// Verifies the trailing slash is stripped and that the Jira/Confluence base
// URLs fall back to ATLASSIAN_BASE_URL when no overrides are provided.
test("loadConfig derives Jira and Confluence base URLs from ATLASSIAN_BASE_URL", () => {
  const config = loadConfig({
    ATLASSIAN_BASE_URL: "https://example.atlassian.net/",
    ATLASSIAN_EMAIL: "dev@example.com",
    ATLASSIAN_API_TOKEN: "secret-token",
    ATLASSIAN_DEFAULT_PROJECT: "ENG",
  });

  assert.deepEqual(config, {
    baseUrl: "https://example.atlassian.net",
    jiraBaseUrl: "https://example.atlassian.net",
    confluenceBaseUrl: "https://example.atlassian.net",
    email: "dev@example.com",
    apiToken: "secret-token",
    defaultProject: "ENG",
    defaultSpace: undefined,
  });
});

// Verifies the Basic header is exactly base64("email:apiToken"), the scheme
// Atlassian Cloud uses for API-token authentication.
test("createBasicAuthHeader encodes email and API token for Atlassian Cloud", () => {
  const header = createBasicAuthHeader({
    baseUrl: "https://example.atlassian.net",
    jiraBaseUrl: "https://example.atlassian.net",
    confluenceBaseUrl: "https://example.atlassian.net",
    email: "dev@example.com",
    apiToken: "secret-token",
  });

  assert.equal(
    header,
    `Basic ${Buffer.from("dev@example.com:secret-token").toString("base64")}`,
  );
});
|
||||
71
skills/atlassian/shared/scripts/tests/helpers.ts
Normal file
71
skills/atlassian/shared/scripts/tests/helpers.ts
Normal file
@@ -0,0 +1,71 @@
|
||||
import { mkdtempSync, rmSync, writeFileSync } from "node:fs";
|
||||
import { tmpdir } from "node:os";
|
||||
import path from "node:path";
|
||||
|
||||
import { buildProgram } from "../src/cli.js";
|
||||
|
||||
// Arguments accepted by the in-process CLI driver below.
type RunCliOptions = {
  args: string[];            // argv WITHOUT the node/script prefix (parsed with from: "user")
  cwd?: string;              // workspace root for file resolution
  env?: NodeJS.ProcessEnv;   // isolated environment for loadConfig
  fetchImpl?: typeof fetch;  // stubbed network layer
};
|
||||
|
||||
class MemoryWriter {
|
||||
private readonly chunks: string[] = [];
|
||||
|
||||
write(chunk: string | Uint8Array) {
|
||||
this.chunks.push(typeof chunk === "string" ? chunk : Buffer.from(chunk).toString("utf8"));
|
||||
return true;
|
||||
}
|
||||
|
||||
toString() {
|
||||
return this.chunks.join("");
|
||||
}
|
||||
}
|
||||
|
||||
export async function runCli(options: RunCliOptions) {
|
||||
const stdout = new MemoryWriter();
|
||||
const stderr = new MemoryWriter();
|
||||
const program = buildProgram({
|
||||
cwd: options.cwd,
|
||||
env: options.env,
|
||||
fetchImpl: options.fetchImpl,
|
||||
stdout,
|
||||
stderr,
|
||||
});
|
||||
|
||||
await program.parseAsync(options.args, { from: "user" });
|
||||
|
||||
return {
|
||||
stdout: stdout.toString(),
|
||||
stderr: stderr.toString(),
|
||||
};
|
||||
}
|
||||
|
||||
export function createTempWorkspace() {
|
||||
const cwd = mkdtempSync(path.join(tmpdir(), "atlassian-skill-"));
|
||||
|
||||
return {
|
||||
cwd,
|
||||
cleanup() {
|
||||
rmSync(cwd, { recursive: true, force: true });
|
||||
},
|
||||
write(relativePath: string, contents: string) {
|
||||
const target = path.join(cwd, relativePath);
|
||||
writeFileSync(target, contents, "utf8");
|
||||
return target;
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export function jsonResponse(payload: unknown, init?: ResponseInit) {
|
||||
return new Response(JSON.stringify(payload), {
|
||||
status: init?.status ?? 200,
|
||||
statusText: init?.statusText,
|
||||
headers: {
|
||||
"content-type": "application/json",
|
||||
...(init?.headers ?? {}),
|
||||
},
|
||||
});
|
||||
}
|
||||
321
skills/atlassian/shared/scripts/tests/jira.test.ts
Normal file
321
skills/atlassian/shared/scripts/tests/jira.test.ts
Normal file
@@ -0,0 +1,321 @@
|
||||
import test from "node:test";
|
||||
import assert from "node:assert/strict";
|
||||
|
||||
import { markdownToAdf } from "../src/adf.js";
|
||||
import { createTempWorkspace, jsonResponse, runCli } from "./helpers.js";
|
||||
|
||||
// Minimal valid environment shared by the CLI tests below; satisfies the
// three required variables checked by loadConfig.
const baseEnv = {
  ATLASSIAN_BASE_URL: "https://example.atlassian.net",
  ATLASSIAN_EMAIL: "dev@example.com",
  ATLASSIAN_API_TOKEN: "secret-token",
};
|
||||
|
||||
// End-to-end through the CLI with a stubbed fetch: records the single POST to
// /rest/api/3/search, asserts the JQL/pagination request body and auth header,
// then asserts the normalized JSON envelope printed on stdout.
test("jira-search emits normalized results and uses pagination inputs", async () => {
  const calls: Array<{ url: string; init: RequestInit | undefined }> = [];

  const fetchImpl: typeof fetch = async (input, init) => {
    const url = typeof input === "string" ? input : input.toString();
    calls.push({ url, init });

    return jsonResponse({
      startAt: 10,
      maxResults: 2,
      total: 25,
      issues: [
        {
          key: "ENG-1",
          fields: {
            summary: "Add Jira search command",
            issuetype: { name: "Story" },
            status: { name: "In Progress" },
            assignee: { displayName: "Ada Lovelace" },
            created: "2026-03-01T00:00:00.000Z",
            updated: "2026-03-02T00:00:00.000Z",
          },
        },
      ],
    });
  };

  const result = await runCli({
    args: ["jira-search", "--jql", "project = ENG", "--max-results", "2", "--start-at", "10"],
    env: baseEnv,
    fetchImpl,
  });

  // Exactly one outbound request, shaped as the Jira search API expects.
  assert.equal(calls.length, 1);
  assert.equal(calls[0]?.url, "https://example.atlassian.net/rest/api/3/search");
  assert.equal(calls[0]?.init?.method, "POST");
  assert.match(String(calls[0]?.init?.headers), /Authorization/);
  assert.deepEqual(JSON.parse(String(calls[0]?.init?.body)), {
    jql: "project = ENG",
    maxResults: 2,
    startAt: 10,
    fields: ["summary", "issuetype", "status", "assignee", "created", "updated"],
  });

  // stdout carries the normalized CommandOutput envelope.
  assert.deepEqual(JSON.parse(result.stdout), {
    ok: true,
    data: {
      issues: [
        {
          key: "ENG-1",
          summary: "Add Jira search command",
          issueType: "Story",
          status: "In Progress",
          assignee: "Ada Lovelace",
          created: "2026-03-01T00:00:00.000Z",
          updated: "2026-03-02T00:00:00.000Z",
          url: "https://example.atlassian.net/browse/ENG-1",
        },
      ],
      startAt: 10,
      maxResults: 2,
      total: 25,
    },
  });
});
|
||||
|
||||
// Verifies jira-get prints both the normalized issue summary and, under
// `raw`, the untouched Jira payload returned by the stubbed fetch.
test("jira-get returns normalized fields plus the raw Jira payload", async () => {
  const rawIssue = {
    key: "ENG-42",
    fields: {
      summary: "Ship v1",
      issuetype: { name: "Task" },
      status: { name: "Done" },
      assignee: { displayName: "Grace Hopper" },
      created: "2026-03-03T00:00:00.000Z",
      updated: "2026-03-04T00:00:00.000Z",
    },
  };

  const fetchImpl: typeof fetch = async () => jsonResponse(rawIssue);

  const result = await runCli({
    args: ["jira-get", "--issue", "ENG-42"],
    env: baseEnv,
    fetchImpl,
  });

  assert.deepEqual(JSON.parse(result.stdout), {
    ok: true,
    data: {
      issue: {
        key: "ENG-42",
        summary: "Ship v1",
        issueType: "Task",
        status: "Done",
        assignee: "Grace Hopper",
        created: "2026-03-03T00:00:00.000Z",
        updated: "2026-03-04T00:00:00.000Z",
        url: "https://example.atlassian.net/browse/ENG-42",
      },
    },
    raw: rawIssue,
  });
});
|
||||
|
||||
test("markdownToAdf converts headings, paragraphs, and bullet lists", () => {
|
||||
assert.deepEqual(markdownToAdf("# Summary\n\nBuild the Jira skill.\n\n- Search\n- Comment"), {
|
||||
type: "doc",
|
||||
version: 1,
|
||||
content: [
|
||||
{
|
||||
type: "heading",
|
||||
attrs: { level: 1 },
|
||||
content: [{ type: "text", text: "Summary" }],
|
||||
},
|
||||
{
|
||||
type: "paragraph",
|
||||
content: [{ type: "text", text: "Build the Jira skill." }],
|
||||
},
|
||||
{
|
||||
type: "bulletList",
|
||||
content: [
|
||||
{
|
||||
type: "listItem",
|
||||
content: [
|
||||
{
|
||||
type: "paragraph",
|
||||
content: [{ type: "text", text: "Search" }],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
type: "listItem",
|
||||
content: [
|
||||
{
|
||||
type: "paragraph",
|
||||
content: [{ type: "text", text: "Comment" }],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
});
|
||||
});
|
||||
|
||||
test("jira-create dry-run emits an ADF request body without calling Jira", async () => {
|
||||
const workspace = createTempWorkspace();
|
||||
|
||||
try {
|
||||
workspace.write("description.md", "# New story\n\n- one\n- two");
|
||||
let called = false;
|
||||
|
||||
const result = await runCli({
|
||||
args: [
|
||||
"jira-create",
|
||||
"--type",
|
||||
"Story",
|
||||
"--summary",
|
||||
"Create the Atlassian skill",
|
||||
"--description-file",
|
||||
"description.md",
|
||||
"--dry-run",
|
||||
],
|
||||
cwd: workspace.cwd,
|
||||
env: {
|
||||
...baseEnv,
|
||||
ATLASSIAN_DEFAULT_PROJECT: "ENG",
|
||||
},
|
||||
fetchImpl: async () => {
|
||||
called = true;
|
||||
return jsonResponse({});
|
||||
},
|
||||
});
|
||||
|
||||
assert.equal(called, false);
|
||||
assert.deepEqual(JSON.parse(result.stdout), {
|
||||
ok: true,
|
||||
dryRun: true,
|
||||
data: {
|
||||
method: "POST",
|
||||
url: "https://example.atlassian.net/rest/api/3/issue",
|
||||
body: {
|
||||
fields: {
|
||||
project: { key: "ENG" },
|
||||
issuetype: { name: "Story" },
|
||||
summary: "Create the Atlassian skill",
|
||||
description: markdownToAdf("# New story\n\n- one\n- two"),
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
} finally {
|
||||
workspace.cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
test("jira-update, jira-comment, and jira-transition dry-runs build the expected Jira requests", async () => {
|
||||
const workspace = createTempWorkspace();
|
||||
|
||||
try {
|
||||
workspace.write("issue.md", "Updated description");
|
||||
workspace.write("comment.md", "Comment body");
|
||||
|
||||
const update = await runCli({
|
||||
args: [
|
||||
"jira-update",
|
||||
"--issue",
|
||||
"ENG-9",
|
||||
"--summary",
|
||||
"Updated summary",
|
||||
"--description-file",
|
||||
"issue.md",
|
||||
"--dry-run",
|
||||
],
|
||||
cwd: workspace.cwd,
|
||||
env: baseEnv,
|
||||
});
|
||||
|
||||
const comment = await runCli({
|
||||
args: ["jira-comment", "--issue", "ENG-9", "--body-file", "comment.md", "--dry-run"],
|
||||
cwd: workspace.cwd,
|
||||
env: baseEnv,
|
||||
});
|
||||
|
||||
const transition = await runCli({
|
||||
args: ["jira-transition", "--issue", "ENG-9", "--transition", "31", "--dry-run"],
|
||||
cwd: workspace.cwd,
|
||||
env: baseEnv,
|
||||
});
|
||||
|
||||
assert.deepEqual(JSON.parse(update.stdout), {
|
||||
ok: true,
|
||||
dryRun: true,
|
||||
data: {
|
||||
method: "PUT",
|
||||
url: "https://example.atlassian.net/rest/api/3/issue/ENG-9",
|
||||
body: {
|
||||
fields: {
|
||||
summary: "Updated summary",
|
||||
description: markdownToAdf("Updated description"),
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
assert.deepEqual(JSON.parse(comment.stdout), {
|
||||
ok: true,
|
||||
dryRun: true,
|
||||
data: {
|
||||
method: "POST",
|
||||
url: "https://example.atlassian.net/rest/api/3/issue/ENG-9/comment",
|
||||
body: {
|
||||
body: markdownToAdf("Comment body"),
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
assert.deepEqual(JSON.parse(transition.stdout), {
|
||||
ok: true,
|
||||
dryRun: true,
|
||||
data: {
|
||||
method: "POST",
|
||||
url: "https://example.atlassian.net/rest/api/3/issue/ENG-9/transitions",
|
||||
body: {
|
||||
transition: {
|
||||
id: "31",
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
} finally {
|
||||
workspace.cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
test("jira-transitions returns normalized transition options", async () => {
|
||||
const fetchImpl: typeof fetch = async () =>
|
||||
jsonResponse({
|
||||
transitions: [
|
||||
{
|
||||
id: "21",
|
||||
name: "Start Progress",
|
||||
to: { name: "In Progress" },
|
||||
hasScreen: false,
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const result = await runCli({
|
||||
args: ["jira-transitions", "--issue", "ENG-9"],
|
||||
env: baseEnv,
|
||||
fetchImpl,
|
||||
});
|
||||
|
||||
assert.deepEqual(JSON.parse(result.stdout), {
|
||||
ok: true,
|
||||
data: {
|
||||
transitions: [
|
||||
{
|
||||
id: "21",
|
||||
name: "Start Progress",
|
||||
toStatus: "In Progress",
|
||||
hasScreen: false,
|
||||
},
|
||||
],
|
||||
},
|
||||
});
|
||||
});
|
||||
Reference in New Issue
Block a user