feat(atlassian): implement milestone M4 - packaging and doc sync
This commit is contained in:
@@ -56,10 +56,10 @@ ai-coding-skills/
|
||||
|
||||
| Skill | Agent Variant | Purpose | Status | Docs |
|
||||
|---|---|---|---|---|
|
||||
| atlassian | codex | Portable Jira and Confluence workflows through a shared Cloud-first CLI | In Progress | [ATLASSIAN](docs/ATLASSIAN.md) |
|
||||
| atlassian | claude-code | Portable Jira and Confluence workflows through a shared Cloud-first CLI | In Progress | [ATLASSIAN](docs/ATLASSIAN.md) |
|
||||
| atlassian | opencode | Portable Jira and Confluence workflows through a shared Cloud-first CLI | In Progress | [ATLASSIAN](docs/ATLASSIAN.md) |
|
||||
| atlassian | cursor | Portable Jira and Confluence workflows through a shared Cloud-first CLI | In Progress | [ATLASSIAN](docs/ATLASSIAN.md) |
|
||||
| atlassian | codex | Portable Jira and Confluence workflows through a shared Cloud-first CLI | Ready | [ATLASSIAN](docs/ATLASSIAN.md) |
|
||||
| atlassian | claude-code | Portable Jira and Confluence workflows through a shared Cloud-first CLI | Ready | [ATLASSIAN](docs/ATLASSIAN.md) |
|
||||
| atlassian | opencode | Portable Jira and Confluence workflows through a shared Cloud-first CLI | Ready | [ATLASSIAN](docs/ATLASSIAN.md) |
|
||||
| atlassian | cursor | Portable Jira and Confluence workflows through a shared Cloud-first CLI | Ready | [ATLASSIAN](docs/ATLASSIAN.md) |
|
||||
| create-plan | codex | Structured planning with milestones, iterative cross-model review, and runbook-first execution workflow | Ready | [CREATE-PLAN](docs/CREATE-PLAN.md) |
|
||||
| create-plan | claude-code | Structured planning with milestones, iterative cross-model review, and runbook-first execution workflow | Ready | [CREATE-PLAN](docs/CREATE-PLAN.md) |
|
||||
| create-plan | opencode | Structured planning with milestones, iterative cross-model review, and runbook-first execution workflow | Ready | [CREATE-PLAN](docs/CREATE-PLAN.md) |
|
||||
|
||||
@@ -8,6 +8,12 @@ Provide a portable Atlassian Cloud skill for Codex, Claude Code, Cursor Agent, a
|
||||
|
||||
The repo targets multiple agent environments with uneven MCP availability. This skill packages a consistent CLI contract so the same task-oriented workflows can be used across all supported agents without depending on MCP-specific integrations.
|
||||
|
||||
The canonical runtime lives in `skills/atlassian/shared/scripts/`. Installable per-agent `scripts/` bundles are generated from that source with:
|
||||
|
||||
```bash
|
||||
pnpm --dir skills/atlassian/shared/scripts sync:agents
|
||||
```
|
||||
|
||||
## Requirements
|
||||
|
||||
- Node.js 20+
|
||||
@@ -42,6 +48,15 @@ Optional:
|
||||
- `conf-children`
|
||||
- `raw`
|
||||
|
||||
## Command Notes
|
||||
|
||||
- `health` validates local configuration and shows the resolved Jira and Confluence base URLs.
|
||||
- `jira-create` requires `--type`, `--summary`, and either `--project` or `ATLASSIAN_DEFAULT_PROJECT`.
|
||||
- `jira-update` requires `--issue` and at least one of `--summary` or `--description-file`.
|
||||
- `conf-create` requires `--title`, `--body-file`, and either `--space` or `ATLASSIAN_DEFAULT_SPACE`.
|
||||
- `conf-update` requires `--page`, `--title`, and `--body-file`; it fetches the current page version before building the update payload.
|
||||
- `raw --body-file` expects a workspace-scoped JSON file and is limited to validated Atlassian API prefixes.
|
||||
|
||||
## Safety Model
|
||||
|
||||
- Default output is JSON.
|
||||
@@ -120,12 +135,19 @@ pnpm atlassian health
|
||||
- `pnpm atlassian jira-get --issue ENG-123`
|
||||
- Dry-run a Jira comment:
|
||||
- `pnpm atlassian jira-comment --issue ENG-123 --body-file comment.md --dry-run`
|
||||
- Dry-run a Jira issue create with default project fallback:
|
||||
- `pnpm atlassian jira-create --type Story --summary "Add Atlassian skill" --description-file story.md --dry-run`
|
||||
- Search Confluence pages:
|
||||
  - `pnpm atlassian conf-search --query "title ~ \"Runbook\""`
|
||||
  - `pnpm atlassian conf-search --query "title ~ \"Runbook\"" --max-results 10 --start-at 0`
|
||||
- Inspect a Confluence page:
|
||||
- `pnpm atlassian conf-get --page 12345`
|
||||
- Dry-run a Confluence page update:
|
||||
- `pnpm atlassian conf-update --page 12345 --title "Runbook" --body-file page.storage.html --dry-run`
|
||||
- Dry-run a Confluence footer comment:
|
||||
- `pnpm atlassian conf-comment --page 12345 --body-file comment.storage.html --dry-run`
|
||||
- Use bounded raw mode:
|
||||
- `pnpm atlassian raw --product jira --method GET --path "/rest/api/3/issue/ENG-123"`
|
||||
- `pnpm atlassian raw --product confluence --method POST --path "/wiki/api/v2/pages" --body-file page.json --dry-run`
|
||||
|
||||
## Scope Notes
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ This directory contains user-facing docs for each skill.
|
||||
|
||||
## Index
|
||||
|
||||
- [ATLASSIAN.md](./ATLASSIAN.md) — Includes requirements, install, auth, safety rules, and usage examples for the Atlassian skill.
|
||||
- [ATLASSIAN.md](./ATLASSIAN.md) — Includes requirements, generated bundle sync, install, auth, safety rules, and usage examples for the Atlassian skill.
|
||||
- [CREATE-PLAN.md](./CREATE-PLAN.md) — Includes requirements, install, verification, and execution workflow for create-plan.
|
||||
- [IMPLEMENT-PLAN.md](./IMPLEMENT-PLAN.md) — Includes requirements, install, verification, and milestone review workflow for implement-plan.
|
||||
- [WEB-AUTOMATION.md](./WEB-AUTOMATION.md) — Includes requirements, install, dependency verification, and usage examples for web-automation.
|
||||
|
||||
20
skills/atlassian/claude-code/scripts/package.json
Normal file
20
skills/atlassian/claude-code/scripts/package.json
Normal file
@@ -0,0 +1,20 @@
|
||||
{
|
||||
"name": "atlassian-skill-scripts",
|
||||
"version": "1.0.0",
|
||||
"description": "Shared runtime for the Atlassian skill",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"atlassian": "tsx src/cli.ts",
|
||||
"typecheck": "tsc --noEmit"
|
||||
},
|
||||
"dependencies": {
|
||||
"commander": "^13.1.0",
|
||||
"dotenv": "^16.4.7"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^24.3.0",
|
||||
"tsx": "^4.20.5",
|
||||
"typescript": "^5.9.2"
|
||||
},
|
||||
"packageManager": "pnpm@10.18.1+sha512.77a884a165cbba2d8d1c19e3b4880eee6d2fcabd0d879121e282196b80042351d5eb3ca0935fa599da1dc51265cc68816ad2bddd2a2de5ea9fdf92adbec7cd34"
|
||||
}
|
||||
361
skills/atlassian/claude-code/scripts/pnpm-lock.yaml
generated
Normal file
361
skills/atlassian/claude-code/scripts/pnpm-lock.yaml
generated
Normal file
@@ -0,0 +1,361 @@
|
||||
lockfileVersion: '9.0'
|
||||
|
||||
settings:
|
||||
autoInstallPeers: true
|
||||
excludeLinksFromLockfile: false
|
||||
|
||||
importers:
|
||||
|
||||
.:
|
||||
dependencies:
|
||||
commander:
|
||||
specifier: ^13.1.0
|
||||
version: 13.1.0
|
||||
dotenv:
|
||||
specifier: ^16.4.7
|
||||
version: 16.6.1
|
||||
devDependencies:
|
||||
'@types/node':
|
||||
specifier: ^24.3.0
|
||||
version: 24.12.0
|
||||
tsx:
|
||||
specifier: ^4.20.5
|
||||
version: 4.21.0
|
||||
typescript:
|
||||
specifier: ^5.9.2
|
||||
version: 5.9.3
|
||||
|
||||
packages:
|
||||
|
||||
'@esbuild/aix-ppc64@0.27.3':
|
||||
resolution: {integrity: sha512-9fJMTNFTWZMh5qwrBItuziu834eOCUcEqymSH7pY+zoMVEZg3gcPuBNxH1EvfVYe9h0x/Ptw8KBzv7qxb7l8dg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [ppc64]
|
||||
os: [aix]
|
||||
|
||||
'@esbuild/android-arm64@0.27.3':
|
||||
resolution: {integrity: sha512-YdghPYUmj/FX2SYKJ0OZxf+iaKgMsKHVPF1MAq/P8WirnSpCStzKJFjOjzsW0QQ7oIAiccHdcqjbHmJxRb/dmg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [android]
|
||||
|
||||
'@esbuild/android-arm@0.27.3':
|
||||
resolution: {integrity: sha512-i5D1hPY7GIQmXlXhs2w8AWHhenb00+GxjxRncS2ZM7YNVGNfaMxgzSGuO8o8SJzRc/oZwU2bcScvVERk03QhzA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm]
|
||||
os: [android]
|
||||
|
||||
'@esbuild/android-x64@0.27.3':
|
||||
resolution: {integrity: sha512-IN/0BNTkHtk8lkOM8JWAYFg4ORxBkZQf9zXiEOfERX/CzxW3Vg1ewAhU7QSWQpVIzTW+b8Xy+lGzdYXV6UZObQ==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [android]
|
||||
|
||||
'@esbuild/darwin-arm64@0.27.3':
|
||||
resolution: {integrity: sha512-Re491k7ByTVRy0t3EKWajdLIr0gz2kKKfzafkth4Q8A5n1xTHrkqZgLLjFEHVD+AXdUGgQMq+Godfq45mGpCKg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [darwin]
|
||||
|
||||
'@esbuild/darwin-x64@0.27.3':
|
||||
resolution: {integrity: sha512-vHk/hA7/1AckjGzRqi6wbo+jaShzRowYip6rt6q7VYEDX4LEy1pZfDpdxCBnGtl+A5zq8iXDcyuxwtv3hNtHFg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [darwin]
|
||||
|
||||
'@esbuild/freebsd-arm64@0.27.3':
|
||||
resolution: {integrity: sha512-ipTYM2fjt3kQAYOvo6vcxJx3nBYAzPjgTCk7QEgZG8AUO3ydUhvelmhrbOheMnGOlaSFUoHXB6un+A7q4ygY9w==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [freebsd]
|
||||
|
||||
'@esbuild/freebsd-x64@0.27.3':
|
||||
resolution: {integrity: sha512-dDk0X87T7mI6U3K9VjWtHOXqwAMJBNN2r7bejDsc+j03SEjtD9HrOl8gVFByeM0aJksoUuUVU9TBaZa2rgj0oA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [freebsd]
|
||||
|
||||
'@esbuild/linux-arm64@0.27.3':
|
||||
resolution: {integrity: sha512-sZOuFz/xWnZ4KH3YfFrKCf1WyPZHakVzTiqji3WDc0BCl2kBwiJLCXpzLzUBLgmp4veFZdvN5ChW4Eq/8Fc2Fg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-arm@0.27.3':
|
||||
resolution: {integrity: sha512-s6nPv2QkSupJwLYyfS+gwdirm0ukyTFNl3KTgZEAiJDd+iHZcbTPPcWCcRYH+WlNbwChgH2QkE9NSlNrMT8Gfw==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-ia32@0.27.3':
|
||||
resolution: {integrity: sha512-yGlQYjdxtLdh0a3jHjuwOrxQjOZYD/C9PfdbgJJF3TIZWnm/tMd/RcNiLngiu4iwcBAOezdnSLAwQDPqTmtTYg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [ia32]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-loong64@0.27.3':
|
||||
resolution: {integrity: sha512-WO60Sn8ly3gtzhyjATDgieJNet/KqsDlX5nRC5Y3oTFcS1l0KWba+SEa9Ja1GfDqSF1z6hif/SkpQJbL63cgOA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [loong64]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-mips64el@0.27.3':
|
||||
resolution: {integrity: sha512-APsymYA6sGcZ4pD6k+UxbDjOFSvPWyZhjaiPyl/f79xKxwTnrn5QUnXR5prvetuaSMsb4jgeHewIDCIWljrSxw==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [mips64el]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-ppc64@0.27.3':
|
||||
resolution: {integrity: sha512-eizBnTeBefojtDb9nSh4vvVQ3V9Qf9Df01PfawPcRzJH4gFSgrObw+LveUyDoKU3kxi5+9RJTCWlj4FjYXVPEA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [ppc64]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-riscv64@0.27.3':
|
||||
resolution: {integrity: sha512-3Emwh0r5wmfm3ssTWRQSyVhbOHvqegUDRd0WhmXKX2mkHJe1SFCMJhagUleMq+Uci34wLSipf8Lagt4LlpRFWQ==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [riscv64]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-s390x@0.27.3':
|
||||
resolution: {integrity: sha512-pBHUx9LzXWBc7MFIEEL0yD/ZVtNgLytvx60gES28GcWMqil8ElCYR4kvbV2BDqsHOvVDRrOxGySBM9Fcv744hw==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [s390x]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-x64@0.27.3':
|
||||
resolution: {integrity: sha512-Czi8yzXUWIQYAtL/2y6vogER8pvcsOsk5cpwL4Gk5nJqH5UZiVByIY8Eorm5R13gq+DQKYg0+JyQoytLQas4dA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/netbsd-arm64@0.27.3':
|
||||
resolution: {integrity: sha512-sDpk0RgmTCR/5HguIZa9n9u+HVKf40fbEUt+iTzSnCaGvY9kFP0YKBWZtJaraonFnqef5SlJ8/TiPAxzyS+UoA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [netbsd]
|
||||
|
||||
'@esbuild/netbsd-x64@0.27.3':
|
||||
resolution: {integrity: sha512-P14lFKJl/DdaE00LItAukUdZO5iqNH7+PjoBm+fLQjtxfcfFE20Xf5CrLsmZdq5LFFZzb5JMZ9grUwvtVYzjiA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [netbsd]
|
||||
|
||||
'@esbuild/openbsd-arm64@0.27.3':
|
||||
resolution: {integrity: sha512-AIcMP77AvirGbRl/UZFTq5hjXK+2wC7qFRGoHSDrZ5v5b8DK/GYpXW3CPRL53NkvDqb9D+alBiC/dV0Fb7eJcw==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [openbsd]
|
||||
|
||||
'@esbuild/openbsd-x64@0.27.3':
|
||||
resolution: {integrity: sha512-DnW2sRrBzA+YnE70LKqnM3P+z8vehfJWHXECbwBmH/CU51z6FiqTQTHFenPlHmo3a8UgpLyH3PT+87OViOh1AQ==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [openbsd]
|
||||
|
||||
'@esbuild/openharmony-arm64@0.27.3':
|
||||
resolution: {integrity: sha512-NinAEgr/etERPTsZJ7aEZQvvg/A6IsZG/LgZy+81wON2huV7SrK3e63dU0XhyZP4RKGyTm7aOgmQk0bGp0fy2g==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [openharmony]
|
||||
|
||||
'@esbuild/sunos-x64@0.27.3':
|
||||
resolution: {integrity: sha512-PanZ+nEz+eWoBJ8/f8HKxTTD172SKwdXebZ0ndd953gt1HRBbhMsaNqjTyYLGLPdoWHy4zLU7bDVJztF5f3BHA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [sunos]
|
||||
|
||||
'@esbuild/win32-arm64@0.27.3':
|
||||
resolution: {integrity: sha512-B2t59lWWYrbRDw/tjiWOuzSsFh1Y/E95ofKz7rIVYSQkUYBjfSgf6oeYPNWHToFRr2zx52JKApIcAS/D5TUBnA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [win32]
|
||||
|
||||
'@esbuild/win32-ia32@0.27.3':
|
||||
resolution: {integrity: sha512-QLKSFeXNS8+tHW7tZpMtjlNb7HKau0QDpwm49u0vUp9y1WOF+PEzkU84y9GqYaAVW8aH8f3GcBck26jh54cX4Q==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [ia32]
|
||||
os: [win32]
|
||||
|
||||
'@esbuild/win32-x64@0.27.3':
|
||||
resolution: {integrity: sha512-4uJGhsxuptu3OcpVAzli+/gWusVGwZZHTlS63hh++ehExkVT8SgiEf7/uC/PclrPPkLhZqGgCTjd0VWLo6xMqA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [win32]
|
||||
|
||||
'@types/node@24.12.0':
|
||||
resolution: {integrity: sha512-GYDxsZi3ChgmckRT9HPU0WEhKLP08ev/Yfcq2AstjrDASOYCSXeyjDsHg4v5t4jOj7cyDX3vmprafKlWIG9MXQ==}
|
||||
|
||||
commander@13.1.0:
|
||||
resolution: {integrity: sha512-/rFeCpNJQbhSZjGVwO9RFV3xPqbnERS8MmIQzCtD/zl6gpJuV/bMLuN92oG3F7d8oDEHHRrujSXNUr8fpjntKw==}
|
||||
engines: {node: '>=18'}
|
||||
|
||||
dotenv@16.6.1:
|
||||
resolution: {integrity: sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==}
|
||||
engines: {node: '>=12'}
|
||||
|
||||
esbuild@0.27.3:
|
||||
resolution: {integrity: sha512-8VwMnyGCONIs6cWue2IdpHxHnAjzxnw2Zr7MkVxB2vjmQ2ivqGFb4LEG3SMnv0Gb2F/G/2yA8zUaiL1gywDCCg==}
|
||||
engines: {node: '>=18'}
|
||||
hasBin: true
|
||||
|
||||
fsevents@2.3.3:
|
||||
resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==}
|
||||
engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
|
||||
os: [darwin]
|
||||
|
||||
get-tsconfig@4.13.6:
|
||||
resolution: {integrity: sha512-shZT/QMiSHc/YBLxxOkMtgSid5HFoauqCE3/exfsEcwg1WkeqjG+V40yBbBrsD+jW2HDXcs28xOfcbm2jI8Ddw==}
|
||||
|
||||
resolve-pkg-maps@1.0.0:
|
||||
resolution: {integrity: sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==}
|
||||
|
||||
tsx@4.21.0:
|
||||
resolution: {integrity: sha512-5C1sg4USs1lfG0GFb2RLXsdpXqBSEhAaA/0kPL01wxzpMqLILNxIxIOKiILz+cdg/pLnOUxFYOR5yhHU666wbw==}
|
||||
engines: {node: '>=18.0.0'}
|
||||
hasBin: true
|
||||
|
||||
typescript@5.9.3:
|
||||
resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==}
|
||||
engines: {node: '>=14.17'}
|
||||
hasBin: true
|
||||
|
||||
undici-types@7.16.0:
|
||||
resolution: {integrity: sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==}
|
||||
|
||||
snapshots:
|
||||
|
||||
'@esbuild/aix-ppc64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/android-arm64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/android-arm@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/android-x64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/darwin-arm64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/darwin-x64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/freebsd-arm64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/freebsd-x64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-arm64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-arm@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-ia32@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-loong64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-mips64el@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-ppc64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-riscv64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-s390x@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-x64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/netbsd-arm64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/netbsd-x64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/openbsd-arm64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/openbsd-x64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/openharmony-arm64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/sunos-x64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/win32-arm64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/win32-ia32@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/win32-x64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@types/node@24.12.0':
|
||||
dependencies:
|
||||
undici-types: 7.16.0
|
||||
|
||||
commander@13.1.0: {}
|
||||
|
||||
dotenv@16.6.1: {}
|
||||
|
||||
esbuild@0.27.3:
|
||||
optionalDependencies:
|
||||
'@esbuild/aix-ppc64': 0.27.3
|
||||
'@esbuild/android-arm': 0.27.3
|
||||
'@esbuild/android-arm64': 0.27.3
|
||||
'@esbuild/android-x64': 0.27.3
|
||||
'@esbuild/darwin-arm64': 0.27.3
|
||||
'@esbuild/darwin-x64': 0.27.3
|
||||
'@esbuild/freebsd-arm64': 0.27.3
|
||||
'@esbuild/freebsd-x64': 0.27.3
|
||||
'@esbuild/linux-arm': 0.27.3
|
||||
'@esbuild/linux-arm64': 0.27.3
|
||||
'@esbuild/linux-ia32': 0.27.3
|
||||
'@esbuild/linux-loong64': 0.27.3
|
||||
'@esbuild/linux-mips64el': 0.27.3
|
||||
'@esbuild/linux-ppc64': 0.27.3
|
||||
'@esbuild/linux-riscv64': 0.27.3
|
||||
'@esbuild/linux-s390x': 0.27.3
|
||||
'@esbuild/linux-x64': 0.27.3
|
||||
'@esbuild/netbsd-arm64': 0.27.3
|
||||
'@esbuild/netbsd-x64': 0.27.3
|
||||
'@esbuild/openbsd-arm64': 0.27.3
|
||||
'@esbuild/openbsd-x64': 0.27.3
|
||||
'@esbuild/openharmony-arm64': 0.27.3
|
||||
'@esbuild/sunos-x64': 0.27.3
|
||||
'@esbuild/win32-arm64': 0.27.3
|
||||
'@esbuild/win32-ia32': 0.27.3
|
||||
'@esbuild/win32-x64': 0.27.3
|
||||
|
||||
fsevents@2.3.3:
|
||||
optional: true
|
||||
|
||||
get-tsconfig@4.13.6:
|
||||
dependencies:
|
||||
resolve-pkg-maps: 1.0.0
|
||||
|
||||
resolve-pkg-maps@1.0.0: {}
|
||||
|
||||
tsx@4.21.0:
|
||||
dependencies:
|
||||
esbuild: 0.27.3
|
||||
get-tsconfig: 4.13.6
|
||||
optionalDependencies:
|
||||
fsevents: 2.3.3
|
||||
|
||||
typescript@5.9.3: {}
|
||||
|
||||
undici-types@7.16.0: {}
|
||||
92
skills/atlassian/claude-code/scripts/src/adf.ts
Normal file
92
skills/atlassian/claude-code/scripts/src/adf.ts
Normal file
@@ -0,0 +1,92 @@
|
||||
const TEXT_NODE = "text";
|
||||
|
||||
function textNode(text: string) {
|
||||
return {
|
||||
type: TEXT_NODE,
|
||||
text,
|
||||
};
|
||||
}
|
||||
|
||||
function paragraphNode(lines: string[]) {
|
||||
const content: Array<{ type: string; text?: string }> = [];
|
||||
|
||||
lines.forEach((line, index) => {
|
||||
if (index > 0) {
|
||||
content.push({ type: "hardBreak" });
|
||||
}
|
||||
|
||||
if (line.length > 0) {
|
||||
content.push(textNode(line));
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
type: "paragraph",
|
||||
...(content.length > 0 ? { content } : {}),
|
||||
};
|
||||
}
|
||||
|
||||
export function markdownToAdf(input: string) {
|
||||
const lines = input.replace(/\r\n/g, "\n").split("\n");
|
||||
const content: Array<Record<string, unknown>> = [];
|
||||
let index = 0;
|
||||
|
||||
while (index < lines.length) {
|
||||
const current = lines[index]?.trimEnd() ?? "";
|
||||
|
||||
if (current.trim().length === 0) {
|
||||
index += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
const heading = current.match(/^(#{1,6})\s+(.*)$/);
|
||||
|
||||
if (heading) {
|
||||
content.push({
|
||||
type: "heading",
|
||||
attrs: { level: heading[1].length },
|
||||
content: [textNode(heading[2])],
|
||||
});
|
||||
index += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (/^[-*]\s+/.test(current)) {
|
||||
const items: Array<Record<string, unknown>> = [];
|
||||
|
||||
while (index < lines.length && /^[-*]\s+/.test(lines[index] ?? "")) {
|
||||
items.push({
|
||||
type: "listItem",
|
||||
content: [
|
||||
{
|
||||
type: "paragraph",
|
||||
content: [textNode((lines[index] ?? "").replace(/^[-*]\s+/, ""))],
|
||||
},
|
||||
],
|
||||
});
|
||||
index += 1;
|
||||
}
|
||||
|
||||
content.push({
|
||||
type: "bulletList",
|
||||
content: items,
|
||||
});
|
||||
continue;
|
||||
}
|
||||
|
||||
const paragraphLines: string[] = [];
|
||||
|
||||
while (index < lines.length && (lines[index]?.trim().length ?? 0) > 0) {
|
||||
paragraphLines.push(lines[index] ?? "");
|
||||
index += 1;
|
||||
}
|
||||
|
||||
content.push(paragraphNode(paragraphLines));
|
||||
}
|
||||
|
||||
return {
|
||||
type: "doc",
|
||||
version: 1,
|
||||
content,
|
||||
};
|
||||
}
|
||||
339
skills/atlassian/claude-code/scripts/src/cli.ts
Normal file
339
skills/atlassian/claude-code/scripts/src/cli.ts
Normal file
@@ -0,0 +1,339 @@
|
||||
import process from "node:process";
|
||||
import { pathToFileURL } from "node:url";
|
||||
|
||||
import { Command } from "commander";
|
||||
|
||||
import { createConfluenceClient } from "./confluence.js";
|
||||
import { loadConfig } from "./config.js";
|
||||
import { readWorkspaceFile } from "./files.js";
|
||||
import { createJiraClient } from "./jira.js";
|
||||
import { writeOutput } from "./output.js";
|
||||
import { runRawCommand } from "./raw.js";
|
||||
import type { FetchLike, OutputFormat, Writer } from "./types.js";
|
||||
|
||||
type CliContext = {
|
||||
cwd?: string;
|
||||
env?: NodeJS.ProcessEnv;
|
||||
fetchImpl?: FetchLike;
|
||||
stdout?: Writer;
|
||||
stderr?: Writer;
|
||||
};
|
||||
|
||||
function resolveFormat(format: string | undefined): OutputFormat {
|
||||
return format === "text" ? "text" : "json";
|
||||
}
|
||||
|
||||
function createRuntime(context: CliContext) {
|
||||
const cwd = context.cwd ?? process.cwd();
|
||||
const env = context.env ?? process.env;
|
||||
const stdout = context.stdout ?? process.stdout;
|
||||
const stderr = context.stderr ?? process.stderr;
|
||||
let configCache: ReturnType<typeof loadConfig> | undefined;
|
||||
let jiraCache: ReturnType<typeof createJiraClient> | undefined;
|
||||
let confluenceCache: ReturnType<typeof createConfluenceClient> | undefined;
|
||||
|
||||
function getConfig() {
|
||||
configCache ??= loadConfig(env, { cwd });
|
||||
return configCache;
|
||||
}
|
||||
|
||||
function getJiraClient() {
|
||||
jiraCache ??= createJiraClient({
|
||||
config: getConfig(),
|
||||
fetchImpl: context.fetchImpl,
|
||||
});
|
||||
return jiraCache;
|
||||
}
|
||||
|
||||
function getConfluenceClient() {
|
||||
confluenceCache ??= createConfluenceClient({
|
||||
config: getConfig(),
|
||||
fetchImpl: context.fetchImpl,
|
||||
});
|
||||
return confluenceCache;
|
||||
}
|
||||
|
||||
async function readBodyFile(filePath: string | undefined) {
|
||||
if (!filePath) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return readWorkspaceFile(filePath, cwd);
|
||||
}
|
||||
|
||||
return {
|
||||
cwd,
|
||||
stdout,
|
||||
stderr,
|
||||
readBodyFile,
|
||||
getConfig,
|
||||
getJiraClient,
|
||||
getConfluenceClient,
|
||||
fetchImpl: context.fetchImpl,
|
||||
};
|
||||
}
|
||||
|
||||
export function buildProgram(context: CliContext = {}) {
|
||||
const runtime = createRuntime(context);
|
||||
const program = new Command()
|
||||
.name("atlassian")
|
||||
.description("Portable Atlassian CLI for multi-agent skills")
|
||||
.version("0.1.0");
|
||||
|
||||
program
|
||||
.command("health")
|
||||
.description("Validate configuration and Atlassian connectivity")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action((options) => {
|
||||
writeOutput(
|
||||
runtime.stdout,
|
||||
{
|
||||
ok: true,
|
||||
data: {
|
||||
baseUrl: runtime.getConfig().baseUrl,
|
||||
jiraBaseUrl: runtime.getConfig().jiraBaseUrl,
|
||||
confluenceBaseUrl: runtime.getConfig().confluenceBaseUrl,
|
||||
defaultProject: runtime.getConfig().defaultProject,
|
||||
defaultSpace: runtime.getConfig().defaultSpace,
|
||||
},
|
||||
},
|
||||
resolveFormat(options.format),
|
||||
);
|
||||
});
|
||||
|
||||
program
|
||||
.command("conf-search")
|
||||
.requiredOption("--query <query>", "CQL search query")
|
||||
.option("--max-results <number>", "Maximum results to return", "50")
|
||||
.option("--start-at <number>", "Result offset", "0")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action(async (options) => {
|
||||
const payload = await runtime.getConfluenceClient().searchPages({
|
||||
query: options.query,
|
||||
maxResults: Number(options.maxResults),
|
||||
startAt: Number(options.startAt),
|
||||
});
|
||||
|
||||
writeOutput(runtime.stdout, payload, resolveFormat(options.format));
|
||||
});
|
||||
|
||||
program
|
||||
.command("conf-get")
|
||||
.requiredOption("--page <page>", "Confluence page ID")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action(async (options) => {
|
||||
const payload = await runtime.getConfluenceClient().getPage(options.page);
|
||||
writeOutput(runtime.stdout, payload, resolveFormat(options.format));
|
||||
});
|
||||
|
||||
program
|
||||
.command("conf-create")
|
||||
.requiredOption("--title <title>", "Confluence page title")
|
||||
.requiredOption("--body-file <path>", "Workspace-relative storage-format body file")
|
||||
.option("--space <space>", "Confluence space ID")
|
||||
.option("--dry-run", "Print the request without sending it")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action(async (options) => {
|
||||
const payload = await runtime.getConfluenceClient().createPage({
|
||||
space: options.space,
|
||||
title: options.title,
|
||||
body: (await runtime.readBodyFile(options.bodyFile)) as string,
|
||||
dryRun: Boolean(options.dryRun),
|
||||
});
|
||||
|
||||
writeOutput(runtime.stdout, payload, resolveFormat(options.format));
|
||||
});
|
||||
|
||||
program
|
||||
.command("conf-update")
|
||||
.requiredOption("--page <page>", "Confluence page ID")
|
||||
.requiredOption("--title <title>", "Confluence page title")
|
||||
.requiredOption("--body-file <path>", "Workspace-relative storage-format body file")
|
||||
.option("--dry-run", "Print the request without sending it")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action(async (options) => {
|
||||
const payload = await runtime.getConfluenceClient().updatePage({
|
||||
pageId: options.page,
|
||||
title: options.title,
|
||||
body: (await runtime.readBodyFile(options.bodyFile)) as string,
|
||||
dryRun: Boolean(options.dryRun),
|
||||
});
|
||||
|
||||
writeOutput(runtime.stdout, payload, resolveFormat(options.format));
|
||||
});
|
||||
|
||||
program
|
||||
.command("conf-comment")
|
||||
.requiredOption("--page <page>", "Confluence page ID")
|
||||
.requiredOption("--body-file <path>", "Workspace-relative storage-format body file")
|
||||
.option("--dry-run", "Print the request without sending it")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action(async (options) => {
|
||||
const payload = await runtime.getConfluenceClient().commentPage({
|
||||
pageId: options.page,
|
||||
body: (await runtime.readBodyFile(options.bodyFile)) as string,
|
||||
dryRun: Boolean(options.dryRun),
|
||||
});
|
||||
|
||||
writeOutput(runtime.stdout, payload, resolveFormat(options.format));
|
||||
});
|
||||
|
||||
program
|
||||
.command("conf-children")
|
||||
.requiredOption("--page <page>", "Confluence page ID")
|
||||
.option("--max-results <number>", "Maximum results to return", "50")
|
||||
.option("--start-at <number>", "Cursor/start token", "0")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action(async (options) => {
|
||||
const payload = await runtime.getConfluenceClient().listChildren(
|
||||
options.page,
|
||||
Number(options.maxResults),
|
||||
Number(options.startAt),
|
||||
);
|
||||
|
||||
writeOutput(runtime.stdout, payload, resolveFormat(options.format));
|
||||
});
|
||||
|
||||
program
|
||||
.command("raw")
|
||||
.requiredOption("--product <product>", "jira or confluence")
|
||||
.requiredOption("--method <method>", "GET, POST, or PUT")
|
||||
.requiredOption("--path <path>", "Validated API path")
|
||||
.option("--body-file <path>", "Workspace-relative JSON file")
|
||||
.option("--dry-run", "Print the request without sending it")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action(async (options) => {
|
||||
const payload = await runRawCommand(runtime.getConfig(), runtime.fetchImpl, {
|
||||
product: options.product,
|
||||
method: String(options.method).toUpperCase(),
|
||||
path: options.path,
|
||||
bodyFile: options.bodyFile,
|
||||
cwd: runtime.cwd,
|
||||
dryRun: Boolean(options.dryRun),
|
||||
});
|
||||
|
||||
writeOutput(runtime.stdout, payload, resolveFormat(options.format));
|
||||
});
|
||||
|
||||
program
|
||||
.command("jira-search")
|
||||
.requiredOption("--jql <jql>", "JQL expression to execute")
|
||||
.option("--max-results <number>", "Maximum results to return", "50")
|
||||
.option("--start-at <number>", "Result offset", "0")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action(async (options) => {
|
||||
const payload = await runtime.getJiraClient().searchIssues({
|
||||
jql: options.jql,
|
||||
maxResults: Number(options.maxResults),
|
||||
startAt: Number(options.startAt),
|
||||
});
|
||||
|
||||
writeOutput(runtime.stdout, payload, resolveFormat(options.format));
|
||||
});
|
||||
|
||||
program
|
||||
.command("jira-get")
|
||||
.requiredOption("--issue <issue>", "Issue key")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action(async (options) => {
|
||||
const payload = await runtime.getJiraClient().getIssue(options.issue);
|
||||
writeOutput(runtime.stdout, payload, resolveFormat(options.format));
|
||||
});
|
||||
|
||||
program
|
||||
.command("jira-create")
|
||||
.requiredOption("--type <type>", "Issue type name")
|
||||
.requiredOption("--summary <summary>", "Issue summary")
|
||||
.option("--project <project>", "Project key")
|
||||
.option("--description-file <path>", "Workspace-relative markdown/text file")
|
||||
.option("--dry-run", "Print the request without sending it")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action(async (options) => {
|
||||
const payload = await runtime.getJiraClient().createIssue({
|
||||
project: options.project,
|
||||
type: options.type,
|
||||
summary: options.summary,
|
||||
description: await runtime.readBodyFile(options.descriptionFile),
|
||||
dryRun: Boolean(options.dryRun),
|
||||
});
|
||||
|
||||
writeOutput(runtime.stdout, payload, resolveFormat(options.format));
|
||||
});
|
||||
|
||||
program
|
||||
.command("jira-update")
|
||||
.requiredOption("--issue <issue>", "Issue key")
|
||||
.option("--summary <summary>", "Updated summary")
|
||||
.option("--description-file <path>", "Workspace-relative markdown/text file")
|
||||
.option("--dry-run", "Print the request without sending it")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action(async (options) => {
|
||||
const payload = await runtime.getJiraClient().updateIssue({
|
||||
issue: options.issue,
|
||||
summary: options.summary,
|
||||
description: await runtime.readBodyFile(options.descriptionFile),
|
||||
dryRun: Boolean(options.dryRun),
|
||||
});
|
||||
|
||||
writeOutput(runtime.stdout, payload, resolveFormat(options.format));
|
||||
});
|
||||
|
||||
program
|
||||
.command("jira-comment")
|
||||
.requiredOption("--issue <issue>", "Issue key")
|
||||
.requiredOption("--body-file <path>", "Workspace-relative markdown/text file")
|
||||
.option("--dry-run", "Print the request without sending it")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action(async (options) => {
|
||||
const payload = await runtime.getJiraClient().commentIssue({
|
||||
issue: options.issue,
|
||||
body: (await runtime.readBodyFile(options.bodyFile)) as string,
|
||||
dryRun: Boolean(options.dryRun),
|
||||
});
|
||||
|
||||
writeOutput(runtime.stdout, payload, resolveFormat(options.format));
|
||||
});
|
||||
|
||||
program
|
||||
.command("jira-transitions")
|
||||
.requiredOption("--issue <issue>", "Issue key")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action(async (options) => {
|
||||
const payload = await runtime.getJiraClient().getTransitions(options.issue);
|
||||
writeOutput(runtime.stdout, payload, resolveFormat(options.format));
|
||||
});
|
||||
|
||||
program
|
||||
.command("jira-transition")
|
||||
.requiredOption("--issue <issue>", "Issue key")
|
||||
.requiredOption("--transition <transition>", "Transition ID")
|
||||
.option("--dry-run", "Print the request without sending it")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action(async (options) => {
|
||||
const payload = await runtime.getJiraClient().transitionIssue({
|
||||
issue: options.issue,
|
||||
transition: options.transition,
|
||||
dryRun: Boolean(options.dryRun),
|
||||
});
|
||||
|
||||
writeOutput(runtime.stdout, payload, resolveFormat(options.format));
|
||||
});
|
||||
|
||||
return program;
|
||||
}
|
||||
|
||||
export async function runCli(argv = process.argv, context: CliContext = {}) {
|
||||
const program = buildProgram(context);
|
||||
await program.parseAsync(argv);
|
||||
}
|
||||
|
||||
const isDirectExecution =
|
||||
Boolean(process.argv[1]) && import.meta.url === pathToFileURL(process.argv[1]).href;
|
||||
|
||||
if (isDirectExecution) {
|
||||
runCli().catch((error: unknown) => {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
process.stderr.write(`${message}\n`);
|
||||
process.exitCode = 1;
|
||||
});
|
||||
}
|
||||
52
skills/atlassian/claude-code/scripts/src/config.ts
Normal file
52
skills/atlassian/claude-code/scripts/src/config.ts
Normal file
@@ -0,0 +1,52 @@
|
||||
import path from "node:path";
|
||||
|
||||
import { config as loadDotEnv } from "dotenv";
|
||||
|
||||
import type { AtlassianConfig } from "./types.js";
|
||||
|
||||
function normalizeBaseUrl(value: string) {
|
||||
return value.replace(/\/+$/, "");
|
||||
}
|
||||
|
||||
function readRequired(env: NodeJS.ProcessEnv, key: string) {
|
||||
const value = env[key]?.trim();
|
||||
|
||||
if (!value) {
|
||||
throw new Error(`Missing required environment variable: ${key}`);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
/**
 * Resolve Atlassian connection settings from the environment.
 *
 * First merges a `.env` file from options.cwd (default: process.cwd()) into
 * `env` with `override: false`, so values already present in the environment
 * win over the file.
 *
 * Required: ATLASSIAN_BASE_URL, ATLASSIAN_EMAIL, ATLASSIAN_API_TOKEN.
 * Optional: ATLASSIAN_JIRA_BASE_URL / ATLASSIAN_CONFLUENCE_BASE_URL (both fall
 * back to the base URL), ATLASSIAN_DEFAULT_PROJECT, ATLASSIAN_DEFAULT_SPACE.
 *
 * @throws Error when a required variable is missing or blank (via readRequired).
 */
export function loadConfig(
  env: NodeJS.ProcessEnv = process.env,
  options?: {
    cwd?: string;
  },
): AtlassianConfig {
  // Side effect: dotenv writes file values into `env` in place (no override).
  loadDotEnv({
    path: path.resolve(options?.cwd ?? process.cwd(), ".env"),
    processEnv: env as Record<string, string>,
    override: false,
  });

  // All base URLs are normalized (trailing slashes stripped) before use.
  const baseUrl = normalizeBaseUrl(readRequired(env, "ATLASSIAN_BASE_URL"));

  return {
    baseUrl,
    // `|| baseUrl` (not ??) intentionally treats empty/whitespace-only
    // overrides as "not set".
    jiraBaseUrl: normalizeBaseUrl(env.ATLASSIAN_JIRA_BASE_URL?.trim() || baseUrl),
    confluenceBaseUrl: normalizeBaseUrl(env.ATLASSIAN_CONFLUENCE_BASE_URL?.trim() || baseUrl),
    email: readRequired(env, "ATLASSIAN_EMAIL"),
    apiToken: readRequired(env, "ATLASSIAN_API_TOKEN"),
    // Blank strings collapse to undefined so downstream `||` fallbacks work.
    defaultProject: env.ATLASSIAN_DEFAULT_PROJECT?.trim() || undefined,
    defaultSpace: env.ATLASSIAN_DEFAULT_SPACE?.trim() || undefined,
  };
}
|
||||
|
||||
export function createBasicAuthHeader(config: {
|
||||
email: string;
|
||||
apiToken: string;
|
||||
[key: string]: unknown;
|
||||
}) {
|
||||
return `Basic ${Buffer.from(`${config.email}:${config.apiToken}`).toString("base64")}`;
|
||||
}
|
||||
292
skills/atlassian/claude-code/scripts/src/confluence.ts
Normal file
292
skills/atlassian/claude-code/scripts/src/confluence.ts
Normal file
@@ -0,0 +1,292 @@
|
||||
import { sendJsonRequest } from "./http.js";
|
||||
import type { AtlassianConfig, CommandOutput, FetchLike } from "./types.js";
|
||||
|
||||
// Dependencies for createConfluenceClient; fetchImpl is injectable for tests.
type ConfluenceClientOptions = {
  config: AtlassianConfig;
  fetchImpl?: FetchLike;
};

// Paged CQL search parameters.
type SearchInput = {
  query: string;
  maxResults: number;
  startAt: number;
};

// Page-creation inputs; `space` falls back to config.defaultSpace.
// `body` is Confluence storage-format markup (see createPage's request).
type CreateInput = {
  space?: string;
  title: string;
  body: string;
  dryRun?: boolean;
};

// Page-update inputs; the current version is re-read before the PUT.
type UpdateInput = {
  pageId: string;
  title: string;
  body: string;
  dryRun?: boolean;
};

// Footer-comment inputs for a page.
type CommentInput = {
  pageId: string;
  body: string;
  dryRun?: boolean;
};

// NOTE(review): PageSummary is declared but not referenced by the visible
// code (normalizePage returns an inferred shape) — confirm whether it can be
// removed or should annotate normalizePage's return.
type PageSummary = {
  id: string;
  title: string;
  type: string;
  status?: string;
  spaceId?: string;
  url?: string;
};
|
||||
|
||||
function buildUrl(baseUrl: string, path: string) {
|
||||
return new URL(path, `${baseUrl}/`).toString();
|
||||
}
|
||||
|
||||
function normalizePage(baseUrl: string, page: Record<string, unknown>, excerpt?: string) {
|
||||
const links = (page._links ?? {}) as Record<string, unknown>;
|
||||
|
||||
return {
|
||||
id: String(page.id ?? ""),
|
||||
title: String(page.title ?? ""),
|
||||
type: String(page.type ?? "page"),
|
||||
...(page.status ? { status: String(page.status) } : {}),
|
||||
...(page.spaceId ? { spaceId: String(page.spaceId) } : {}),
|
||||
...(excerpt ? { excerpt } : {}),
|
||||
...(links.webui ? { url: `${baseUrl}${String(links.webui)}` } : {}),
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Build the Confluence client used by the CLI commands.
 *
 * All operations return the shared CommandOutput envelope. Write operations
 * (createPage/updatePage/commentPage) honor dryRun by returning the request
 * descriptor instead of sending it. Reads use the Confluence v2 pages API;
 * search uses the older /wiki/rest/api/search endpoint with CQL.
 */
export function createConfluenceClient(options: ConfluenceClientOptions) {
  const config = options.config;

  // Fetch the current page (including storage-format body and version) so
  // updatePage can do a read-modify-write with version + 1.
  async function getPageForUpdate(pageId: string) {
    return (await sendJsonRequest({
      config,
      fetchImpl: options.fetchImpl,
      url: buildUrl(config.confluenceBaseUrl, `/wiki/api/v2/pages/${pageId}?body-format=storage`),
      method: "GET",
      errorPrefix: "Confluence request failed",
    })) as Record<string, unknown>;
  }

  return {
    // CQL search; pagination is offset-based (limit/start) on this endpoint.
    async searchPages(input: SearchInput): Promise<CommandOutput<unknown>> {
      const url = new URL("/wiki/rest/api/search", `${config.confluenceBaseUrl}/`);
      url.searchParams.set("cql", input.query);
      url.searchParams.set("limit", String(input.maxResults));
      url.searchParams.set("start", String(input.startAt));

      const raw = (await sendJsonRequest({
        config,
        fetchImpl: options.fetchImpl,
        url: url.toString(),
        method: "GET",
        errorPrefix: "Confluence request failed",
      })) as Record<string, unknown>;

      const results = Array.isArray(raw.results) ? raw.results : [];

      return {
        ok: true,
        data: {
          // Search results wrap the page under `content`; the excerpt lives
          // on the result entry itself.
          pages: results.map((entry) => {
            const result = entry as Record<string, unknown>;
            return normalizePage(
              config.baseUrl,
              (result.content ?? {}) as Record<string, unknown>,
              result.excerpt ? String(result.excerpt) : undefined,
            );
          }),
          // Echo server paging values, falling back to what was requested.
          startAt: Number(raw.start ?? input.startAt),
          maxResults: Number(raw.limit ?? input.maxResults),
          total: Number(raw.totalSize ?? raw.size ?? results.length),
        },
      };
    },

    // Read one page with its storage-format body and version number.
    async getPage(pageId: string): Promise<CommandOutput<unknown>> {
      const raw = (await sendJsonRequest({
        config,
        fetchImpl: options.fetchImpl,
        url: buildUrl(config.confluenceBaseUrl, `/wiki/api/v2/pages/${pageId}?body-format=storage`),
        method: "GET",
        errorPrefix: "Confluence request failed",
      })) as Record<string, unknown>;

      const body = ((raw.body ?? {}) as Record<string, unknown>).storage as Record<string, unknown> | undefined;

      return {
        ok: true,
        data: {
          page: {
            ...normalizePage(config.baseUrl, raw),
            version: Number((((raw.version ?? {}) as Record<string, unknown>).number ?? 0)),
            body: body?.value ? String(body.value) : "",
          },
        },
        raw,
      };
    },

    // List a page's direct children.
    // NOTE(review): the v2 API paginates with an opaque `cursor` token, but
    // this passes the numeric startAt as the cursor — confirm paging past the
    // first page actually works.
    async listChildren(pageId: string, maxResults: number, startAt: number): Promise<CommandOutput<unknown>> {
      const url = new URL(`/wiki/api/v2/pages/${pageId}/direct-children`, `${config.confluenceBaseUrl}/`);
      url.searchParams.set("limit", String(maxResults));
      url.searchParams.set("cursor", String(startAt));

      const raw = (await sendJsonRequest({
        config,
        fetchImpl: options.fetchImpl,
        url: url.toString(),
        method: "GET",
        errorPrefix: "Confluence request failed",
      })) as Record<string, unknown>;

      const results = Array.isArray(raw.results) ? raw.results : [];
      const links = (raw._links ?? {}) as Record<string, unknown>;

      return {
        ok: true,
        data: {
          pages: results.map((page) => normalizePage(config.baseUrl, page as Record<string, unknown>)),
          nextCursor: links.next ? String(links.next) : null,
        },
      };
    },

    // Create a page in a space (input.space falls back to the configured default).
    // NOTE(review): the v2 create endpoint expects a numeric space ID in
    // `spaceId`; if callers pass a space *key* this may be rejected — confirm.
    async createPage(input: CreateInput): Promise<CommandOutput<unknown>> {
      const spaceId = input.space || config.defaultSpace;

      if (!spaceId) {
        throw new Error("conf-create requires --space or ATLASSIAN_DEFAULT_SPACE");
      }

      const request = {
        method: "POST" as const,
        url: buildUrl(config.confluenceBaseUrl, "/wiki/api/v2/pages"),
        body: {
          spaceId,
          title: input.title,
          status: "current",
          body: {
            representation: "storage",
            value: input.body,
          },
        },
      };

      // dryRun: surface the exact request instead of sending it.
      if (input.dryRun) {
        return {
          ok: true,
          dryRun: true,
          data: request,
        };
      }

      const raw = await sendJsonRequest({
        config,
        fetchImpl: options.fetchImpl,
        url: request.url,
        method: request.method,
        body: request.body,
        errorPrefix: "Confluence request failed",
      });

      return {
        ok: true,
        data: raw,
      };
    },

    // Replace a page's title/body. Reads the current version first and PUTs
    // version + 1; a 409 from the server is reported as an explicit conflict.
    async updatePage(input: UpdateInput): Promise<CommandOutput<unknown>> {
      const currentPage = await getPageForUpdate(input.pageId);
      const version = (((currentPage.version ?? {}) as Record<string, unknown>).number ?? 0) as number;
      const spaceId = String(currentPage.spaceId ?? "");

      const request = {
        method: "PUT" as const,
        url: buildUrl(config.confluenceBaseUrl, `/wiki/api/v2/pages/${input.pageId}`),
        body: {
          id: input.pageId,
          status: String(currentPage.status ?? "current"),
          title: input.title,
          spaceId,
          version: {
            number: Number(version) + 1,
          },
          body: {
            representation: "storage",
            value: input.body,
          },
        },
      };

      // Note: in dry-run mode the read above has already happened; only the
      // write is skipped.
      if (input.dryRun) {
        return {
          ok: true,
          dryRun: true,
          data: request,
        };
      }

      const raw = await sendJsonRequest({
        config,
        fetchImpl: options.fetchImpl,
        url: request.url,
        method: request.method,
        body: request.body,
        errorPrefix: "Confluence request failed",
        // Map a version-conflict status to a friendlier, actionable error.
        handleResponseError(response) {
          if (response.status === 409) {
            return new Error(`Confluence update conflict: page ${input.pageId} was updated by someone else`);
          }

          return undefined;
        },
      });

      return {
        ok: true,
        data: raw,
      };
    },

    // Add a footer comment to a page (body is storage-format markup).
    async commentPage(input: CommentInput): Promise<CommandOutput<unknown>> {
      const request = {
        method: "POST" as const,
        url: buildUrl(config.confluenceBaseUrl, "/wiki/api/v2/footer-comments"),
        body: {
          pageId: input.pageId,
          body: {
            representation: "storage",
            value: input.body,
          },
        },
      };

      if (input.dryRun) {
        return {
          ok: true,
          dryRun: true,
          data: request,
        };
      }

      const raw = await sendJsonRequest({
        config,
        fetchImpl: options.fetchImpl,
        url: request.url,
        method: request.method,
        body: request.body,
        errorPrefix: "Confluence request failed",
      });

      return {
        ok: true,
        data: raw,
      };
    },
  };
}
|
||||
13
skills/atlassian/claude-code/scripts/src/files.ts
Normal file
13
skills/atlassian/claude-code/scripts/src/files.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import { readFile } from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
|
||||
export async function readWorkspaceFile(filePath: string, cwd: string) {
|
||||
const resolved = path.resolve(cwd, filePath);
|
||||
const relative = path.relative(cwd, resolved);
|
||||
|
||||
if (relative.startsWith("..") || path.isAbsolute(relative)) {
|
||||
throw new Error(`--body-file must stay within the active workspace: ${filePath}`);
|
||||
}
|
||||
|
||||
return readFile(resolved, "utf8");
|
||||
}
|
||||
65
skills/atlassian/claude-code/scripts/src/http.ts
Normal file
65
skills/atlassian/claude-code/scripts/src/http.ts
Normal file
@@ -0,0 +1,65 @@
|
||||
import { createBasicAuthHeader } from "./config.js";
|
||||
import type { AtlassianConfig, FetchLike } from "./types.js";
|
||||
|
||||
export type HttpMethod = "GET" | "POST" | "PUT";
|
||||
|
||||
export function createJsonHeaders(config: AtlassianConfig, includeJsonBody: boolean) {
|
||||
const headers: Array<[string, string]> = [
|
||||
["Accept", "application/json"],
|
||||
["Authorization", createBasicAuthHeader(config)],
|
||||
];
|
||||
|
||||
if (includeJsonBody) {
|
||||
headers.push(["Content-Type", "application/json"]);
|
||||
}
|
||||
|
||||
return headers;
|
||||
}
|
||||
|
||||
export async function parseResponse(response: Response) {
|
||||
if (response.status === 204) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const contentType = response.headers.get("content-type") ?? "";
|
||||
|
||||
if (contentType.includes("application/json")) {
|
||||
return response.json();
|
||||
}
|
||||
|
||||
return response.text();
|
||||
}
|
||||
|
||||
export async function sendJsonRequest(options: {
|
||||
config: AtlassianConfig;
|
||||
fetchImpl?: FetchLike;
|
||||
url: string;
|
||||
method: HttpMethod;
|
||||
body?: unknown;
|
||||
errorPrefix: string;
|
||||
handleResponseError?: (response: Response) => Error | undefined;
|
||||
}) {
|
||||
const fetchImpl = options.fetchImpl ?? globalThis.fetch;
|
||||
|
||||
if (!fetchImpl) {
|
||||
throw new Error("Fetch API is not available in this runtime");
|
||||
}
|
||||
|
||||
const response = await fetchImpl(options.url, {
|
||||
method: options.method,
|
||||
headers: createJsonHeaders(options.config, options.body !== undefined),
|
||||
...(options.body === undefined ? {} : { body: JSON.stringify(options.body) }),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
const customError = options.handleResponseError?.(response);
|
||||
|
||||
if (customError) {
|
||||
throw customError;
|
||||
}
|
||||
|
||||
throw new Error(`${options.errorPrefix}: ${response.status} ${response.statusText}`);
|
||||
}
|
||||
|
||||
return parseResponse(response);
|
||||
}
|
||||
264
skills/atlassian/claude-code/scripts/src/jira.ts
Normal file
264
skills/atlassian/claude-code/scripts/src/jira.ts
Normal file
@@ -0,0 +1,264 @@
|
||||
import { markdownToAdf } from "./adf.js";
|
||||
import { sendJsonRequest } from "./http.js";
|
||||
import type { AtlassianConfig, CommandOutput, FetchLike, JiraIssueSummary } from "./types.js";
|
||||
|
||||
// Field list requested on every issue read/search, so responses stay small
// and normalizeIssue always has the same shape to work from.
const ISSUE_FIELDS = ["summary", "issuetype", "status", "assignee", "created", "updated"] as const;

// Dependencies for createJiraClient; fetchImpl is injectable for tests.
type JiraClientOptions = {
  config: AtlassianConfig;
  fetchImpl?: FetchLike;
};

// Paged JQL search parameters.
type SearchInput = {
  jql: string;
  maxResults: number;
  startAt: number;
};

// Issue-creation inputs; `project` falls back to config.defaultProject and
// `description` is markdown (converted to ADF before sending).
type CreateInput = {
  project?: string;
  type: string;
  summary: string;
  description?: string;
  dryRun?: boolean;
};

// Issue-update inputs; at least one of summary/description must be provided
// (enforced in updateIssue).
type UpdateInput = {
  issue: string;
  summary?: string;
  description?: string;
  dryRun?: boolean;
};

// Comment inputs; `body` is markdown (converted to ADF).
type CommentInput = {
  issue: string;
  body: string;
  dryRun?: boolean;
};

// Workflow-transition inputs; `transition` is the transition ID as a string.
type TransitionInput = {
  issue: string;
  transition: string;
  dryRun?: boolean;
};
|
||||
|
||||
function normalizeIssue(config: AtlassianConfig, issue: Record<string, unknown>): JiraIssueSummary {
|
||||
const fields = (issue.fields ?? {}) as Record<string, unknown>;
|
||||
const issueType = (fields.issuetype ?? {}) as Record<string, unknown>;
|
||||
const status = (fields.status ?? {}) as Record<string, unknown>;
|
||||
const assignee = (fields.assignee ?? {}) as Record<string, unknown>;
|
||||
|
||||
return {
|
||||
key: String(issue.key ?? ""),
|
||||
summary: String(fields.summary ?? ""),
|
||||
issueType: String(issueType.name ?? ""),
|
||||
status: String(status.name ?? ""),
|
||||
assignee: assignee.displayName ? String(assignee.displayName) : undefined,
|
||||
created: String(fields.created ?? ""),
|
||||
updated: String(fields.updated ?? ""),
|
||||
url: `${config.baseUrl}/browse/${issue.key ?? ""}`,
|
||||
};
|
||||
}
|
||||
|
||||
function createRequest(config: AtlassianConfig, method: "GET" | "POST" | "PUT", path: string, body?: unknown) {
|
||||
const url = new URL(path, `${config.jiraBaseUrl}/`);
|
||||
|
||||
return {
|
||||
method,
|
||||
url: url.toString(),
|
||||
...(body === undefined ? {} : { body }),
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Build the Jira client used by the CLI commands.
 *
 * All operations return the shared CommandOutput envelope. Write operations
 * honor dryRun by returning the request descriptor instead of sending it.
 * Markdown bodies are converted to ADF via markdownToAdf before sending.
 *
 * @throws Error at construction time when no fetch implementation exists.
 */
export function createJiraClient(options: JiraClientOptions) {
  const fetchImpl = options.fetchImpl ?? globalThis.fetch;

  // Fail fast so individual commands never hit a missing-fetch surprise.
  if (!fetchImpl) {
    throw new Error("Fetch API is not available in this runtime");
  }

  // Small wrapper: resolve the path against the Jira base URL and delegate
  // to the shared JSON transport with a Jira-specific error prefix.
  async function send(method: "GET" | "POST" | "PUT", path: string, body?: unknown) {
    const request = createRequest(options.config, method, path, body);
    return sendJsonRequest({
      config: options.config,
      fetchImpl,
      url: request.url,
      method,
      body,
      errorPrefix: "Jira request failed",
    });
  }

  return {
    // JQL search, offset-paged.
    // NOTE(review): POST /rest/api/3/search has been deprecated by Atlassian
    // in favor of /rest/api/3/search/jql — confirm before shipping.
    async searchIssues(input: SearchInput): Promise<CommandOutput<unknown>> {
      const raw = (await send("POST", "/rest/api/3/search", {
        jql: input.jql,
        maxResults: input.maxResults,
        startAt: input.startAt,
        fields: [...ISSUE_FIELDS],
      })) as Record<string, unknown>;

      const issues = Array.isArray(raw.issues) ? raw.issues : [];

      return {
        ok: true,
        data: {
          issues: issues.map((issue) => normalizeIssue(options.config, issue as Record<string, unknown>)),
          // Echo server paging values, falling back to what was requested.
          startAt: Number(raw.startAt ?? input.startAt),
          maxResults: Number(raw.maxResults ?? input.maxResults),
          total: Number(raw.total ?? issues.length),
        },
      };
    },

    // Read a single issue, restricted to ISSUE_FIELDS.
    async getIssue(issue: string): Promise<CommandOutput<unknown>> {
      // URL is built only to encode the fields query string; send() receives
      // the path + query portion and re-roots it on the Jira base URL.
      const url = new URL(`/rest/api/3/issue/${issue}`, `${options.config.jiraBaseUrl}/`);
      url.searchParams.set("fields", ISSUE_FIELDS.join(","));

      const raw = (await send("GET", `${url.pathname}${url.search}`)) as Record<string, unknown>;

      return {
        ok: true,
        data: {
          issue: normalizeIssue(options.config, raw),
        },
        raw,
      };
    },

    // List the workflow transitions currently available on an issue.
    async getTransitions(issue: string): Promise<CommandOutput<unknown>> {
      const raw = (await send(
        "GET",
        `/rest/api/3/issue/${issue}/transitions`,
      )) as { transitions?: Array<Record<string, unknown>> };

      return {
        ok: true,
        data: {
          transitions: (raw.transitions ?? []).map((transition) => ({
            id: String(transition.id ?? ""),
            name: String(transition.name ?? ""),
            toStatus: String(((transition.to ?? {}) as Record<string, unknown>).name ?? ""),
            hasScreen: Boolean(transition.hasScreen),
          })),
        },
      };
    },

    // Create an issue; project falls back to the configured default.
    async createIssue(input: CreateInput): Promise<CommandOutput<unknown>> {
      const project = input.project || options.config.defaultProject;

      if (!project) {
        throw new Error("jira-create requires --project or ATLASSIAN_DEFAULT_PROJECT");
      }

      const request = createRequest(options.config, "POST", "/rest/api/3/issue", {
        fields: {
          project: { key: project },
          issuetype: { name: input.type },
          summary: input.summary,
          // description is optional; markdown is converted to ADF when present.
          ...(input.description ? { description: markdownToAdf(input.description) } : {}),
        },
      });

      // dryRun: surface the exact request instead of sending it.
      if (input.dryRun) {
        return {
          ok: true,
          dryRun: true,
          data: request,
        };
      }

      const raw = await send("POST", "/rest/api/3/issue", request.body);
      return { ok: true, data: raw };
    },

    // Update summary and/or description; at least one must be provided.
    async updateIssue(input: UpdateInput): Promise<CommandOutput<unknown>> {
      const fields: Record<string, unknown> = {};

      if (input.summary) {
        fields.summary = input.summary;
      }

      if (input.description) {
        fields.description = markdownToAdf(input.description);
      }

      if (Object.keys(fields).length === 0) {
        throw new Error("jira-update requires --summary and/or --description-file");
      }

      const request = createRequest(options.config, "PUT", `/rest/api/3/issue/${input.issue}`, {
        fields,
      });

      if (input.dryRun) {
        return {
          ok: true,
          dryRun: true,
          data: request,
        };
      }

      // Jira's update returns no useful body; report a synthetic confirmation.
      await send("PUT", `/rest/api/3/issue/${input.issue}`, request.body);
      return {
        ok: true,
        data: {
          issue: input.issue,
          updated: true,
        },
      };
    },

    // Add a comment (markdown body converted to ADF).
    async commentIssue(input: CommentInput): Promise<CommandOutput<unknown>> {
      const request = createRequest(options.config, "POST", `/rest/api/3/issue/${input.issue}/comment`, {
        body: markdownToAdf(input.body),
      });

      if (input.dryRun) {
        return {
          ok: true,
          dryRun: true,
          data: request,
        };
      }

      const raw = await send("POST", `/rest/api/3/issue/${input.issue}/comment`, request.body);
      return {
        ok: true,
        data: raw,
      };
    },

    // Execute a workflow transition by transition ID.
    async transitionIssue(input: TransitionInput): Promise<CommandOutput<unknown>> {
      const request = createRequest(
        options.config,
        "POST",
        `/rest/api/3/issue/${input.issue}/transitions`,
        {
          transition: {
            id: input.transition,
          },
        },
      );

      if (input.dryRun) {
        return {
          ok: true,
          dryRun: true,
          data: request,
        };
      }

      // The transitions endpoint returns 204; report a synthetic confirmation.
      await send("POST", `/rest/api/3/issue/${input.issue}/transitions`, request.body);
      return {
        ok: true,
        data: {
          issue: input.issue,
          transitioned: true,
          transition: input.transition,
        },
      };
    },
  };
}
|
||||
44
skills/atlassian/claude-code/scripts/src/output.ts
Normal file
44
skills/atlassian/claude-code/scripts/src/output.ts
Normal file
@@ -0,0 +1,44 @@
|
||||
import type { CommandOutput, OutputFormat, Writer } from "./types.js";
|
||||
|
||||
function renderText(payload: CommandOutput<unknown>) {
|
||||
const data = payload.data as Record<string, unknown>;
|
||||
|
||||
if (Array.isArray(data?.issues)) {
|
||||
return data.issues
|
||||
.map((issue) => {
|
||||
const item = issue as Record<string, string>;
|
||||
return `${item.key} [${item.status}] ${item.issueType} - ${item.summary}`;
|
||||
})
|
||||
.join("\n");
|
||||
}
|
||||
|
||||
if (data?.issue && typeof data.issue === "object") {
|
||||
const issue = data.issue as Record<string, string>;
|
||||
return [
|
||||
issue.key,
|
||||
`${issue.issueType} | ${issue.status}`,
|
||||
issue.summary,
|
||||
issue.url,
|
||||
].join("\n");
|
||||
}
|
||||
|
||||
if (Array.isArray(data?.transitions)) {
|
||||
return data.transitions
|
||||
.map((transition) => {
|
||||
const item = transition as Record<string, string>;
|
||||
return `${item.id} ${item.name} -> ${item.toStatus}`;
|
||||
})
|
||||
.join("\n");
|
||||
}
|
||||
|
||||
return JSON.stringify(payload, null, 2);
|
||||
}
|
||||
|
||||
export function writeOutput(
|
||||
writer: Writer,
|
||||
payload: CommandOutput<unknown>,
|
||||
format: OutputFormat = "json",
|
||||
) {
|
||||
const body = format === "text" ? renderText(payload) : JSON.stringify(payload, null, 2);
|
||||
writer.write(`${body}\n`);
|
||||
}
|
||||
85
skills/atlassian/claude-code/scripts/src/raw.ts
Normal file
85
skills/atlassian/claude-code/scripts/src/raw.ts
Normal file
@@ -0,0 +1,85 @@
|
||||
import { readWorkspaceFile } from "./files.js";
|
||||
import { sendJsonRequest } from "./http.js";
|
||||
import type { AtlassianConfig, CommandOutput, FetchLike } from "./types.js";
|
||||
|
||||
// Allow-listed URL prefixes for the escape-hatch `raw` command; paths outside
// these API roots are rejected by validatePath.
const JIRA_ALLOWED_PREFIXES = ["/rest/api/3/"] as const;
const CONFLUENCE_ALLOWED_PREFIXES = ["/wiki/api/v2/", "/wiki/rest/api/"] as const;

// Parsed inputs for one raw API call.
type RawInput = {
  product: "jira" | "confluence";
  method: string;   // validated to GET/POST/PUT by validateMethod
  path: string;     // must start with an allow-listed prefix
  bodyFile?: string; // optional workspace-relative JSON file for the body
  cwd: string;      // workspace root used to resolve bodyFile
  dryRun?: boolean; // when true, the request is returned, not sent
};
|
||||
|
||||
function getAllowedPrefixes(product: RawInput["product"]) {
|
||||
return product === "jira" ? JIRA_ALLOWED_PREFIXES : CONFLUENCE_ALLOWED_PREFIXES;
|
||||
}
|
||||
|
||||
function buildUrl(config: AtlassianConfig, product: RawInput["product"], path: string) {
|
||||
const baseUrl = product === "jira" ? config.jiraBaseUrl : config.confluenceBaseUrl;
|
||||
return new URL(path, `${baseUrl}/`).toString();
|
||||
}
|
||||
|
||||
function validateMethod(method: string): asserts method is "GET" | "POST" | "PUT" {
|
||||
if (!["GET", "POST", "PUT"].includes(method)) {
|
||||
throw new Error("raw only allows GET, POST, and PUT");
|
||||
}
|
||||
}
|
||||
|
||||
function validatePath(product: RawInput["product"], path: string) {
|
||||
const allowedPrefixes = getAllowedPrefixes(product);
|
||||
|
||||
if (!allowedPrefixes.some((prefix) => path.startsWith(prefix))) {
|
||||
throw new Error(`raw path is not allowed for ${product}: ${path}`);
|
||||
}
|
||||
}
|
||||
|
||||
async function readRawBody(bodyFile: string | undefined, cwd: string) {
|
||||
if (!bodyFile) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const contents = await readWorkspaceFile(bodyFile, cwd);
|
||||
return JSON.parse(contents) as unknown;
|
||||
}
|
||||
|
||||
/**
 * Execute the escape-hatch `raw` command: an arbitrary request against an
 * allow-listed Jira/Confluence API path.
 *
 * Validation happens first (method allow-list, path prefix allow-list), then
 * the optional JSON body file is read, and finally the request is either
 * returned as-is (dryRun) or sent through the shared JSON transport.
 *
 * @throws Error for a disallowed method or path, a body file outside the
 *   workspace, or unparsable JSON in the body file.
 */
export async function runRawCommand(
  config: AtlassianConfig,
  fetchImpl: FetchLike | undefined,
  input: RawInput,
): Promise<CommandOutput<unknown>> {
  // Narrows input.method to "GET" | "POST" | "PUT" for the calls below.
  validateMethod(input.method);
  validatePath(input.product, input.path);

  const body = await readRawBody(input.bodyFile, input.cwd);
  // The body key is omitted entirely when absent so dry-run output is clean.
  const request = {
    method: input.method,
    url: buildUrl(config, input.product, input.path),
    ...(body === undefined ? {} : { body }),
  };

  if (input.dryRun) {
    return {
      ok: true,
      dryRun: true,
      data: request,
    };
  }

  const data = await sendJsonRequest({
    config,
    fetchImpl,
    url: request.url,
    method: input.method,
    body,
    errorPrefix: "Raw request failed",
  });

  return {
    ok: true,
    data,
  };
}
|
||||
35
skills/atlassian/claude-code/scripts/src/types.ts
Normal file
35
skills/atlassian/claude-code/scripts/src/types.ts
Normal file
@@ -0,0 +1,35 @@
|
||||
// Resolved Atlassian Cloud connection settings, as produced by loadConfig
// (base URLs are normalized with no trailing slash; product URLs fall back
// to baseUrl).
export type AtlassianConfig = {
  baseUrl: string;
  jiraBaseUrl: string;
  confluenceBaseUrl: string;
  email: string;    // account email paired with the API token for Basic auth
  apiToken: string;
  defaultProject?: string; // fallback Jira project for jira-create
  defaultSpace?: string;   // fallback Confluence space for conf-create
};

// Uniform success envelope every command returns; failures are thrown as
// Errors rather than represented here (ok is the literal true).
export type CommandOutput<T> = {
  ok: true;
  data: T;
  dryRun?: boolean; // set (true) when the request was only previewed
  raw?: unknown;    // optional unmodified API payload for debugging
};

// Flattened Jira issue shape used for list/detail output (see normalizeIssue).
export type JiraIssueSummary = {
  key: string;
  summary: string;
  issueType: string;
  status: string;
  assignee?: string; // display name; absent when the issue is unassigned
  created: string;
  updated: string;
  url: string;       // site /browse link for the issue
};

// Minimal output sink (satisfied by e.g. process.stdout) so writeOutput is
// testable without touching real stdio.
export type Writer = {
  write(chunk: string | Uint8Array): unknown;
};

// Injectable fetch implementation, e.g. for stubbing HTTP in tests.
export type FetchLike = typeof fetch;

// Supported values for the --format option.
export type OutputFormat = "json" | "text";
||||
15
skills/atlassian/claude-code/scripts/tsconfig.json
Normal file
15
skills/atlassian/claude-code/scripts/tsconfig.json
Normal file
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2022",
|
||||
"module": "NodeNext",
|
||||
"moduleResolution": "NodeNext",
|
||||
"esModuleInterop": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"strict": true,
|
||||
"skipLibCheck": true,
|
||||
"resolveJsonModule": true,
|
||||
"types": ["node"],
|
||||
"outDir": "dist"
|
||||
},
|
||||
"include": ["src/**/*.ts", "scripts/**/*.ts", "tests/**/*.ts"]
|
||||
}
|
||||
20
skills/atlassian/codex/scripts/package.json
Normal file
20
skills/atlassian/codex/scripts/package.json
Normal file
@@ -0,0 +1,20 @@
|
||||
{
|
||||
"name": "atlassian-skill-scripts",
|
||||
"version": "1.0.0",
|
||||
"description": "Shared runtime for the Atlassian skill",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"atlassian": "tsx src/cli.ts",
|
||||
"typecheck": "tsc --noEmit"
|
||||
},
|
||||
"dependencies": {
|
||||
"commander": "^13.1.0",
|
||||
"dotenv": "^16.4.7"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^24.3.0",
|
||||
"tsx": "^4.20.5",
|
||||
"typescript": "^5.9.2"
|
||||
},
|
||||
"packageManager": "pnpm@10.18.1+sha512.77a884a165cbba2d8d1c19e3b4880eee6d2fcabd0d879121e282196b80042351d5eb3ca0935fa599da1dc51265cc68816ad2bddd2a2de5ea9fdf92adbec7cd34"
|
||||
}
|
||||
361
skills/atlassian/codex/scripts/pnpm-lock.yaml
generated
Normal file
361
skills/atlassian/codex/scripts/pnpm-lock.yaml
generated
Normal file
@@ -0,0 +1,361 @@
|
||||
lockfileVersion: '9.0'
|
||||
|
||||
settings:
|
||||
autoInstallPeers: true
|
||||
excludeLinksFromLockfile: false
|
||||
|
||||
importers:
|
||||
|
||||
.:
|
||||
dependencies:
|
||||
commander:
|
||||
specifier: ^13.1.0
|
||||
version: 13.1.0
|
||||
dotenv:
|
||||
specifier: ^16.4.7
|
||||
version: 16.6.1
|
||||
devDependencies:
|
||||
'@types/node':
|
||||
specifier: ^24.3.0
|
||||
version: 24.12.0
|
||||
tsx:
|
||||
specifier: ^4.20.5
|
||||
version: 4.21.0
|
||||
typescript:
|
||||
specifier: ^5.9.2
|
||||
version: 5.9.3
|
||||
|
||||
packages:
|
||||
|
||||
'@esbuild/aix-ppc64@0.27.3':
|
||||
resolution: {integrity: sha512-9fJMTNFTWZMh5qwrBItuziu834eOCUcEqymSH7pY+zoMVEZg3gcPuBNxH1EvfVYe9h0x/Ptw8KBzv7qxb7l8dg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [ppc64]
|
||||
os: [aix]
|
||||
|
||||
'@esbuild/android-arm64@0.27.3':
|
||||
resolution: {integrity: sha512-YdghPYUmj/FX2SYKJ0OZxf+iaKgMsKHVPF1MAq/P8WirnSpCStzKJFjOjzsW0QQ7oIAiccHdcqjbHmJxRb/dmg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [android]
|
||||
|
||||
'@esbuild/android-arm@0.27.3':
|
||||
resolution: {integrity: sha512-i5D1hPY7GIQmXlXhs2w8AWHhenb00+GxjxRncS2ZM7YNVGNfaMxgzSGuO8o8SJzRc/oZwU2bcScvVERk03QhzA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm]
|
||||
os: [android]
|
||||
|
||||
'@esbuild/android-x64@0.27.3':
|
||||
resolution: {integrity: sha512-IN/0BNTkHtk8lkOM8JWAYFg4ORxBkZQf9zXiEOfERX/CzxW3Vg1ewAhU7QSWQpVIzTW+b8Xy+lGzdYXV6UZObQ==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [android]
|
||||
|
||||
'@esbuild/darwin-arm64@0.27.3':
|
||||
resolution: {integrity: sha512-Re491k7ByTVRy0t3EKWajdLIr0gz2kKKfzafkth4Q8A5n1xTHrkqZgLLjFEHVD+AXdUGgQMq+Godfq45mGpCKg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [darwin]
|
||||
|
||||
'@esbuild/darwin-x64@0.27.3':
|
||||
resolution: {integrity: sha512-vHk/hA7/1AckjGzRqi6wbo+jaShzRowYip6rt6q7VYEDX4LEy1pZfDpdxCBnGtl+A5zq8iXDcyuxwtv3hNtHFg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [darwin]
|
||||
|
||||
'@esbuild/freebsd-arm64@0.27.3':
|
||||
resolution: {integrity: sha512-ipTYM2fjt3kQAYOvo6vcxJx3nBYAzPjgTCk7QEgZG8AUO3ydUhvelmhrbOheMnGOlaSFUoHXB6un+A7q4ygY9w==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [freebsd]
|
||||
|
||||
'@esbuild/freebsd-x64@0.27.3':
|
||||
resolution: {integrity: sha512-dDk0X87T7mI6U3K9VjWtHOXqwAMJBNN2r7bejDsc+j03SEjtD9HrOl8gVFByeM0aJksoUuUVU9TBaZa2rgj0oA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [freebsd]
|
||||
|
||||
'@esbuild/linux-arm64@0.27.3':
|
||||
resolution: {integrity: sha512-sZOuFz/xWnZ4KH3YfFrKCf1WyPZHakVzTiqji3WDc0BCl2kBwiJLCXpzLzUBLgmp4veFZdvN5ChW4Eq/8Fc2Fg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-arm@0.27.3':
|
||||
resolution: {integrity: sha512-s6nPv2QkSupJwLYyfS+gwdirm0ukyTFNl3KTgZEAiJDd+iHZcbTPPcWCcRYH+WlNbwChgH2QkE9NSlNrMT8Gfw==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-ia32@0.27.3':
|
||||
resolution: {integrity: sha512-yGlQYjdxtLdh0a3jHjuwOrxQjOZYD/C9PfdbgJJF3TIZWnm/tMd/RcNiLngiu4iwcBAOezdnSLAwQDPqTmtTYg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [ia32]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-loong64@0.27.3':
|
||||
resolution: {integrity: sha512-WO60Sn8ly3gtzhyjATDgieJNet/KqsDlX5nRC5Y3oTFcS1l0KWba+SEa9Ja1GfDqSF1z6hif/SkpQJbL63cgOA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [loong64]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-mips64el@0.27.3':
|
||||
resolution: {integrity: sha512-APsymYA6sGcZ4pD6k+UxbDjOFSvPWyZhjaiPyl/f79xKxwTnrn5QUnXR5prvetuaSMsb4jgeHewIDCIWljrSxw==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [mips64el]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-ppc64@0.27.3':
|
||||
resolution: {integrity: sha512-eizBnTeBefojtDb9nSh4vvVQ3V9Qf9Df01PfawPcRzJH4gFSgrObw+LveUyDoKU3kxi5+9RJTCWlj4FjYXVPEA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [ppc64]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-riscv64@0.27.3':
|
||||
resolution: {integrity: sha512-3Emwh0r5wmfm3ssTWRQSyVhbOHvqegUDRd0WhmXKX2mkHJe1SFCMJhagUleMq+Uci34wLSipf8Lagt4LlpRFWQ==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [riscv64]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-s390x@0.27.3':
|
||||
resolution: {integrity: sha512-pBHUx9LzXWBc7MFIEEL0yD/ZVtNgLytvx60gES28GcWMqil8ElCYR4kvbV2BDqsHOvVDRrOxGySBM9Fcv744hw==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [s390x]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-x64@0.27.3':
|
||||
resolution: {integrity: sha512-Czi8yzXUWIQYAtL/2y6vogER8pvcsOsk5cpwL4Gk5nJqH5UZiVByIY8Eorm5R13gq+DQKYg0+JyQoytLQas4dA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/netbsd-arm64@0.27.3':
|
||||
resolution: {integrity: sha512-sDpk0RgmTCR/5HguIZa9n9u+HVKf40fbEUt+iTzSnCaGvY9kFP0YKBWZtJaraonFnqef5SlJ8/TiPAxzyS+UoA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [netbsd]
|
||||
|
||||
'@esbuild/netbsd-x64@0.27.3':
|
||||
resolution: {integrity: sha512-P14lFKJl/DdaE00LItAukUdZO5iqNH7+PjoBm+fLQjtxfcfFE20Xf5CrLsmZdq5LFFZzb5JMZ9grUwvtVYzjiA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [netbsd]
|
||||
|
||||
'@esbuild/openbsd-arm64@0.27.3':
|
||||
resolution: {integrity: sha512-AIcMP77AvirGbRl/UZFTq5hjXK+2wC7qFRGoHSDrZ5v5b8DK/GYpXW3CPRL53NkvDqb9D+alBiC/dV0Fb7eJcw==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [openbsd]
|
||||
|
||||
'@esbuild/openbsd-x64@0.27.3':
|
||||
resolution: {integrity: sha512-DnW2sRrBzA+YnE70LKqnM3P+z8vehfJWHXECbwBmH/CU51z6FiqTQTHFenPlHmo3a8UgpLyH3PT+87OViOh1AQ==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [openbsd]
|
||||
|
||||
'@esbuild/openharmony-arm64@0.27.3':
|
||||
resolution: {integrity: sha512-NinAEgr/etERPTsZJ7aEZQvvg/A6IsZG/LgZy+81wON2huV7SrK3e63dU0XhyZP4RKGyTm7aOgmQk0bGp0fy2g==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [openharmony]
|
||||
|
||||
'@esbuild/sunos-x64@0.27.3':
|
||||
resolution: {integrity: sha512-PanZ+nEz+eWoBJ8/f8HKxTTD172SKwdXebZ0ndd953gt1HRBbhMsaNqjTyYLGLPdoWHy4zLU7bDVJztF5f3BHA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [sunos]
|
||||
|
||||
'@esbuild/win32-arm64@0.27.3':
|
||||
resolution: {integrity: sha512-B2t59lWWYrbRDw/tjiWOuzSsFh1Y/E95ofKz7rIVYSQkUYBjfSgf6oeYPNWHToFRr2zx52JKApIcAS/D5TUBnA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [win32]
|
||||
|
||||
'@esbuild/win32-ia32@0.27.3':
|
||||
resolution: {integrity: sha512-QLKSFeXNS8+tHW7tZpMtjlNb7HKau0QDpwm49u0vUp9y1WOF+PEzkU84y9GqYaAVW8aH8f3GcBck26jh54cX4Q==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [ia32]
|
||||
os: [win32]
|
||||
|
||||
'@esbuild/win32-x64@0.27.3':
|
||||
resolution: {integrity: sha512-4uJGhsxuptu3OcpVAzli+/gWusVGwZZHTlS63hh++ehExkVT8SgiEf7/uC/PclrPPkLhZqGgCTjd0VWLo6xMqA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [win32]
|
||||
|
||||
'@types/node@24.12.0':
|
||||
resolution: {integrity: sha512-GYDxsZi3ChgmckRT9HPU0WEhKLP08ev/Yfcq2AstjrDASOYCSXeyjDsHg4v5t4jOj7cyDX3vmprafKlWIG9MXQ==}
|
||||
|
||||
commander@13.1.0:
|
||||
resolution: {integrity: sha512-/rFeCpNJQbhSZjGVwO9RFV3xPqbnERS8MmIQzCtD/zl6gpJuV/bMLuN92oG3F7d8oDEHHRrujSXNUr8fpjntKw==}
|
||||
engines: {node: '>=18'}
|
||||
|
||||
dotenv@16.6.1:
|
||||
resolution: {integrity: sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==}
|
||||
engines: {node: '>=12'}
|
||||
|
||||
esbuild@0.27.3:
|
||||
resolution: {integrity: sha512-8VwMnyGCONIs6cWue2IdpHxHnAjzxnw2Zr7MkVxB2vjmQ2ivqGFb4LEG3SMnv0Gb2F/G/2yA8zUaiL1gywDCCg==}
|
||||
engines: {node: '>=18'}
|
||||
hasBin: true
|
||||
|
||||
fsevents@2.3.3:
|
||||
resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==}
|
||||
engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
|
||||
os: [darwin]
|
||||
|
||||
get-tsconfig@4.13.6:
|
||||
resolution: {integrity: sha512-shZT/QMiSHc/YBLxxOkMtgSid5HFoauqCE3/exfsEcwg1WkeqjG+V40yBbBrsD+jW2HDXcs28xOfcbm2jI8Ddw==}
|
||||
|
||||
resolve-pkg-maps@1.0.0:
|
||||
resolution: {integrity: sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==}
|
||||
|
||||
tsx@4.21.0:
|
||||
resolution: {integrity: sha512-5C1sg4USs1lfG0GFb2RLXsdpXqBSEhAaA/0kPL01wxzpMqLILNxIxIOKiILz+cdg/pLnOUxFYOR5yhHU666wbw==}
|
||||
engines: {node: '>=18.0.0'}
|
||||
hasBin: true
|
||||
|
||||
typescript@5.9.3:
|
||||
resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==}
|
||||
engines: {node: '>=14.17'}
|
||||
hasBin: true
|
||||
|
||||
undici-types@7.16.0:
|
||||
resolution: {integrity: sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==}
|
||||
|
||||
snapshots:
|
||||
|
||||
'@esbuild/aix-ppc64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/android-arm64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/android-arm@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/android-x64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/darwin-arm64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/darwin-x64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/freebsd-arm64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/freebsd-x64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-arm64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-arm@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-ia32@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-loong64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-mips64el@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-ppc64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-riscv64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-s390x@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-x64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/netbsd-arm64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/netbsd-x64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/openbsd-arm64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/openbsd-x64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/openharmony-arm64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/sunos-x64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/win32-arm64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/win32-ia32@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/win32-x64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@types/node@24.12.0':
|
||||
dependencies:
|
||||
undici-types: 7.16.0
|
||||
|
||||
commander@13.1.0: {}
|
||||
|
||||
dotenv@16.6.1: {}
|
||||
|
||||
esbuild@0.27.3:
|
||||
optionalDependencies:
|
||||
'@esbuild/aix-ppc64': 0.27.3
|
||||
'@esbuild/android-arm': 0.27.3
|
||||
'@esbuild/android-arm64': 0.27.3
|
||||
'@esbuild/android-x64': 0.27.3
|
||||
'@esbuild/darwin-arm64': 0.27.3
|
||||
'@esbuild/darwin-x64': 0.27.3
|
||||
'@esbuild/freebsd-arm64': 0.27.3
|
||||
'@esbuild/freebsd-x64': 0.27.3
|
||||
'@esbuild/linux-arm': 0.27.3
|
||||
'@esbuild/linux-arm64': 0.27.3
|
||||
'@esbuild/linux-ia32': 0.27.3
|
||||
'@esbuild/linux-loong64': 0.27.3
|
||||
'@esbuild/linux-mips64el': 0.27.3
|
||||
'@esbuild/linux-ppc64': 0.27.3
|
||||
'@esbuild/linux-riscv64': 0.27.3
|
||||
'@esbuild/linux-s390x': 0.27.3
|
||||
'@esbuild/linux-x64': 0.27.3
|
||||
'@esbuild/netbsd-arm64': 0.27.3
|
||||
'@esbuild/netbsd-x64': 0.27.3
|
||||
'@esbuild/openbsd-arm64': 0.27.3
|
||||
'@esbuild/openbsd-x64': 0.27.3
|
||||
'@esbuild/openharmony-arm64': 0.27.3
|
||||
'@esbuild/sunos-x64': 0.27.3
|
||||
'@esbuild/win32-arm64': 0.27.3
|
||||
'@esbuild/win32-ia32': 0.27.3
|
||||
'@esbuild/win32-x64': 0.27.3
|
||||
|
||||
fsevents@2.3.3:
|
||||
optional: true
|
||||
|
||||
get-tsconfig@4.13.6:
|
||||
dependencies:
|
||||
resolve-pkg-maps: 1.0.0
|
||||
|
||||
resolve-pkg-maps@1.0.0: {}
|
||||
|
||||
tsx@4.21.0:
|
||||
dependencies:
|
||||
esbuild: 0.27.3
|
||||
get-tsconfig: 4.13.6
|
||||
optionalDependencies:
|
||||
fsevents: 2.3.3
|
||||
|
||||
typescript@5.9.3: {}
|
||||
|
||||
undici-types@7.16.0: {}
|
||||
92
skills/atlassian/codex/scripts/src/adf.ts
Normal file
92
skills/atlassian/codex/scripts/src/adf.ts
Normal file
@@ -0,0 +1,92 @@
|
||||
const TEXT_NODE = "text";
|
||||
|
||||
function textNode(text: string) {
|
||||
return {
|
||||
type: TEXT_NODE,
|
||||
text,
|
||||
};
|
||||
}
|
||||
|
||||
function paragraphNode(lines: string[]) {
|
||||
const content: Array<{ type: string; text?: string }> = [];
|
||||
|
||||
lines.forEach((line, index) => {
|
||||
if (index > 0) {
|
||||
content.push({ type: "hardBreak" });
|
||||
}
|
||||
|
||||
if (line.length > 0) {
|
||||
content.push(textNode(line));
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
type: "paragraph",
|
||||
...(content.length > 0 ? { content } : {}),
|
||||
};
|
||||
}
|
||||
|
||||
export function markdownToAdf(input: string) {
|
||||
const lines = input.replace(/\r\n/g, "\n").split("\n");
|
||||
const content: Array<Record<string, unknown>> = [];
|
||||
let index = 0;
|
||||
|
||||
while (index < lines.length) {
|
||||
const current = lines[index]?.trimEnd() ?? "";
|
||||
|
||||
if (current.trim().length === 0) {
|
||||
index += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
const heading = current.match(/^(#{1,6})\s+(.*)$/);
|
||||
|
||||
if (heading) {
|
||||
content.push({
|
||||
type: "heading",
|
||||
attrs: { level: heading[1].length },
|
||||
content: [textNode(heading[2])],
|
||||
});
|
||||
index += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (/^[-*]\s+/.test(current)) {
|
||||
const items: Array<Record<string, unknown>> = [];
|
||||
|
||||
while (index < lines.length && /^[-*]\s+/.test(lines[index] ?? "")) {
|
||||
items.push({
|
||||
type: "listItem",
|
||||
content: [
|
||||
{
|
||||
type: "paragraph",
|
||||
content: [textNode((lines[index] ?? "").replace(/^[-*]\s+/, ""))],
|
||||
},
|
||||
],
|
||||
});
|
||||
index += 1;
|
||||
}
|
||||
|
||||
content.push({
|
||||
type: "bulletList",
|
||||
content: items,
|
||||
});
|
||||
continue;
|
||||
}
|
||||
|
||||
const paragraphLines: string[] = [];
|
||||
|
||||
while (index < lines.length && (lines[index]?.trim().length ?? 0) > 0) {
|
||||
paragraphLines.push(lines[index] ?? "");
|
||||
index += 1;
|
||||
}
|
||||
|
||||
content.push(paragraphNode(paragraphLines));
|
||||
}
|
||||
|
||||
return {
|
||||
type: "doc",
|
||||
version: 1,
|
||||
content,
|
||||
};
|
||||
}
|
||||
339
skills/atlassian/codex/scripts/src/cli.ts
Normal file
339
skills/atlassian/codex/scripts/src/cli.ts
Normal file
@@ -0,0 +1,339 @@
|
||||
import process from "node:process";
|
||||
import { pathToFileURL } from "node:url";
|
||||
|
||||
import { Command } from "commander";
|
||||
|
||||
import { createConfluenceClient } from "./confluence.js";
|
||||
import { loadConfig } from "./config.js";
|
||||
import { readWorkspaceFile } from "./files.js";
|
||||
import { createJiraClient } from "./jira.js";
|
||||
import { writeOutput } from "./output.js";
|
||||
import { runRawCommand } from "./raw.js";
|
||||
import type { FetchLike, OutputFormat, Writer } from "./types.js";
|
||||
|
||||
// Injection points for the CLI runtime. Every field is optional; each falls
// back to the real process environment/streams in createRuntime, which keeps
// tests hermetic without changing production behavior.
type CliContext = {
  cwd?: string;
  env?: NodeJS.ProcessEnv;
  // Override for global fetch so HTTP calls can be stubbed in tests.
  fetchImpl?: FetchLike;
  stdout?: Writer;
  stderr?: Writer;
};
|
||||
|
||||
function resolveFormat(format: string | undefined): OutputFormat {
|
||||
return format === "text" ? "text" : "json";
|
||||
}
|
||||
|
||||
// Build the per-invocation runtime: resolves cwd/env/streams from the
// injected context (defaulting to the live process), and lazily constructs
// the config and the Jira/Confluence clients so a command that never touches
// a product does not pay for — or fail on — its configuration.
function createRuntime(context: CliContext) {
  const cwd = context.cwd ?? process.cwd();
  const env = context.env ?? process.env;
  const stdout = context.stdout ?? process.stdout;
  const stderr = context.stderr ?? process.stderr;
  // Memoized singletons; `??=` below ensures each is built at most once.
  let configCache: ReturnType<typeof loadConfig> | undefined;
  let jiraCache: ReturnType<typeof createJiraClient> | undefined;
  let confluenceCache: ReturnType<typeof createConfluenceClient> | undefined;

  // Load (and cache) configuration from `env` plus a .env file rooted at cwd.
  function getConfig() {
    configCache ??= loadConfig(env, { cwd });
    return configCache;
  }

  // Lazily create the Jira client, sharing the cached config and the
  // injected fetch override (if any).
  function getJiraClient() {
    jiraCache ??= createJiraClient({
      config: getConfig(),
      fetchImpl: context.fetchImpl,
    });
    return jiraCache;
  }

  // Same lazy construction for the Confluence client.
  function getConfluenceClient() {
    confluenceCache ??= createConfluenceClient({
      config: getConfig(),
      fetchImpl: context.fetchImpl,
    });
    return confluenceCache;
  }

  // Read an optional workspace-relative body file; undefined path means
  // "no body supplied" rather than an error.
  async function readBodyFile(filePath: string | undefined) {
    if (!filePath) {
      return undefined;
    }

    return readWorkspaceFile(filePath, cwd);
  }

  return {
    cwd,
    stdout,
    stderr,
    readBodyFile,
    getConfig,
    getJiraClient,
    getConfluenceClient,
    fetchImpl: context.fetchImpl,
  };
}
|
||||
|
||||
// Assemble the commander program: one subcommand per Jira/Confluence
// operation, each resolving its clients lazily through the shared runtime
// and writing a structured payload via writeOutput.
// NOTE(review): only `health` calls .description(); consider adding
// descriptions to the remaining commands for better --help output.
export function buildProgram(context: CliContext = {}) {
  const runtime = createRuntime(context);
  const program = new Command()
    .name("atlassian")
    .description("Portable Atlassian CLI for multi-agent skills")
    .version("0.1.0");

  // Connectivity/config smoke check: echoes the resolved configuration.
  // Loading the config already validates the required env vars.
  program
    .command("health")
    .description("Validate configuration and Atlassian connectivity")
    .option("--format <format>", "Output format", "json")
    .action((options) => {
      writeOutput(
        runtime.stdout,
        {
          ok: true,
          data: {
            baseUrl: runtime.getConfig().baseUrl,
            jiraBaseUrl: runtime.getConfig().jiraBaseUrl,
            confluenceBaseUrl: runtime.getConfig().confluenceBaseUrl,
            defaultProject: runtime.getConfig().defaultProject,
            defaultSpace: runtime.getConfig().defaultSpace,
          },
        },
        resolveFormat(options.format),
      );
    });

  // --- Confluence commands ---

  // CQL search with offset pagination.
  program
    .command("conf-search")
    .requiredOption("--query <query>", "CQL search query")
    .option("--max-results <number>", "Maximum results to return", "50")
    .option("--start-at <number>", "Result offset", "0")
    .option("--format <format>", "Output format", "json")
    .action(async (options) => {
      const payload = await runtime.getConfluenceClient().searchPages({
        query: options.query,
        // Commander option values arrive as strings; convert explicitly.
        maxResults: Number(options.maxResults),
        startAt: Number(options.startAt),
      });

      writeOutput(runtime.stdout, payload, resolveFormat(options.format));
    });

  // Fetch a single page (including its storage-format body).
  program
    .command("conf-get")
    .requiredOption("--page <page>", "Confluence page ID")
    .option("--format <format>", "Output format", "json")
    .action(async (options) => {
      const payload = await runtime.getConfluenceClient().getPage(options.page);
      writeOutput(runtime.stdout, payload, resolveFormat(options.format));
    });

  // Create a page from a body file; --dry-run prints instead of sending.
  // NOTE(review): the `as string` cast relies on --body-file being a
  // requiredOption so readBodyFile never sees undefined here.
  program
    .command("conf-create")
    .requiredOption("--title <title>", "Confluence page title")
    .requiredOption("--body-file <path>", "Workspace-relative storage-format body file")
    .option("--space <space>", "Confluence space ID")
    .option("--dry-run", "Print the request without sending it")
    .option("--format <format>", "Output format", "json")
    .action(async (options) => {
      const payload = await runtime.getConfluenceClient().createPage({
        space: options.space,
        title: options.title,
        body: (await runtime.readBodyFile(options.bodyFile)) as string,
        dryRun: Boolean(options.dryRun),
      });

      writeOutput(runtime.stdout, payload, resolveFormat(options.format));
    });

  // Replace an existing page's title/body.
  program
    .command("conf-update")
    .requiredOption("--page <page>", "Confluence page ID")
    .requiredOption("--title <title>", "Confluence page title")
    .requiredOption("--body-file <path>", "Workspace-relative storage-format body file")
    .option("--dry-run", "Print the request without sending it")
    .option("--format <format>", "Output format", "json")
    .action(async (options) => {
      const payload = await runtime.getConfluenceClient().updatePage({
        pageId: options.page,
        title: options.title,
        body: (await runtime.readBodyFile(options.bodyFile)) as string,
        dryRun: Boolean(options.dryRun),
      });

      writeOutput(runtime.stdout, payload, resolveFormat(options.format));
    });

  // Add a comment to a page.
  program
    .command("conf-comment")
    .requiredOption("--page <page>", "Confluence page ID")
    .requiredOption("--body-file <path>", "Workspace-relative storage-format body file")
    .option("--dry-run", "Print the request without sending it")
    .option("--format <format>", "Output format", "json")
    .action(async (options) => {
      const payload = await runtime.getConfluenceClient().commentPage({
        pageId: options.page,
        body: (await runtime.readBodyFile(options.bodyFile)) as string,
        dryRun: Boolean(options.dryRun),
      });

      writeOutput(runtime.stdout, payload, resolveFormat(options.format));
    });

  // List child pages; listChildren takes positional args, unlike the
  // object-style inputs above.
  program
    .command("conf-children")
    .requiredOption("--page <page>", "Confluence page ID")
    .option("--max-results <number>", "Maximum results to return", "50")
    .option("--start-at <number>", "Cursor/start token", "0")
    .option("--format <format>", "Output format", "json")
    .action(async (options) => {
      const payload = await runtime.getConfluenceClient().listChildren(
        options.page,
        Number(options.maxResults),
        Number(options.startAt),
      );

      writeOutput(runtime.stdout, payload, resolveFormat(options.format));
    });

  // Escape hatch: send a validated raw REST request to either product.
  program
    .command("raw")
    .requiredOption("--product <product>", "jira or confluence")
    .requiredOption("--method <method>", "GET, POST, or PUT")
    .requiredOption("--path <path>", "Validated API path")
    .option("--body-file <path>", "Workspace-relative JSON file")
    .option("--dry-run", "Print the request without sending it")
    .option("--format <format>", "Output format", "json")
    .action(async (options) => {
      const payload = await runRawCommand(runtime.getConfig(), runtime.fetchImpl, {
        product: options.product,
        // Normalize method casing so lower-case input like "get" is accepted.
        method: String(options.method).toUpperCase(),
        path: options.path,
        bodyFile: options.bodyFile,
        cwd: runtime.cwd,
        dryRun: Boolean(options.dryRun),
      });

      writeOutput(runtime.stdout, payload, resolveFormat(options.format));
    });

  // --- Jira commands ---

  // JQL search with offset pagination.
  program
    .command("jira-search")
    .requiredOption("--jql <jql>", "JQL expression to execute")
    .option("--max-results <number>", "Maximum results to return", "50")
    .option("--start-at <number>", "Result offset", "0")
    .option("--format <format>", "Output format", "json")
    .action(async (options) => {
      const payload = await runtime.getJiraClient().searchIssues({
        jql: options.jql,
        maxResults: Number(options.maxResults),
        startAt: Number(options.startAt),
      });

      writeOutput(runtime.stdout, payload, resolveFormat(options.format));
    });

  // Fetch a single issue by key.
  program
    .command("jira-get")
    .requiredOption("--issue <issue>", "Issue key")
    .option("--format <format>", "Output format", "json")
    .action(async (options) => {
      const payload = await runtime.getJiraClient().getIssue(options.issue);
      writeOutput(runtime.stdout, payload, resolveFormat(options.format));
    });

  // Create an issue; description is optional, so readBodyFile may return
  // undefined here (no cast, unlike the required body files above).
  program
    .command("jira-create")
    .requiredOption("--type <type>", "Issue type name")
    .requiredOption("--summary <summary>", "Issue summary")
    .option("--project <project>", "Project key")
    .option("--description-file <path>", "Workspace-relative markdown/text file")
    .option("--dry-run", "Print the request without sending it")
    .option("--format <format>", "Output format", "json")
    .action(async (options) => {
      const payload = await runtime.getJiraClient().createIssue({
        project: options.project,
        type: options.type,
        summary: options.summary,
        description: await runtime.readBodyFile(options.descriptionFile),
        dryRun: Boolean(options.dryRun),
      });

      writeOutput(runtime.stdout, payload, resolveFormat(options.format));
    });

  // Update summary and/or description of an existing issue.
  program
    .command("jira-update")
    .requiredOption("--issue <issue>", "Issue key")
    .option("--summary <summary>", "Updated summary")
    .option("--description-file <path>", "Workspace-relative markdown/text file")
    .option("--dry-run", "Print the request without sending it")
    .option("--format <format>", "Output format", "json")
    .action(async (options) => {
      const payload = await runtime.getJiraClient().updateIssue({
        issue: options.issue,
        summary: options.summary,
        description: await runtime.readBodyFile(options.descriptionFile),
        dryRun: Boolean(options.dryRun),
      });

      writeOutput(runtime.stdout, payload, resolveFormat(options.format));
    });

  // Add a comment to an issue.
  program
    .command("jira-comment")
    .requiredOption("--issue <issue>", "Issue key")
    .requiredOption("--body-file <path>", "Workspace-relative markdown/text file")
    .option("--dry-run", "Print the request without sending it")
    .option("--format <format>", "Output format", "json")
    .action(async (options) => {
      const payload = await runtime.getJiraClient().commentIssue({
        issue: options.issue,
        body: (await runtime.readBodyFile(options.bodyFile)) as string,
        dryRun: Boolean(options.dryRun),
      });

      writeOutput(runtime.stdout, payload, resolveFormat(options.format));
    });

  // List the transitions currently available on an issue.
  program
    .command("jira-transitions")
    .requiredOption("--issue <issue>", "Issue key")
    .option("--format <format>", "Output format", "json")
    .action(async (options) => {
      const payload = await runtime.getJiraClient().getTransitions(options.issue);
      writeOutput(runtime.stdout, payload, resolveFormat(options.format));
    });

  // Apply a workflow transition by its ID (see jira-transitions for IDs).
  program
    .command("jira-transition")
    .requiredOption("--issue <issue>", "Issue key")
    .requiredOption("--transition <transition>", "Transition ID")
    .option("--dry-run", "Print the request without sending it")
    .option("--format <format>", "Output format", "json")
    .action(async (options) => {
      const payload = await runtime.getJiraClient().transitionIssue({
        issue: options.issue,
        transition: options.transition,
        dryRun: Boolean(options.dryRun),
      });

      writeOutput(runtime.stdout, payload, resolveFormat(options.format));
    });

  return program;
}
|
||||
|
||||
export async function runCli(argv = process.argv, context: CliContext = {}) {
|
||||
const program = buildProgram(context);
|
||||
await program.parseAsync(argv);
|
||||
}
|
||||
|
||||
// True only when this module is the entry script (e.g. `tsx src/cli.ts`),
// not when imported by tests — the ESM equivalent of the CommonJS
// `require.main === module` check, via pathToFileURL.
const isDirectExecution =
  Boolean(process.argv[1]) && import.meta.url === pathToFileURL(process.argv[1]).href;

if (isDirectExecution) {
  // Top-level failure handler: print a concise message and signal failure
  // through the exit code (exitCode, not exit(), so pending writes flush).
  runCli().catch((error: unknown) => {
    const message = error instanceof Error ? error.message : String(error);
    process.stderr.write(`${message}\n`);
    process.exitCode = 1;
  });
}
|
||||
52
skills/atlassian/codex/scripts/src/config.ts
Normal file
52
skills/atlassian/codex/scripts/src/config.ts
Normal file
@@ -0,0 +1,52 @@
|
||||
import path from "node:path";
|
||||
|
||||
import { config as loadDotEnv } from "dotenv";
|
||||
|
||||
import type { AtlassianConfig } from "./types.js";
|
||||
|
||||
function normalizeBaseUrl(value: string) {
|
||||
return value.replace(/\/+$/, "");
|
||||
}
|
||||
|
||||
function readRequired(env: NodeJS.ProcessEnv, key: string) {
|
||||
const value = env[key]?.trim();
|
||||
|
||||
if (!value) {
|
||||
throw new Error(`Missing required environment variable: ${key}`);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
// Load Atlassian connection settings: first layer in a `.env` file rooted at
// cwd (existing values in `env` win, since override is false), then read and
// validate the required variables. Throws on any missing required variable.
export function loadConfig(
  env: NodeJS.ProcessEnv = process.env,
  options?: {
    cwd?: string;
  },
): AtlassianConfig {
  // Populate `env` — which may be a test-supplied object, not process.env —
  // from <cwd>/.env without clobbering values already present.
  loadDotEnv({
    path: path.resolve(options?.cwd ?? process.cwd(), ".env"),
    processEnv: env as Record<string, string>,
    override: false,
  });

  const baseUrl = normalizeBaseUrl(readRequired(env, "ATLASSIAN_BASE_URL"));

  return {
    baseUrl,
    // Product-specific base URLs default to the shared base URL; the `||`
    // fallback deliberately treats empty/whitespace-only overrides as unset.
    jiraBaseUrl: normalizeBaseUrl(env.ATLASSIAN_JIRA_BASE_URL?.trim() || baseUrl),
    confluenceBaseUrl: normalizeBaseUrl(env.ATLASSIAN_CONFLUENCE_BASE_URL?.trim() || baseUrl),
    email: readRequired(env, "ATLASSIAN_EMAIL"),
    apiToken: readRequired(env, "ATLASSIAN_API_TOKEN"),
    // Optional defaults; empty strings normalize to undefined.
    defaultProject: env.ATLASSIAN_DEFAULT_PROJECT?.trim() || undefined,
    defaultSpace: env.ATLASSIAN_DEFAULT_SPACE?.trim() || undefined,
  };
}
|
||||
|
||||
export function createBasicAuthHeader(config: {
|
||||
email: string;
|
||||
apiToken: string;
|
||||
[key: string]: unknown;
|
||||
}) {
|
||||
return `Basic ${Buffer.from(`${config.email}:${config.apiToken}`).toString("base64")}`;
|
||||
}
|
||||
292
skills/atlassian/codex/scripts/src/confluence.ts
Normal file
292
skills/atlassian/codex/scripts/src/confluence.ts
Normal file
@@ -0,0 +1,292 @@
|
||||
import { sendJsonRequest } from "./http.js";
|
||||
import type { AtlassianConfig, CommandOutput, FetchLike } from "./types.js";
|
||||
|
||||
// Constructor options for createConfluenceClient.
type ConfluenceClientOptions = {
  config: AtlassianConfig;
  // Injectable fetch for tests — presumably falls back to global fetch
  // inside sendJsonRequest; confirm in http.ts.
  fetchImpl?: FetchLike;
};

// CQL search parameters; mapped onto the REST `cql`, `limit`, and `start`
// query parameters by searchPages.
type SearchInput = {
  query: string;
  maxResults: number;
  startAt: number;
};

// Page-creation parameters.
type CreateInput = {
  // Target space ID; optional — NOTE(review): presumably defaults to the
  // configured default space inside createPage (not visible here), confirm.
  space?: string;
  title: string;
  // Page body in Confluence storage format.
  body: string;
  // When true, print the request instead of sending it.
  dryRun?: boolean;
};

// Page-update parameters.
type UpdateInput = {
  pageId: string;
  title: string;
  // Replacement body in Confluence storage format.
  body: string;
  dryRun?: boolean;
};

// Page-comment parameters.
type CommentInput = {
  pageId: string;
  // Comment body in Confluence storage format.
  body: string;
  dryRun?: boolean;
};

// Compact page projection matching what normalizePage produces.
// NOTE(review): this alias is not referenced anywhere in the visible code —
// confirm it is used elsewhere, or wire it in as normalizePage's return type.
type PageSummary = {
  id: string;
  title: string;
  type: string;
  status?: string;
  spaceId?: string;
  url?: string;
};
|
||||
|
||||
function buildUrl(baseUrl: string, path: string) {
|
||||
return new URL(path, `${baseUrl}/`).toString();
|
||||
}
|
||||
|
||||
function normalizePage(baseUrl: string, page: Record<string, unknown>, excerpt?: string) {
|
||||
const links = (page._links ?? {}) as Record<string, unknown>;
|
||||
|
||||
return {
|
||||
id: String(page.id ?? ""),
|
||||
title: String(page.title ?? ""),
|
||||
type: String(page.type ?? "page"),
|
||||
...(page.status ? { status: String(page.status) } : {}),
|
||||
...(page.spaceId ? { spaceId: String(page.spaceId) } : {}),
|
||||
...(excerpt ? { excerpt } : {}),
|
||||
...(links.webui ? { url: `${baseUrl}${String(links.webui)}` } : {}),
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Creates the Confluence command client used by the CLI.
 *
 * Every method resolves to a `CommandOutput` envelope (`ok`, `data`, and
 * optionally `dryRun`/`raw`). Mutating methods honour `dryRun` by returning
 * the request they WOULD have sent instead of performing it.
 */
export function createConfluenceClient(options: ConfluenceClientOptions) {
  const config = options.config;

  // Fetches the current page (with storage-format body) so updatePage can
  // read the live version number, space, and status before writing.
  async function getPageForUpdate(pageId: string) {
    return (await sendJsonRequest({
      config,
      fetchImpl: options.fetchImpl,
      url: buildUrl(config.confluenceBaseUrl, `/wiki/api/v2/pages/${pageId}?body-format=storage`),
      method: "GET",
      errorPrefix: "Confluence request failed",
    })) as Record<string, unknown>;
  }

  return {
    /** Runs a CQL search via the v1 search endpoint and returns normalized page summaries. */
    async searchPages(input: SearchInput): Promise<CommandOutput<unknown>> {
      const url = new URL("/wiki/rest/api/search", `${config.confluenceBaseUrl}/`);
      url.searchParams.set("cql", input.query);
      url.searchParams.set("limit", String(input.maxResults));
      url.searchParams.set("start", String(input.startAt));

      const raw = (await sendJsonRequest({
        config,
        fetchImpl: options.fetchImpl,
        url: url.toString(),
        method: "GET",
        errorPrefix: "Confluence request failed",
      })) as Record<string, unknown>;

      const results = Array.isArray(raw.results) ? raw.results : [];

      return {
        ok: true,
        data: {
          pages: results.map((entry) => {
            const result = entry as Record<string, unknown>;
            // Search results wrap the page in `content` and carry the match excerpt alongside.
            return normalizePage(
              config.baseUrl,
              (result.content ?? {}) as Record<string, unknown>,
              result.excerpt ? String(result.excerpt) : undefined,
            );
          }),
          startAt: Number(raw.start ?? input.startAt),
          maxResults: Number(raw.limit ?? input.maxResults),
          // Prefer the server's total; fall back to the page size we got.
          total: Number(raw.totalSize ?? raw.size ?? results.length),
        },
      };
    },

    /** Fetches one page, returning its summary, version number, and storage-format body. */
    async getPage(pageId: string): Promise<CommandOutput<unknown>> {
      const raw = (await sendJsonRequest({
        config,
        fetchImpl: options.fetchImpl,
        url: buildUrl(config.confluenceBaseUrl, `/wiki/api/v2/pages/${pageId}?body-format=storage`),
        method: "GET",
        errorPrefix: "Confluence request failed",
      })) as Record<string, unknown>;

      const body = ((raw.body ?? {}) as Record<string, unknown>).storage as Record<string, unknown> | undefined;

      return {
        ok: true,
        data: {
          page: {
            ...normalizePage(config.baseUrl, raw),
            version: Number((((raw.version ?? {}) as Record<string, unknown>).number ?? 0)),
            body: body?.value ? String(body.value) : "",
          },
        },
        raw,
      };
    },

    /** Lists the direct children of a page. */
    async listChildren(pageId: string, maxResults: number, startAt: number): Promise<CommandOutput<unknown>> {
      const url = new URL(`/wiki/api/v2/pages/${pageId}/direct-children`, `${config.confluenceBaseUrl}/`);
      url.searchParams.set("limit", String(maxResults));
      // NOTE(review): the v2 API pages with an opaque cursor string; this
      // passes the numeric startAt as that cursor — confirm against callers.
      url.searchParams.set("cursor", String(startAt));

      const raw = (await sendJsonRequest({
        config,
        fetchImpl: options.fetchImpl,
        url: url.toString(),
        method: "GET",
        errorPrefix: "Confluence request failed",
      })) as Record<string, unknown>;

      const results = Array.isArray(raw.results) ? raw.results : [];
      const links = (raw._links ?? {}) as Record<string, unknown>;

      return {
        ok: true,
        data: {
          pages: results.map((page) => normalizePage(config.baseUrl, page as Record<string, unknown>)),
          // Cursor for the next page, or null when this is the last page.
          nextCursor: links.next ? String(links.next) : null,
        },
      };
    },

    /** Creates a page in the given (or default) space; supports dry-run previews. */
    async createPage(input: CreateInput): Promise<CommandOutput<unknown>> {
      const spaceId = input.space || config.defaultSpace;

      if (!spaceId) {
        throw new Error("conf-create requires --space or ATLASSIAN_DEFAULT_SPACE");
      }

      const request = {
        method: "POST" as const,
        url: buildUrl(config.confluenceBaseUrl, "/wiki/api/v2/pages"),
        body: {
          spaceId,
          title: input.title,
          status: "current",
          body: {
            representation: "storage",
            value: input.body,
          },
        },
      };

      // Dry run: surface the exact request without sending it.
      if (input.dryRun) {
        return {
          ok: true,
          dryRun: true,
          data: request,
        };
      }

      const raw = await sendJsonRequest({
        config,
        fetchImpl: options.fetchImpl,
        url: request.url,
        method: request.method,
        body: request.body,
        errorPrefix: "Confluence request failed",
      });

      return {
        ok: true,
        data: raw,
      };
    },

    /** Replaces a page's title/body, bumping the version read from the live page (optimistic locking). */
    async updatePage(input: UpdateInput): Promise<CommandOutput<unknown>> {
      const currentPage = await getPageForUpdate(input.pageId);
      const version = (((currentPage.version ?? {}) as Record<string, unknown>).number ?? 0) as number;
      const spaceId = String(currentPage.spaceId ?? "");

      const request = {
        method: "PUT" as const,
        url: buildUrl(config.confluenceBaseUrl, `/wiki/api/v2/pages/${input.pageId}`),
        body: {
          id: input.pageId,
          status: String(currentPage.status ?? "current"),
          title: input.title,
          spaceId,
          version: {
            // v2 updates must send current version + 1.
            number: Number(version) + 1,
          },
          body: {
            representation: "storage",
            value: input.body,
          },
        },
      };

      if (input.dryRun) {
        return {
          ok: true,
          dryRun: true,
          data: request,
        };
      }

      const raw = await sendJsonRequest({
        config,
        fetchImpl: options.fetchImpl,
        url: request.url,
        method: request.method,
        body: request.body,
        errorPrefix: "Confluence request failed",
        // Map 409 to a clearer concurrent-edit message; other statuses keep
        // the generic error from sendJsonRequest.
        handleResponseError(response) {
          if (response.status === 409) {
            return new Error(`Confluence update conflict: page ${input.pageId} was updated by someone else`);
          }

          return undefined;
        },
      });

      return {
        ok: true,
        data: raw,
      };
    },

    /** Adds a footer comment to a page; supports dry-run previews. */
    async commentPage(input: CommentInput): Promise<CommandOutput<unknown>> {
      const request = {
        method: "POST" as const,
        url: buildUrl(config.confluenceBaseUrl, "/wiki/api/v2/footer-comments"),
        body: {
          pageId: input.pageId,
          body: {
            representation: "storage",
            value: input.body,
          },
        },
      };

      if (input.dryRun) {
        return {
          ok: true,
          dryRun: true,
          data: request,
        };
      }

      const raw = await sendJsonRequest({
        config,
        fetchImpl: options.fetchImpl,
        url: request.url,
        method: request.method,
        body: request.body,
        errorPrefix: "Confluence request failed",
      });

      return {
        ok: true,
        data: raw,
      };
    },
  };
}
|
||||
13
skills/atlassian/codex/scripts/src/files.ts
Normal file
13
skills/atlassian/codex/scripts/src/files.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import { readFile } from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
|
||||
export async function readWorkspaceFile(filePath: string, cwd: string) {
|
||||
const resolved = path.resolve(cwd, filePath);
|
||||
const relative = path.relative(cwd, resolved);
|
||||
|
||||
if (relative.startsWith("..") || path.isAbsolute(relative)) {
|
||||
throw new Error(`--body-file must stay within the active workspace: ${filePath}`);
|
||||
}
|
||||
|
||||
return readFile(resolved, "utf8");
|
||||
}
|
||||
65
skills/atlassian/codex/scripts/src/http.ts
Normal file
65
skills/atlassian/codex/scripts/src/http.ts
Normal file
@@ -0,0 +1,65 @@
|
||||
import { createBasicAuthHeader } from "./config.js";
|
||||
import type { AtlassianConfig, FetchLike } from "./types.js";
|
||||
|
||||
export type HttpMethod = "GET" | "POST" | "PUT";
|
||||
|
||||
export function createJsonHeaders(config: AtlassianConfig, includeJsonBody: boolean) {
|
||||
const headers: Array<[string, string]> = [
|
||||
["Accept", "application/json"],
|
||||
["Authorization", createBasicAuthHeader(config)],
|
||||
];
|
||||
|
||||
if (includeJsonBody) {
|
||||
headers.push(["Content-Type", "application/json"]);
|
||||
}
|
||||
|
||||
return headers;
|
||||
}
|
||||
|
||||
export async function parseResponse(response: Response) {
|
||||
if (response.status === 204) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const contentType = response.headers.get("content-type") ?? "";
|
||||
|
||||
if (contentType.includes("application/json")) {
|
||||
return response.json();
|
||||
}
|
||||
|
||||
return response.text();
|
||||
}
|
||||
|
||||
/**
 * Sends an authenticated JSON request and returns the parsed response body.
 *
 * Uses `options.fetchImpl` when provided (tests inject a fake), otherwise
 * the global Fetch API. On a non-2xx status, `handleResponseError` gets the
 * first chance to produce a specific Error; if it declines (returns
 * undefined), a generic `errorPrefix: status statusText` error is thrown.
 */
export async function sendJsonRequest(options: {
  config: AtlassianConfig;
  fetchImpl?: FetchLike;
  url: string;
  method: HttpMethod;
  body?: unknown;
  errorPrefix: string;
  handleResponseError?: (response: Response) => Error | undefined;
}) {
  const fetchImpl = options.fetchImpl ?? globalThis.fetch;

  if (!fetchImpl) {
    throw new Error("Fetch API is not available in this runtime");
  }

  const response = await fetchImpl(options.url, {
    method: options.method,
    // Content-Type is only attached when a body is actually sent.
    headers: createJsonHeaders(options.config, options.body !== undefined),
    ...(options.body === undefined ? {} : { body: JSON.stringify(options.body) }),
  });

  if (!response.ok) {
    const customError = options.handleResponseError?.(response);

    if (customError) {
      throw customError;
    }

    throw new Error(`${options.errorPrefix}: ${response.status} ${response.statusText}`);
  }

  return parseResponse(response);
}
|
||||
264
skills/atlassian/codex/scripts/src/jira.ts
Normal file
264
skills/atlassian/codex/scripts/src/jira.ts
Normal file
@@ -0,0 +1,264 @@
|
||||
import { markdownToAdf } from "./adf.js";
|
||||
import { sendJsonRequest } from "./http.js";
|
||||
import type { AtlassianConfig, CommandOutput, FetchLike, JiraIssueSummary } from "./types.js";
|
||||
|
||||
// Field set requested from Jira so responses stay small and contain exactly
// what normalizeIssue reads.
const ISSUE_FIELDS = ["summary", "issuetype", "status", "assignee", "created", "updated"] as const;

/** Dependencies for createJiraClient; `fetchImpl` lets tests inject a fake fetch. */
type JiraClientOptions = {
  config: AtlassianConfig;
  fetchImpl?: FetchLike;
};

/** Parameters for `searchIssues`: a JQL query plus offset paging. */
type SearchInput = {
  jql: string;
  maxResults: number;
  startAt: number;
};

/** Parameters for `createIssue`; `project` falls back to the configured default. */
type CreateInput = {
  project?: string;
  // Issue type name, e.g. "Task".
  type: string;
  summary: string;
  // Markdown; converted to ADF before sending.
  description?: string;
  // When true, the client returns the would-be request instead of sending it.
  dryRun?: boolean;
};

/** Parameters for `updateIssue`; at least one of summary/description is required. */
type UpdateInput = {
  issue: string;
  summary?: string;
  description?: string;
  dryRun?: boolean;
};

/** Parameters for `commentIssue`; `body` is Markdown, converted to ADF. */
type CommentInput = {
  issue: string;
  body: string;
  dryRun?: boolean;
};

/** Parameters for `transitionIssue`; `transition` is the transition id. */
type TransitionInput = {
  issue: string;
  transition: string;
  dryRun?: boolean;
};
|
||||
|
||||
function normalizeIssue(config: AtlassianConfig, issue: Record<string, unknown>): JiraIssueSummary {
|
||||
const fields = (issue.fields ?? {}) as Record<string, unknown>;
|
||||
const issueType = (fields.issuetype ?? {}) as Record<string, unknown>;
|
||||
const status = (fields.status ?? {}) as Record<string, unknown>;
|
||||
const assignee = (fields.assignee ?? {}) as Record<string, unknown>;
|
||||
|
||||
return {
|
||||
key: String(issue.key ?? ""),
|
||||
summary: String(fields.summary ?? ""),
|
||||
issueType: String(issueType.name ?? ""),
|
||||
status: String(status.name ?? ""),
|
||||
assignee: assignee.displayName ? String(assignee.displayName) : undefined,
|
||||
created: String(fields.created ?? ""),
|
||||
updated: String(fields.updated ?? ""),
|
||||
url: `${config.baseUrl}/browse/${issue.key ?? ""}`,
|
||||
};
|
||||
}
|
||||
|
||||
function createRequest(config: AtlassianConfig, method: "GET" | "POST" | "PUT", path: string, body?: unknown) {
|
||||
const url = new URL(path, `${config.jiraBaseUrl}/`);
|
||||
|
||||
return {
|
||||
method,
|
||||
url: url.toString(),
|
||||
...(body === undefined ? {} : { body }),
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Creates the Jira command client used by the CLI.
 *
 * Every method resolves to a `CommandOutput` envelope; mutating methods
 * honour `dryRun` by returning the request they WOULD have sent instead of
 * performing it.
 */
export function createJiraClient(options: JiraClientOptions) {
  const fetchImpl = options.fetchImpl ?? globalThis.fetch;

  // Fail fast at construction time rather than on the first request.
  if (!fetchImpl) {
    throw new Error("Fetch API is not available in this runtime");
  }

  // Shared request helper: resolves the absolute URL and delegates to
  // sendJsonRequest with the Jira-specific error prefix.
  async function send(method: "GET" | "POST" | "PUT", path: string, body?: unknown) {
    const request = createRequest(options.config, method, path, body);
    return sendJsonRequest({
      config: options.config,
      fetchImpl,
      url: request.url,
      method,
      body,
      errorPrefix: "Jira request failed",
    });
  }

  return {
    /** POSTs a JQL search and returns normalized issue summaries with paging info. */
    async searchIssues(input: SearchInput): Promise<CommandOutput<unknown>> {
      const raw = (await send("POST", "/rest/api/3/search", {
        jql: input.jql,
        maxResults: input.maxResults,
        startAt: input.startAt,
        fields: [...ISSUE_FIELDS],
      })) as Record<string, unknown>;

      const issues = Array.isArray(raw.issues) ? raw.issues : [];

      return {
        ok: true,
        data: {
          issues: issues.map((issue) => normalizeIssue(options.config, issue as Record<string, unknown>)),
          startAt: Number(raw.startAt ?? input.startAt),
          maxResults: Number(raw.maxResults ?? input.maxResults),
          total: Number(raw.total ?? issues.length),
        },
      };
    },

    /** Fetches one issue (restricted to ISSUE_FIELDS) and returns its summary plus the raw payload. */
    async getIssue(issue: string): Promise<CommandOutput<unknown>> {
      // URL is built only to borrow its pathname+search; send() re-resolves it.
      const url = new URL(`/rest/api/3/issue/${issue}`, `${options.config.jiraBaseUrl}/`);
      url.searchParams.set("fields", ISSUE_FIELDS.join(","));

      const raw = (await send("GET", `${url.pathname}${url.search}`)) as Record<string, unknown>;

      return {
        ok: true,
        data: {
          issue: normalizeIssue(options.config, raw),
        },
        raw,
      };
    },

    /** Lists the workflow transitions currently available for an issue. */
    async getTransitions(issue: string): Promise<CommandOutput<unknown>> {
      const raw = (await send(
        "GET",
        `/rest/api/3/issue/${issue}/transitions`,
      )) as { transitions?: Array<Record<string, unknown>> };

      return {
        ok: true,
        data: {
          transitions: (raw.transitions ?? []).map((transition) => ({
            id: String(transition.id ?? ""),
            name: String(transition.name ?? ""),
            toStatus: String(((transition.to ?? {}) as Record<string, unknown>).name ?? ""),
            // Transitions with a screen may require extra fields the CLI cannot supply.
            hasScreen: Boolean(transition.hasScreen),
          })),
        },
      };
    },

    /** Creates an issue in the given (or default) project; supports dry-run previews. */
    async createIssue(input: CreateInput): Promise<CommandOutput<unknown>> {
      const project = input.project || options.config.defaultProject;

      if (!project) {
        throw new Error("jira-create requires --project or ATLASSIAN_DEFAULT_PROJECT");
      }

      // Built up front so dry-run can return the exact request.
      const request = createRequest(options.config, "POST", "/rest/api/3/issue", {
        fields: {
          project: { key: project },
          issuetype: { name: input.type },
          summary: input.summary,
          // Description is Markdown on the CLI side; Jira wants ADF.
          ...(input.description ? { description: markdownToAdf(input.description) } : {}),
        },
      });

      if (input.dryRun) {
        return {
          ok: true,
          dryRun: true,
          data: request,
        };
      }

      const raw = await send("POST", "/rest/api/3/issue", request.body);
      return { ok: true, data: raw };
    },

    /** Updates summary and/or description; rejects a no-op call with neither. */
    async updateIssue(input: UpdateInput): Promise<CommandOutput<unknown>> {
      const fields: Record<string, unknown> = {};

      if (input.summary) {
        fields.summary = input.summary;
      }

      if (input.description) {
        fields.description = markdownToAdf(input.description);
      }

      if (Object.keys(fields).length === 0) {
        throw new Error("jira-update requires --summary and/or --description-file");
      }

      const request = createRequest(options.config, "PUT", `/rest/api/3/issue/${input.issue}`, {
        fields,
      });

      if (input.dryRun) {
        return {
          ok: true,
          dryRun: true,
          data: request,
        };
      }

      // Jira returns 204 for issue updates, so report success explicitly.
      await send("PUT", `/rest/api/3/issue/${input.issue}`, request.body);
      return {
        ok: true,
        data: {
          issue: input.issue,
          updated: true,
        },
      };
    },

    /** Adds a comment (Markdown converted to ADF); supports dry-run previews. */
    async commentIssue(input: CommentInput): Promise<CommandOutput<unknown>> {
      const request = createRequest(options.config, "POST", `/rest/api/3/issue/${input.issue}/comment`, {
        body: markdownToAdf(input.body),
      });

      if (input.dryRun) {
        return {
          ok: true,
          dryRun: true,
          data: request,
        };
      }

      const raw = await send("POST", `/rest/api/3/issue/${input.issue}/comment`, request.body);
      return {
        ok: true,
        data: raw,
      };
    },

    /** Applies a workflow transition by id; supports dry-run previews. */
    async transitionIssue(input: TransitionInput): Promise<CommandOutput<unknown>> {
      const request = createRequest(
        options.config,
        "POST",
        `/rest/api/3/issue/${input.issue}/transitions`,
        {
          transition: {
            id: input.transition,
          },
        },
      );

      if (input.dryRun) {
        return {
          ok: true,
          dryRun: true,
          data: request,
        };
      }

      // Transition responses carry no body; report success explicitly.
      await send("POST", `/rest/api/3/issue/${input.issue}/transitions`, request.body);
      return {
        ok: true,
        data: {
          issue: input.issue,
          transitioned: true,
          transition: input.transition,
        },
      };
    },
  };
}
|
||||
44
skills/atlassian/codex/scripts/src/output.ts
Normal file
44
skills/atlassian/codex/scripts/src/output.ts
Normal file
@@ -0,0 +1,44 @@
|
||||
import type { CommandOutput, OutputFormat, Writer } from "./types.js";
|
||||
|
||||
function renderText(payload: CommandOutput<unknown>) {
|
||||
const data = payload.data as Record<string, unknown>;
|
||||
|
||||
if (Array.isArray(data?.issues)) {
|
||||
return data.issues
|
||||
.map((issue) => {
|
||||
const item = issue as Record<string, string>;
|
||||
return `${item.key} [${item.status}] ${item.issueType} - ${item.summary}`;
|
||||
})
|
||||
.join("\n");
|
||||
}
|
||||
|
||||
if (data?.issue && typeof data.issue === "object") {
|
||||
const issue = data.issue as Record<string, string>;
|
||||
return [
|
||||
issue.key,
|
||||
`${issue.issueType} | ${issue.status}`,
|
||||
issue.summary,
|
||||
issue.url,
|
||||
].join("\n");
|
||||
}
|
||||
|
||||
if (Array.isArray(data?.transitions)) {
|
||||
return data.transitions
|
||||
.map((transition) => {
|
||||
const item = transition as Record<string, string>;
|
||||
return `${item.id} ${item.name} -> ${item.toStatus}`;
|
||||
})
|
||||
.join("\n");
|
||||
}
|
||||
|
||||
return JSON.stringify(payload, null, 2);
|
||||
}
|
||||
|
||||
export function writeOutput(
|
||||
writer: Writer,
|
||||
payload: CommandOutput<unknown>,
|
||||
format: OutputFormat = "json",
|
||||
) {
|
||||
const body = format === "text" ? renderText(payload) : JSON.stringify(payload, null, 2);
|
||||
writer.write(`${body}\n`);
|
||||
}
|
||||
85
skills/atlassian/codex/scripts/src/raw.ts
Normal file
85
skills/atlassian/codex/scripts/src/raw.ts
Normal file
@@ -0,0 +1,85 @@
|
||||
import { readWorkspaceFile } from "./files.js";
|
||||
import { sendJsonRequest } from "./http.js";
|
||||
import type { AtlassianConfig, CommandOutput, FetchLike } from "./types.js";
|
||||
|
||||
// Path allowlists for the escape-hatch `raw` command: only versioned REST
// roots are permitted; everything else is rejected by validatePath.
const JIRA_ALLOWED_PREFIXES = ["/rest/api/3/"] as const;
const CONFLUENCE_ALLOWED_PREFIXES = ["/wiki/api/v2/", "/wiki/rest/api/"] as const;

/** Parsed CLI input for the `raw` command. */
type RawInput = {
  product: "jira" | "confluence";
  // Validated to GET/POST/PUT by validateMethod before use.
  method: string;
  // Must start with one of the product's allowed prefixes.
  path: string;
  // Optional JSON body file, resolved relative to `cwd` and workspace-confined.
  bodyFile?: string;
  cwd: string;
  dryRun?: boolean;
};
|
||||
|
||||
function getAllowedPrefixes(product: RawInput["product"]) {
|
||||
return product === "jira" ? JIRA_ALLOWED_PREFIXES : CONFLUENCE_ALLOWED_PREFIXES;
|
||||
}
|
||||
|
||||
function buildUrl(config: AtlassianConfig, product: RawInput["product"], path: string) {
|
||||
const baseUrl = product === "jira" ? config.jiraBaseUrl : config.confluenceBaseUrl;
|
||||
return new URL(path, `${baseUrl}/`).toString();
|
||||
}
|
||||
|
||||
function validateMethod(method: string): asserts method is "GET" | "POST" | "PUT" {
|
||||
if (!["GET", "POST", "PUT"].includes(method)) {
|
||||
throw new Error("raw only allows GET, POST, and PUT");
|
||||
}
|
||||
}
|
||||
|
||||
function validatePath(product: RawInput["product"], path: string) {
|
||||
const allowedPrefixes = getAllowedPrefixes(product);
|
||||
|
||||
if (!allowedPrefixes.some((prefix) => path.startsWith(prefix))) {
|
||||
throw new Error(`raw path is not allowed for ${product}: ${path}`);
|
||||
}
|
||||
}
|
||||
|
||||
async function readRawBody(bodyFile: string | undefined, cwd: string) {
|
||||
if (!bodyFile) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const contents = await readWorkspaceFile(bodyFile, cwd);
|
||||
return JSON.parse(contents) as unknown;
|
||||
}
|
||||
|
||||
/**
 * Executes the escape-hatch `raw` command: validates the verb and path
 * against the product allowlist, optionally loads a JSON body from a
 * workspace file, and either previews (dryRun) or sends the request.
 */
export async function runRawCommand(
  config: AtlassianConfig,
  fetchImpl: FetchLike | undefined,
  input: RawInput,
): Promise<CommandOutput<unknown>> {
  // Both validators throw on violation, so nothing below runs on bad input.
  validateMethod(input.method);
  validatePath(input.product, input.path);

  const body = await readRawBody(input.bodyFile, input.cwd);
  const request = {
    method: input.method,
    url: buildUrl(config, input.product, input.path),
    ...(body === undefined ? {} : { body }),
  };

  // Dry run: surface the exact request without sending it.
  if (input.dryRun) {
    return {
      ok: true,
      dryRun: true,
      data: request,
    };
  }

  const data = await sendJsonRequest({
    config,
    fetchImpl,
    url: request.url,
    method: input.method,
    body,
    errorPrefix: "Raw request failed",
  });

  return {
    ok: true,
    data,
  };
}
|
||||
35
skills/atlassian/codex/scripts/src/types.ts
Normal file
35
skills/atlassian/codex/scripts/src/types.ts
Normal file
@@ -0,0 +1,35 @@
|
||||
/** Resolved connection settings shared by every command. */
export type AtlassianConfig = {
  // Site root, e.g. used to build /browse/ links.
  baseUrl: string;
  jiraBaseUrl: string;
  confluenceBaseUrl: string;
  // Credentials for basic auth (email + API token).
  email: string;
  apiToken: string;
  // Fallbacks when --project / --space are omitted.
  defaultProject?: string;
  defaultSpace?: string;
};

/** Uniform success envelope returned by every command. */
export type CommandOutput<T> = {
  ok: true;
  data: T;
  // True when the command previewed a request instead of sending it.
  dryRun?: boolean;
  // Unprocessed server payload, attached by some read commands.
  raw?: unknown;
};

/** Compact issue shape produced by normalizeIssue. */
export type JiraIssueSummary = {
  key: string;
  summary: string;
  issueType: string;
  status: string;
  // Undefined when the issue is unassigned.
  assignee?: string;
  created: string;
  updated: string;
  url: string;
};

/** Minimal sink interface (satisfied by process.stdout). */
export type Writer = {
  write(chunk: string | Uint8Array): unknown;
};

/** Fetch-compatible function type, used for test injection. */
export type FetchLike = typeof fetch;

/** Output rendering modes supported by writeOutput. */
export type OutputFormat = "json" | "text";
|
||||
15
skills/atlassian/codex/scripts/tsconfig.json
Normal file
15
skills/atlassian/codex/scripts/tsconfig.json
Normal file
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2022",
|
||||
"module": "NodeNext",
|
||||
"moduleResolution": "NodeNext",
|
||||
"esModuleInterop": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"strict": true,
|
||||
"skipLibCheck": true,
|
||||
"resolveJsonModule": true,
|
||||
"types": ["node"],
|
||||
"outDir": "dist"
|
||||
},
|
||||
"include": ["src/**/*.ts", "scripts/**/*.ts", "tests/**/*.ts"]
|
||||
}
|
||||
20
skills/atlassian/cursor/scripts/package.json
Normal file
20
skills/atlassian/cursor/scripts/package.json
Normal file
@@ -0,0 +1,20 @@
|
||||
{
|
||||
"name": "atlassian-skill-scripts",
|
||||
"version": "1.0.0",
|
||||
"description": "Shared runtime for the Atlassian skill",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"atlassian": "tsx src/cli.ts",
|
||||
"typecheck": "tsc --noEmit"
|
||||
},
|
||||
"dependencies": {
|
||||
"commander": "^13.1.0",
|
||||
"dotenv": "^16.4.7"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^24.3.0",
|
||||
"tsx": "^4.20.5",
|
||||
"typescript": "^5.9.2"
|
||||
},
|
||||
"packageManager": "pnpm@10.18.1+sha512.77a884a165cbba2d8d1c19e3b4880eee6d2fcabd0d879121e282196b80042351d5eb3ca0935fa599da1dc51265cc68816ad2bddd2a2de5ea9fdf92adbec7cd34"
|
||||
}
|
||||
361
skills/atlassian/cursor/scripts/pnpm-lock.yaml
generated
Normal file
361
skills/atlassian/cursor/scripts/pnpm-lock.yaml
generated
Normal file
@@ -0,0 +1,361 @@
|
||||
lockfileVersion: '9.0'
|
||||
|
||||
settings:
|
||||
autoInstallPeers: true
|
||||
excludeLinksFromLockfile: false
|
||||
|
||||
importers:
|
||||
|
||||
.:
|
||||
dependencies:
|
||||
commander:
|
||||
specifier: ^13.1.0
|
||||
version: 13.1.0
|
||||
dotenv:
|
||||
specifier: ^16.4.7
|
||||
version: 16.6.1
|
||||
devDependencies:
|
||||
'@types/node':
|
||||
specifier: ^24.3.0
|
||||
version: 24.12.0
|
||||
tsx:
|
||||
specifier: ^4.20.5
|
||||
version: 4.21.0
|
||||
typescript:
|
||||
specifier: ^5.9.2
|
||||
version: 5.9.3
|
||||
|
||||
packages:
|
||||
|
||||
'@esbuild/aix-ppc64@0.27.3':
|
||||
resolution: {integrity: sha512-9fJMTNFTWZMh5qwrBItuziu834eOCUcEqymSH7pY+zoMVEZg3gcPuBNxH1EvfVYe9h0x/Ptw8KBzv7qxb7l8dg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [ppc64]
|
||||
os: [aix]
|
||||
|
||||
'@esbuild/android-arm64@0.27.3':
|
||||
resolution: {integrity: sha512-YdghPYUmj/FX2SYKJ0OZxf+iaKgMsKHVPF1MAq/P8WirnSpCStzKJFjOjzsW0QQ7oIAiccHdcqjbHmJxRb/dmg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [android]
|
||||
|
||||
'@esbuild/android-arm@0.27.3':
|
||||
resolution: {integrity: sha512-i5D1hPY7GIQmXlXhs2w8AWHhenb00+GxjxRncS2ZM7YNVGNfaMxgzSGuO8o8SJzRc/oZwU2bcScvVERk03QhzA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm]
|
||||
os: [android]
|
||||
|
||||
'@esbuild/android-x64@0.27.3':
|
||||
resolution: {integrity: sha512-IN/0BNTkHtk8lkOM8JWAYFg4ORxBkZQf9zXiEOfERX/CzxW3Vg1ewAhU7QSWQpVIzTW+b8Xy+lGzdYXV6UZObQ==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [android]
|
||||
|
||||
'@esbuild/darwin-arm64@0.27.3':
|
||||
resolution: {integrity: sha512-Re491k7ByTVRy0t3EKWajdLIr0gz2kKKfzafkth4Q8A5n1xTHrkqZgLLjFEHVD+AXdUGgQMq+Godfq45mGpCKg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [darwin]
|
||||
|
||||
'@esbuild/darwin-x64@0.27.3':
|
||||
resolution: {integrity: sha512-vHk/hA7/1AckjGzRqi6wbo+jaShzRowYip6rt6q7VYEDX4LEy1pZfDpdxCBnGtl+A5zq8iXDcyuxwtv3hNtHFg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [darwin]
|
||||
|
||||
'@esbuild/freebsd-arm64@0.27.3':
|
||||
resolution: {integrity: sha512-ipTYM2fjt3kQAYOvo6vcxJx3nBYAzPjgTCk7QEgZG8AUO3ydUhvelmhrbOheMnGOlaSFUoHXB6un+A7q4ygY9w==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [freebsd]
|
||||
|
||||
'@esbuild/freebsd-x64@0.27.3':
|
||||
resolution: {integrity: sha512-dDk0X87T7mI6U3K9VjWtHOXqwAMJBNN2r7bejDsc+j03SEjtD9HrOl8gVFByeM0aJksoUuUVU9TBaZa2rgj0oA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [freebsd]
|
||||
|
||||
'@esbuild/linux-arm64@0.27.3':
|
||||
resolution: {integrity: sha512-sZOuFz/xWnZ4KH3YfFrKCf1WyPZHakVzTiqji3WDc0BCl2kBwiJLCXpzLzUBLgmp4veFZdvN5ChW4Eq/8Fc2Fg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-arm@0.27.3':
|
||||
resolution: {integrity: sha512-s6nPv2QkSupJwLYyfS+gwdirm0ukyTFNl3KTgZEAiJDd+iHZcbTPPcWCcRYH+WlNbwChgH2QkE9NSlNrMT8Gfw==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-ia32@0.27.3':
|
||||
resolution: {integrity: sha512-yGlQYjdxtLdh0a3jHjuwOrxQjOZYD/C9PfdbgJJF3TIZWnm/tMd/RcNiLngiu4iwcBAOezdnSLAwQDPqTmtTYg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [ia32]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-loong64@0.27.3':
|
||||
resolution: {integrity: sha512-WO60Sn8ly3gtzhyjATDgieJNet/KqsDlX5nRC5Y3oTFcS1l0KWba+SEa9Ja1GfDqSF1z6hif/SkpQJbL63cgOA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [loong64]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-mips64el@0.27.3':
|
||||
resolution: {integrity: sha512-APsymYA6sGcZ4pD6k+UxbDjOFSvPWyZhjaiPyl/f79xKxwTnrn5QUnXR5prvetuaSMsb4jgeHewIDCIWljrSxw==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [mips64el]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-ppc64@0.27.3':
|
||||
resolution: {integrity: sha512-eizBnTeBefojtDb9nSh4vvVQ3V9Qf9Df01PfawPcRzJH4gFSgrObw+LveUyDoKU3kxi5+9RJTCWlj4FjYXVPEA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [ppc64]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-riscv64@0.27.3':
|
||||
resolution: {integrity: sha512-3Emwh0r5wmfm3ssTWRQSyVhbOHvqegUDRd0WhmXKX2mkHJe1SFCMJhagUleMq+Uci34wLSipf8Lagt4LlpRFWQ==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [riscv64]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-s390x@0.27.3':
|
||||
resolution: {integrity: sha512-pBHUx9LzXWBc7MFIEEL0yD/ZVtNgLytvx60gES28GcWMqil8ElCYR4kvbV2BDqsHOvVDRrOxGySBM9Fcv744hw==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [s390x]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-x64@0.27.3':
|
||||
resolution: {integrity: sha512-Czi8yzXUWIQYAtL/2y6vogER8pvcsOsk5cpwL4Gk5nJqH5UZiVByIY8Eorm5R13gq+DQKYg0+JyQoytLQas4dA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/netbsd-arm64@0.27.3':
|
||||
resolution: {integrity: sha512-sDpk0RgmTCR/5HguIZa9n9u+HVKf40fbEUt+iTzSnCaGvY9kFP0YKBWZtJaraonFnqef5SlJ8/TiPAxzyS+UoA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [netbsd]
|
||||
|
||||
'@esbuild/netbsd-x64@0.27.3':
|
||||
resolution: {integrity: sha512-P14lFKJl/DdaE00LItAukUdZO5iqNH7+PjoBm+fLQjtxfcfFE20Xf5CrLsmZdq5LFFZzb5JMZ9grUwvtVYzjiA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [netbsd]
|
||||
|
||||
'@esbuild/openbsd-arm64@0.27.3':
|
||||
resolution: {integrity: sha512-AIcMP77AvirGbRl/UZFTq5hjXK+2wC7qFRGoHSDrZ5v5b8DK/GYpXW3CPRL53NkvDqb9D+alBiC/dV0Fb7eJcw==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [openbsd]
|
||||
|
||||
'@esbuild/openbsd-x64@0.27.3':
|
||||
resolution: {integrity: sha512-DnW2sRrBzA+YnE70LKqnM3P+z8vehfJWHXECbwBmH/CU51z6FiqTQTHFenPlHmo3a8UgpLyH3PT+87OViOh1AQ==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [openbsd]
|
||||
|
||||
'@esbuild/openharmony-arm64@0.27.3':
|
||||
resolution: {integrity: sha512-NinAEgr/etERPTsZJ7aEZQvvg/A6IsZG/LgZy+81wON2huV7SrK3e63dU0XhyZP4RKGyTm7aOgmQk0bGp0fy2g==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [openharmony]
|
||||
|
||||
'@esbuild/sunos-x64@0.27.3':
|
||||
resolution: {integrity: sha512-PanZ+nEz+eWoBJ8/f8HKxTTD172SKwdXebZ0ndd953gt1HRBbhMsaNqjTyYLGLPdoWHy4zLU7bDVJztF5f3BHA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [sunos]
|
||||
|
||||
'@esbuild/win32-arm64@0.27.3':
|
||||
resolution: {integrity: sha512-B2t59lWWYrbRDw/tjiWOuzSsFh1Y/E95ofKz7rIVYSQkUYBjfSgf6oeYPNWHToFRr2zx52JKApIcAS/D5TUBnA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [win32]
|
||||
|
||||
'@esbuild/win32-ia32@0.27.3':
|
||||
resolution: {integrity: sha512-QLKSFeXNS8+tHW7tZpMtjlNb7HKau0QDpwm49u0vUp9y1WOF+PEzkU84y9GqYaAVW8aH8f3GcBck26jh54cX4Q==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [ia32]
|
||||
os: [win32]
|
||||
|
||||
'@esbuild/win32-x64@0.27.3':
|
||||
resolution: {integrity: sha512-4uJGhsxuptu3OcpVAzli+/gWusVGwZZHTlS63hh++ehExkVT8SgiEf7/uC/PclrPPkLhZqGgCTjd0VWLo6xMqA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [win32]
|
||||
|
||||
'@types/node@24.12.0':
|
||||
resolution: {integrity: sha512-GYDxsZi3ChgmckRT9HPU0WEhKLP08ev/Yfcq2AstjrDASOYCSXeyjDsHg4v5t4jOj7cyDX3vmprafKlWIG9MXQ==}
|
||||
|
||||
commander@13.1.0:
|
||||
resolution: {integrity: sha512-/rFeCpNJQbhSZjGVwO9RFV3xPqbnERS8MmIQzCtD/zl6gpJuV/bMLuN92oG3F7d8oDEHHRrujSXNUr8fpjntKw==}
|
||||
engines: {node: '>=18'}
|
||||
|
||||
dotenv@16.6.1:
|
||||
resolution: {integrity: sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==}
|
||||
engines: {node: '>=12'}
|
||||
|
||||
esbuild@0.27.3:
|
||||
resolution: {integrity: sha512-8VwMnyGCONIs6cWue2IdpHxHnAjzxnw2Zr7MkVxB2vjmQ2ivqGFb4LEG3SMnv0Gb2F/G/2yA8zUaiL1gywDCCg==}
|
||||
engines: {node: '>=18'}
|
||||
hasBin: true
|
||||
|
||||
fsevents@2.3.3:
|
||||
resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==}
|
||||
engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
|
||||
os: [darwin]
|
||||
|
||||
get-tsconfig@4.13.6:
|
||||
resolution: {integrity: sha512-shZT/QMiSHc/YBLxxOkMtgSid5HFoauqCE3/exfsEcwg1WkeqjG+V40yBbBrsD+jW2HDXcs28xOfcbm2jI8Ddw==}
|
||||
|
||||
resolve-pkg-maps@1.0.0:
|
||||
resolution: {integrity: sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==}
|
||||
|
||||
tsx@4.21.0:
|
||||
resolution: {integrity: sha512-5C1sg4USs1lfG0GFb2RLXsdpXqBSEhAaA/0kPL01wxzpMqLILNxIxIOKiILz+cdg/pLnOUxFYOR5yhHU666wbw==}
|
||||
engines: {node: '>=18.0.0'}
|
||||
hasBin: true
|
||||
|
||||
typescript@5.9.3:
|
||||
resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==}
|
||||
engines: {node: '>=14.17'}
|
||||
hasBin: true
|
||||
|
||||
undici-types@7.16.0:
|
||||
resolution: {integrity: sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==}
|
||||
|
||||
snapshots:
|
||||
|
||||
'@esbuild/aix-ppc64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/android-arm64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/android-arm@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/android-x64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/darwin-arm64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/darwin-x64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/freebsd-arm64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/freebsd-x64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-arm64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-arm@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-ia32@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-loong64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-mips64el@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-ppc64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-riscv64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-s390x@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-x64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/netbsd-arm64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/netbsd-x64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/openbsd-arm64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/openbsd-x64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/openharmony-arm64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/sunos-x64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/win32-arm64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/win32-ia32@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/win32-x64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@types/node@24.12.0':
|
||||
dependencies:
|
||||
undici-types: 7.16.0
|
||||
|
||||
commander@13.1.0: {}
|
||||
|
||||
dotenv@16.6.1: {}
|
||||
|
||||
esbuild@0.27.3:
|
||||
optionalDependencies:
|
||||
'@esbuild/aix-ppc64': 0.27.3
|
||||
'@esbuild/android-arm': 0.27.3
|
||||
'@esbuild/android-arm64': 0.27.3
|
||||
'@esbuild/android-x64': 0.27.3
|
||||
'@esbuild/darwin-arm64': 0.27.3
|
||||
'@esbuild/darwin-x64': 0.27.3
|
||||
'@esbuild/freebsd-arm64': 0.27.3
|
||||
'@esbuild/freebsd-x64': 0.27.3
|
||||
'@esbuild/linux-arm': 0.27.3
|
||||
'@esbuild/linux-arm64': 0.27.3
|
||||
'@esbuild/linux-ia32': 0.27.3
|
||||
'@esbuild/linux-loong64': 0.27.3
|
||||
'@esbuild/linux-mips64el': 0.27.3
|
||||
'@esbuild/linux-ppc64': 0.27.3
|
||||
'@esbuild/linux-riscv64': 0.27.3
|
||||
'@esbuild/linux-s390x': 0.27.3
|
||||
'@esbuild/linux-x64': 0.27.3
|
||||
'@esbuild/netbsd-arm64': 0.27.3
|
||||
'@esbuild/netbsd-x64': 0.27.3
|
||||
'@esbuild/openbsd-arm64': 0.27.3
|
||||
'@esbuild/openbsd-x64': 0.27.3
|
||||
'@esbuild/openharmony-arm64': 0.27.3
|
||||
'@esbuild/sunos-x64': 0.27.3
|
||||
'@esbuild/win32-arm64': 0.27.3
|
||||
'@esbuild/win32-ia32': 0.27.3
|
||||
'@esbuild/win32-x64': 0.27.3
|
||||
|
||||
fsevents@2.3.3:
|
||||
optional: true
|
||||
|
||||
get-tsconfig@4.13.6:
|
||||
dependencies:
|
||||
resolve-pkg-maps: 1.0.0
|
||||
|
||||
resolve-pkg-maps@1.0.0: {}
|
||||
|
||||
tsx@4.21.0:
|
||||
dependencies:
|
||||
esbuild: 0.27.3
|
||||
get-tsconfig: 4.13.6
|
||||
optionalDependencies:
|
||||
fsevents: 2.3.3
|
||||
|
||||
typescript@5.9.3: {}
|
||||
|
||||
undici-types@7.16.0: {}
|
||||
92
skills/atlassian/cursor/scripts/src/adf.ts
Normal file
92
skills/atlassian/cursor/scripts/src/adf.ts
Normal file
@@ -0,0 +1,92 @@
|
||||
const TEXT_NODE = "text";
|
||||
|
||||
function textNode(text: string) {
|
||||
return {
|
||||
type: TEXT_NODE,
|
||||
text,
|
||||
};
|
||||
}
|
||||
|
||||
function paragraphNode(lines: string[]) {
|
||||
const content: Array<{ type: string; text?: string }> = [];
|
||||
|
||||
lines.forEach((line, index) => {
|
||||
if (index > 0) {
|
||||
content.push({ type: "hardBreak" });
|
||||
}
|
||||
|
||||
if (line.length > 0) {
|
||||
content.push(textNode(line));
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
type: "paragraph",
|
||||
...(content.length > 0 ? { content } : {}),
|
||||
};
|
||||
}
|
||||
|
||||
export function markdownToAdf(input: string) {
|
||||
const lines = input.replace(/\r\n/g, "\n").split("\n");
|
||||
const content: Array<Record<string, unknown>> = [];
|
||||
let index = 0;
|
||||
|
||||
while (index < lines.length) {
|
||||
const current = lines[index]?.trimEnd() ?? "";
|
||||
|
||||
if (current.trim().length === 0) {
|
||||
index += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
const heading = current.match(/^(#{1,6})\s+(.*)$/);
|
||||
|
||||
if (heading) {
|
||||
content.push({
|
||||
type: "heading",
|
||||
attrs: { level: heading[1].length },
|
||||
content: [textNode(heading[2])],
|
||||
});
|
||||
index += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (/^[-*]\s+/.test(current)) {
|
||||
const items: Array<Record<string, unknown>> = [];
|
||||
|
||||
while (index < lines.length && /^[-*]\s+/.test(lines[index] ?? "")) {
|
||||
items.push({
|
||||
type: "listItem",
|
||||
content: [
|
||||
{
|
||||
type: "paragraph",
|
||||
content: [textNode((lines[index] ?? "").replace(/^[-*]\s+/, ""))],
|
||||
},
|
||||
],
|
||||
});
|
||||
index += 1;
|
||||
}
|
||||
|
||||
content.push({
|
||||
type: "bulletList",
|
||||
content: items,
|
||||
});
|
||||
continue;
|
||||
}
|
||||
|
||||
const paragraphLines: string[] = [];
|
||||
|
||||
while (index < lines.length && (lines[index]?.trim().length ?? 0) > 0) {
|
||||
paragraphLines.push(lines[index] ?? "");
|
||||
index += 1;
|
||||
}
|
||||
|
||||
content.push(paragraphNode(paragraphLines));
|
||||
}
|
||||
|
||||
return {
|
||||
type: "doc",
|
||||
version: 1,
|
||||
content,
|
||||
};
|
||||
}
|
||||
339
skills/atlassian/cursor/scripts/src/cli.ts
Normal file
339
skills/atlassian/cursor/scripts/src/cli.ts
Normal file
@@ -0,0 +1,339 @@
|
||||
import process from "node:process";
|
||||
import { pathToFileURL } from "node:url";
|
||||
|
||||
import { Command } from "commander";
|
||||
|
||||
import { createConfluenceClient } from "./confluence.js";
|
||||
import { loadConfig } from "./config.js";
|
||||
import { readWorkspaceFile } from "./files.js";
|
||||
import { createJiraClient } from "./jira.js";
|
||||
import { writeOutput } from "./output.js";
|
||||
import { runRawCommand } from "./raw.js";
|
||||
import type { FetchLike, OutputFormat, Writer } from "./types.js";
|
||||
|
||||
// Dependency-injection seams for the CLI. Every field is optional and
// falls back to the real process/global equivalent inside createRuntime,
// so tests can swap in stubs without touching globals.
type CliContext = {
  cwd?: string; // working directory for .env lookup and file resolution
  env?: NodeJS.ProcessEnv; // environment map (tests pass a plain object)
  fetchImpl?: FetchLike; // fetch override for stubbing HTTP calls
  stdout?: Writer; // command output sink
  stderr?: Writer; // error output sink
};
|
||||
|
||||
function resolveFormat(format: string | undefined): OutputFormat {
|
||||
return format === "text" ? "text" : "json";
|
||||
}
|
||||
|
||||
function createRuntime(context: CliContext) {
|
||||
const cwd = context.cwd ?? process.cwd();
|
||||
const env = context.env ?? process.env;
|
||||
const stdout = context.stdout ?? process.stdout;
|
||||
const stderr = context.stderr ?? process.stderr;
|
||||
let configCache: ReturnType<typeof loadConfig> | undefined;
|
||||
let jiraCache: ReturnType<typeof createJiraClient> | undefined;
|
||||
let confluenceCache: ReturnType<typeof createConfluenceClient> | undefined;
|
||||
|
||||
function getConfig() {
|
||||
configCache ??= loadConfig(env, { cwd });
|
||||
return configCache;
|
||||
}
|
||||
|
||||
function getJiraClient() {
|
||||
jiraCache ??= createJiraClient({
|
||||
config: getConfig(),
|
||||
fetchImpl: context.fetchImpl,
|
||||
});
|
||||
return jiraCache;
|
||||
}
|
||||
|
||||
function getConfluenceClient() {
|
||||
confluenceCache ??= createConfluenceClient({
|
||||
config: getConfig(),
|
||||
fetchImpl: context.fetchImpl,
|
||||
});
|
||||
return confluenceCache;
|
||||
}
|
||||
|
||||
async function readBodyFile(filePath: string | undefined) {
|
||||
if (!filePath) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
return readWorkspaceFile(filePath, cwd);
|
||||
}
|
||||
|
||||
return {
|
||||
cwd,
|
||||
stdout,
|
||||
stderr,
|
||||
readBodyFile,
|
||||
getConfig,
|
||||
getJiraClient,
|
||||
getConfluenceClient,
|
||||
fetchImpl: context.fetchImpl,
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Assemble the full commander program: one top-level "atlassian" command
 * plus health, raw, Confluence (conf-*) and Jira (jira-*) subcommands.
 * Every subcommand accepts --format (json|text) and the mutating ones
 * accept --dry-run, which prints the would-be request instead of sending it.
 * Registration order here is also the order shown in --help output.
 */
export function buildProgram(context: CliContext = {}) {
  const runtime = createRuntime(context);
  const program = new Command()
    .name("atlassian")
    .description("Portable Atlassian CLI for multi-agent skills")
    .version("0.1.0");

  // NOTE(review): despite the description, this only echoes resolved
  // configuration — it performs no network round-trip. Confirm whether a
  // real connectivity probe was intended.
  program
    .command("health")
    .description("Validate configuration and Atlassian connectivity")
    .option("--format <format>", "Output format", "json")
    .action((options) => {
      writeOutput(
        runtime.stdout,
        {
          ok: true,
          data: {
            baseUrl: runtime.getConfig().baseUrl,
            jiraBaseUrl: runtime.getConfig().jiraBaseUrl,
            confluenceBaseUrl: runtime.getConfig().confluenceBaseUrl,
            defaultProject: runtime.getConfig().defaultProject,
            defaultSpace: runtime.getConfig().defaultSpace,
          },
        },
        resolveFormat(options.format),
      );
    });

  // --- Confluence commands ---

  // CQL search with offset-based paging.
  program
    .command("conf-search")
    .requiredOption("--query <query>", "CQL search query")
    .option("--max-results <number>", "Maximum results to return", "50")
    .option("--start-at <number>", "Result offset", "0")
    .option("--format <format>", "Output format", "json")
    .action(async (options) => {
      const payload = await runtime.getConfluenceClient().searchPages({
        query: options.query,
        maxResults: Number(options.maxResults),
        startAt: Number(options.startAt),
      });

      writeOutput(runtime.stdout, payload, resolveFormat(options.format));
    });

  program
    .command("conf-get")
    .requiredOption("--page <page>", "Confluence page ID")
    .option("--format <format>", "Output format", "json")
    .action(async (options) => {
      const payload = await runtime.getConfluenceClient().getPage(options.page);
      writeOutput(runtime.stdout, payload, resolveFormat(options.format));
    });

  // Body files are read through runtime.readBodyFile, which enforces that
  // paths stay inside the workspace.
  program
    .command("conf-create")
    .requiredOption("--title <title>", "Confluence page title")
    .requiredOption("--body-file <path>", "Workspace-relative storage-format body file")
    .option("--space <space>", "Confluence space ID")
    .option("--dry-run", "Print the request without sending it")
    .option("--format <format>", "Output format", "json")
    .action(async (options) => {
      const payload = await runtime.getConfluenceClient().createPage({
        space: options.space,
        title: options.title,
        body: (await runtime.readBodyFile(options.bodyFile)) as string,
        dryRun: Boolean(options.dryRun),
      });

      writeOutput(runtime.stdout, payload, resolveFormat(options.format));
    });

  program
    .command("conf-update")
    .requiredOption("--page <page>", "Confluence page ID")
    .requiredOption("--title <title>", "Confluence page title")
    .requiredOption("--body-file <path>", "Workspace-relative storage-format body file")
    .option("--dry-run", "Print the request without sending it")
    .option("--format <format>", "Output format", "json")
    .action(async (options) => {
      const payload = await runtime.getConfluenceClient().updatePage({
        pageId: options.page,
        title: options.title,
        body: (await runtime.readBodyFile(options.bodyFile)) as string,
        dryRun: Boolean(options.dryRun),
      });

      writeOutput(runtime.stdout, payload, resolveFormat(options.format));
    });

  program
    .command("conf-comment")
    .requiredOption("--page <page>", "Confluence page ID")
    .requiredOption("--body-file <path>", "Workspace-relative storage-format body file")
    .option("--dry-run", "Print the request without sending it")
    .option("--format <format>", "Output format", "json")
    .action(async (options) => {
      const payload = await runtime.getConfluenceClient().commentPage({
        pageId: options.page,
        body: (await runtime.readBodyFile(options.bodyFile)) as string,
        dryRun: Boolean(options.dryRun),
      });

      writeOutput(runtime.stdout, payload, resolveFormat(options.format));
    });

  // NOTE(review): --start-at is forwarded as a numeric cursor to the v2
  // direct-children endpoint, which expects an opaque cursor token —
  // confirm how the default "0" behaves against the live API.
  program
    .command("conf-children")
    .requiredOption("--page <page>", "Confluence page ID")
    .option("--max-results <number>", "Maximum results to return", "50")
    .option("--start-at <number>", "Cursor/start token", "0")
    .option("--format <format>", "Output format", "json")
    .action(async (options) => {
      const payload = await runtime.getConfluenceClient().listChildren(
        options.page,
        Number(options.maxResults),
        Number(options.startAt),
      );

      writeOutput(runtime.stdout, payload, resolveFormat(options.format));
    });

  // Escape hatch: validated raw REST request against either product.
  program
    .command("raw")
    .requiredOption("--product <product>", "jira or confluence")
    .requiredOption("--method <method>", "GET, POST, or PUT")
    .requiredOption("--path <path>", "Validated API path")
    .option("--body-file <path>", "Workspace-relative JSON file")
    .option("--dry-run", "Print the request without sending it")
    .option("--format <format>", "Output format", "json")
    .action(async (options) => {
      const payload = await runRawCommand(runtime.getConfig(), runtime.fetchImpl, {
        product: options.product,
        method: String(options.method).toUpperCase(),
        path: options.path,
        bodyFile: options.bodyFile,
        cwd: runtime.cwd,
        dryRun: Boolean(options.dryRun),
      });

      writeOutput(runtime.stdout, payload, resolveFormat(options.format));
    });

  // --- Jira commands ---

  program
    .command("jira-search")
    .requiredOption("--jql <jql>", "JQL expression to execute")
    .option("--max-results <number>", "Maximum results to return", "50")
    .option("--start-at <number>", "Result offset", "0")
    .option("--format <format>", "Output format", "json")
    .action(async (options) => {
      const payload = await runtime.getJiraClient().searchIssues({
        jql: options.jql,
        maxResults: Number(options.maxResults),
        startAt: Number(options.startAt),
      });

      writeOutput(runtime.stdout, payload, resolveFormat(options.format));
    });

  program
    .command("jira-get")
    .requiredOption("--issue <issue>", "Issue key")
    .option("--format <format>", "Output format", "json")
    .action(async (options) => {
      const payload = await runtime.getJiraClient().getIssue(options.issue);
      writeOutput(runtime.stdout, payload, resolveFormat(options.format));
    });

  // --project is optional here: the client falls back to the configured
  // default project when it is omitted.
  program
    .command("jira-create")
    .requiredOption("--type <type>", "Issue type name")
    .requiredOption("--summary <summary>", "Issue summary")
    .option("--project <project>", "Project key")
    .option("--description-file <path>", "Workspace-relative markdown/text file")
    .option("--dry-run", "Print the request without sending it")
    .option("--format <format>", "Output format", "json")
    .action(async (options) => {
      const payload = await runtime.getJiraClient().createIssue({
        project: options.project,
        type: options.type,
        summary: options.summary,
        description: await runtime.readBodyFile(options.descriptionFile),
        dryRun: Boolean(options.dryRun),
      });

      writeOutput(runtime.stdout, payload, resolveFormat(options.format));
    });

  program
    .command("jira-update")
    .requiredOption("--issue <issue>", "Issue key")
    .option("--summary <summary>", "Updated summary")
    .option("--description-file <path>", "Workspace-relative markdown/text file")
    .option("--dry-run", "Print the request without sending it")
    .option("--format <format>", "Output format", "json")
    .action(async (options) => {
      const payload = await runtime.getJiraClient().updateIssue({
        issue: options.issue,
        summary: options.summary,
        description: await runtime.readBodyFile(options.descriptionFile),
        dryRun: Boolean(options.dryRun),
      });

      writeOutput(runtime.stdout, payload, resolveFormat(options.format));
    });

  program
    .command("jira-comment")
    .requiredOption("--issue <issue>", "Issue key")
    .requiredOption("--body-file <path>", "Workspace-relative markdown/text file")
    .option("--dry-run", "Print the request without sending it")
    .option("--format <format>", "Output format", "json")
    .action(async (options) => {
      const payload = await runtime.getJiraClient().commentIssue({
        issue: options.issue,
        body: (await runtime.readBodyFile(options.bodyFile)) as string,
        dryRun: Boolean(options.dryRun),
      });

      writeOutput(runtime.stdout, payload, resolveFormat(options.format));
    });

  // Lists the transitions currently available on the issue; IDs feed the
  // jira-transition command below.
  program
    .command("jira-transitions")
    .requiredOption("--issue <issue>", "Issue key")
    .option("--format <format>", "Output format", "json")
    .action(async (options) => {
      const payload = await runtime.getJiraClient().getTransitions(options.issue);
      writeOutput(runtime.stdout, payload, resolveFormat(options.format));
    });

  program
    .command("jira-transition")
    .requiredOption("--issue <issue>", "Issue key")
    .requiredOption("--transition <transition>", "Transition ID")
    .option("--dry-run", "Print the request without sending it")
    .option("--format <format>", "Output format", "json")
    .action(async (options) => {
      const payload = await runtime.getJiraClient().transitionIssue({
        issue: options.issue,
        transition: options.transition,
        dryRun: Boolean(options.dryRun),
      });

      writeOutput(runtime.stdout, payload, resolveFormat(options.format));
    });

  return program;
}
|
||||
|
||||
export async function runCli(argv = process.argv, context: CliContext = {}) {
|
||||
const program = buildProgram(context);
|
||||
await program.parseAsync(argv);
|
||||
}
|
||||
|
||||
const isDirectExecution =
|
||||
Boolean(process.argv[1]) && import.meta.url === pathToFileURL(process.argv[1]).href;
|
||||
|
||||
if (isDirectExecution) {
|
||||
runCli().catch((error: unknown) => {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
process.stderr.write(`${message}\n`);
|
||||
process.exitCode = 1;
|
||||
});
|
||||
}
|
||||
52
skills/atlassian/cursor/scripts/src/config.ts
Normal file
52
skills/atlassian/cursor/scripts/src/config.ts
Normal file
@@ -0,0 +1,52 @@
|
||||
import path from "node:path";
|
||||
|
||||
import { config as loadDotEnv } from "dotenv";
|
||||
|
||||
import type { AtlassianConfig } from "./types.js";
|
||||
|
||||
function normalizeBaseUrl(value: string) {
|
||||
return value.replace(/\/+$/, "");
|
||||
}
|
||||
|
||||
function readRequired(env: NodeJS.ProcessEnv, key: string) {
|
||||
const value = env[key]?.trim();
|
||||
|
||||
if (!value) {
|
||||
throw new Error(`Missing required environment variable: ${key}`);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
export function loadConfig(
|
||||
env: NodeJS.ProcessEnv = process.env,
|
||||
options?: {
|
||||
cwd?: string;
|
||||
},
|
||||
): AtlassianConfig {
|
||||
loadDotEnv({
|
||||
path: path.resolve(options?.cwd ?? process.cwd(), ".env"),
|
||||
processEnv: env as Record<string, string>,
|
||||
override: false,
|
||||
});
|
||||
|
||||
const baseUrl = normalizeBaseUrl(readRequired(env, "ATLASSIAN_BASE_URL"));
|
||||
|
||||
return {
|
||||
baseUrl,
|
||||
jiraBaseUrl: normalizeBaseUrl(env.ATLASSIAN_JIRA_BASE_URL?.trim() || baseUrl),
|
||||
confluenceBaseUrl: normalizeBaseUrl(env.ATLASSIAN_CONFLUENCE_BASE_URL?.trim() || baseUrl),
|
||||
email: readRequired(env, "ATLASSIAN_EMAIL"),
|
||||
apiToken: readRequired(env, "ATLASSIAN_API_TOKEN"),
|
||||
defaultProject: env.ATLASSIAN_DEFAULT_PROJECT?.trim() || undefined,
|
||||
defaultSpace: env.ATLASSIAN_DEFAULT_SPACE?.trim() || undefined,
|
||||
};
|
||||
}
|
||||
|
||||
export function createBasicAuthHeader(config: {
|
||||
email: string;
|
||||
apiToken: string;
|
||||
[key: string]: unknown;
|
||||
}) {
|
||||
return `Basic ${Buffer.from(`${config.email}:${config.apiToken}`).toString("base64")}`;
|
||||
}
|
||||
292
skills/atlassian/cursor/scripts/src/confluence.ts
Normal file
292
skills/atlassian/cursor/scripts/src/confluence.ts
Normal file
@@ -0,0 +1,292 @@
|
||||
import { sendJsonRequest } from "./http.js";
|
||||
import type { AtlassianConfig, CommandOutput, FetchLike } from "./types.js";
|
||||
|
||||
// Constructor options for the Confluence client.
type ConfluenceClientOptions = {
  config: AtlassianConfig; // resolved connection settings
  fetchImpl?: FetchLike; // fetch override for tests; defaults to global fetch
};

// Input for CQL page search (offset-based paging).
type SearchInput = {
  query: string; // CQL expression
  maxResults: number; // page size, sent as "limit"
  startAt: number; // result offset, sent as "start"
};

// Input for page creation; space falls back to the configured default.
type CreateInput = {
  space?: string;
  title: string;
  body: string; // Confluence storage-format XHTML
  dryRun?: boolean; // when true, return the request instead of sending it
};

// Input for a full page replacement (version is fetched and bumped).
type UpdateInput = {
  pageId: string;
  title: string;
  body: string; // Confluence storage-format XHTML
  dryRun?: boolean;
};

// Input for posting a footer comment on a page.
type CommentInput = {
  pageId: string;
  body: string; // Confluence storage-format XHTML
  dryRun?: boolean;
};

// Compact page shape emitted by this module; optional fields are present
// only when the source payload carries them.
type PageSummary = {
  id: string;
  title: string;
  type: string;
  status?: string;
  spaceId?: string;
  url?: string; // absolute web UI link, anchored on the tenant base URL
};
|
||||
|
||||
function buildUrl(baseUrl: string, path: string) {
|
||||
return new URL(path, `${baseUrl}/`).toString();
|
||||
}
|
||||
|
||||
function normalizePage(baseUrl: string, page: Record<string, unknown>, excerpt?: string) {
|
||||
const links = (page._links ?? {}) as Record<string, unknown>;
|
||||
|
||||
return {
|
||||
id: String(page.id ?? ""),
|
||||
title: String(page.title ?? ""),
|
||||
type: String(page.type ?? "page"),
|
||||
...(page.status ? { status: String(page.status) } : {}),
|
||||
...(page.spaceId ? { spaceId: String(page.spaceId) } : {}),
|
||||
...(excerpt ? { excerpt } : {}),
|
||||
...(links.webui ? { url: `${baseUrl}${String(links.webui)}` } : {}),
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Create a Confluence Cloud client bound to the given config/fetch.
 * Reads use the v2 pages API (plus the v1 CQL search endpoint); mutations
 * (createPage/updatePage/commentPage) honor dryRun by returning the
 * would-be request instead of sending it. All responses are wrapped in the
 * CLI's CommandOutput envelope.
 */
export function createConfluenceClient(options: ConfluenceClientOptions) {
  const config = options.config;

  // Fetch the current page (storage body) so updatePage can read its
  // version number, status, and spaceId before writing.
  async function getPageForUpdate(pageId: string) {
    return (await sendJsonRequest({
      config,
      fetchImpl: options.fetchImpl,
      url: buildUrl(config.confluenceBaseUrl, `/wiki/api/v2/pages/${pageId}?body-format=storage`),
      method: "GET",
      errorPrefix: "Confluence request failed",
    })) as Record<string, unknown>;
  }

  return {
    // CQL search via the v1 search endpoint (offset-based paging).
    async searchPages(input: SearchInput): Promise<CommandOutput<unknown>> {
      const url = new URL("/wiki/rest/api/search", `${config.confluenceBaseUrl}/`);
      url.searchParams.set("cql", input.query);
      url.searchParams.set("limit", String(input.maxResults));
      url.searchParams.set("start", String(input.startAt));

      const raw = (await sendJsonRequest({
        config,
        fetchImpl: options.fetchImpl,
        url: url.toString(),
        method: "GET",
        errorPrefix: "Confluence request failed",
      })) as Record<string, unknown>;

      const results = Array.isArray(raw.results) ? raw.results : [];

      return {
        ok: true,
        data: {
          // Search hits nest the page under `content`; the excerpt lives on
          // the hit itself.
          pages: results.map((entry) => {
            const result = entry as Record<string, unknown>;
            return normalizePage(
              config.baseUrl,
              (result.content ?? {}) as Record<string, unknown>,
              result.excerpt ? String(result.excerpt) : undefined,
            );
          }),
          // Echo the requested paging values when the server omits them.
          startAt: Number(raw.start ?? input.startAt),
          maxResults: Number(raw.limit ?? input.maxResults),
          total: Number(raw.totalSize ?? raw.size ?? results.length),
        },
      };
    },

    // Fetch one page with its storage-format body and version number.
    async getPage(pageId: string): Promise<CommandOutput<unknown>> {
      const raw = (await sendJsonRequest({
        config,
        fetchImpl: options.fetchImpl,
        url: buildUrl(config.confluenceBaseUrl, `/wiki/api/v2/pages/${pageId}?body-format=storage`),
        method: "GET",
        errorPrefix: "Confluence request failed",
      })) as Record<string, unknown>;

      const body = ((raw.body ?? {}) as Record<string, unknown>).storage as Record<string, unknown> | undefined;

      return {
        ok: true,
        data: {
          page: {
            ...normalizePage(config.baseUrl, raw),
            version: Number((((raw.version ?? {}) as Record<string, unknown>).number ?? 0)),
            body: body?.value ? String(body.value) : "",
          },
        },
        // Full server payload kept alongside the normalized view.
        raw,
      };
    },

    // List a page's direct children.
    // NOTE(review): the v2 endpoint pages with an opaque `cursor` token,
    // but a numeric startAt is forwarded here (default "0") — confirm the
    // live API accepts that on the first request.
    async listChildren(pageId: string, maxResults: number, startAt: number): Promise<CommandOutput<unknown>> {
      const url = new URL(`/wiki/api/v2/pages/${pageId}/direct-children`, `${config.confluenceBaseUrl}/`);
      url.searchParams.set("limit", String(maxResults));
      url.searchParams.set("cursor", String(startAt));

      const raw = (await sendJsonRequest({
        config,
        fetchImpl: options.fetchImpl,
        url: url.toString(),
        method: "GET",
        errorPrefix: "Confluence request failed",
      })) as Record<string, unknown>;

      const results = Array.isArray(raw.results) ? raw.results : [];
      const links = (raw._links ?? {}) as Record<string, unknown>;

      return {
        ok: true,
        data: {
          pages: results.map((page) => normalizePage(config.baseUrl, page as Record<string, unknown>)),
          // Relative URL of the next page of results, or null when done.
          nextCursor: links.next ? String(links.next) : null,
        },
      };
    },

    // Create a page in the given (or default) space, storage format.
    async createPage(input: CreateInput): Promise<CommandOutput<unknown>> {
      const spaceId = input.space || config.defaultSpace;

      if (!spaceId) {
        throw new Error("conf-create requires --space or ATLASSIAN_DEFAULT_SPACE");
      }

      const request = {
        method: "POST" as const,
        url: buildUrl(config.confluenceBaseUrl, "/wiki/api/v2/pages"),
        body: {
          spaceId,
          title: input.title,
          status: "current",
          body: {
            representation: "storage",
            value: input.body,
          },
        },
      };

      // Dry run: surface the exact request instead of sending it.
      if (input.dryRun) {
        return {
          ok: true,
          dryRun: true,
          data: request,
        };
      }

      const raw = await sendJsonRequest({
        config,
        fetchImpl: options.fetchImpl,
        url: request.url,
        method: request.method,
        body: request.body,
        errorPrefix: "Confluence request failed",
      });

      return {
        ok: true,
        data: raw,
      };
    },

    // Replace a page's title/body, bumping the server-side version by one.
    async updatePage(input: UpdateInput): Promise<CommandOutput<unknown>> {
      const currentPage = await getPageForUpdate(input.pageId);
      const version = (((currentPage.version ?? {}) as Record<string, unknown>).number ?? 0) as number;
      const spaceId = String(currentPage.spaceId ?? "");

      const request = {
        method: "PUT" as const,
        url: buildUrl(config.confluenceBaseUrl, `/wiki/api/v2/pages/${input.pageId}`),
        body: {
          id: input.pageId,
          status: String(currentPage.status ?? "current"),
          title: input.title,
          spaceId,
          version: {
            // Optimistic concurrency: server rejects with 409 if someone
            // else bumped the version between our read and this write.
            number: Number(version) + 1,
          },
          body: {
            representation: "storage",
            value: input.body,
          },
        },
      };

      if (input.dryRun) {
        return {
          ok: true,
          dryRun: true,
          data: request,
        };
      }

      const raw = await sendJsonRequest({
        config,
        fetchImpl: options.fetchImpl,
        url: request.url,
        method: request.method,
        body: request.body,
        errorPrefix: "Confluence request failed",
        // Map a 409 to a clearer conflict message for the CLI user.
        handleResponseError(response) {
          if (response.status === 409) {
            return new Error(`Confluence update conflict: page ${input.pageId} was updated by someone else`);
          }

          return undefined;
        },
      });

      return {
        ok: true,
        data: raw,
      };
    },

    // Post a footer comment (storage format) on a page.
    async commentPage(input: CommentInput): Promise<CommandOutput<unknown>> {
      const request = {
        method: "POST" as const,
        url: buildUrl(config.confluenceBaseUrl, "/wiki/api/v2/footer-comments"),
        body: {
          pageId: input.pageId,
          body: {
            representation: "storage",
            value: input.body,
          },
        },
      };

      if (input.dryRun) {
        return {
          ok: true,
          dryRun: true,
          data: request,
        };
      }

      const raw = await sendJsonRequest({
        config,
        fetchImpl: options.fetchImpl,
        url: request.url,
        method: request.method,
        body: request.body,
        errorPrefix: "Confluence request failed",
      });

      return {
        ok: true,
        data: raw,
      };
    },
  };
}
|
||||
13
skills/atlassian/cursor/scripts/src/files.ts
Normal file
13
skills/atlassian/cursor/scripts/src/files.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import { readFile } from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
|
||||
export async function readWorkspaceFile(filePath: string, cwd: string) {
|
||||
const resolved = path.resolve(cwd, filePath);
|
||||
const relative = path.relative(cwd, resolved);
|
||||
|
||||
if (relative.startsWith("..") || path.isAbsolute(relative)) {
|
||||
throw new Error(`--body-file must stay within the active workspace: ${filePath}`);
|
||||
}
|
||||
|
||||
return readFile(resolved, "utf8");
|
||||
}
|
||||
65
skills/atlassian/cursor/scripts/src/http.ts
Normal file
65
skills/atlassian/cursor/scripts/src/http.ts
Normal file
@@ -0,0 +1,65 @@
|
||||
import { createBasicAuthHeader } from "./config.js";
|
||||
import type { AtlassianConfig, FetchLike } from "./types.js";
|
||||
|
||||
export type HttpMethod = "GET" | "POST" | "PUT";
|
||||
|
||||
export function createJsonHeaders(config: AtlassianConfig, includeJsonBody: boolean) {
|
||||
const headers: Array<[string, string]> = [
|
||||
["Accept", "application/json"],
|
||||
["Authorization", createBasicAuthHeader(config)],
|
||||
];
|
||||
|
||||
if (includeJsonBody) {
|
||||
headers.push(["Content-Type", "application/json"]);
|
||||
}
|
||||
|
||||
return headers;
|
||||
}
|
||||
|
||||
export async function parseResponse(response: Response) {
|
||||
if (response.status === 204) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const contentType = response.headers.get("content-type") ?? "";
|
||||
|
||||
if (contentType.includes("application/json")) {
|
||||
return response.json();
|
||||
}
|
||||
|
||||
return response.text();
|
||||
}
|
||||
|
||||
export async function sendJsonRequest(options: {
|
||||
config: AtlassianConfig;
|
||||
fetchImpl?: FetchLike;
|
||||
url: string;
|
||||
method: HttpMethod;
|
||||
body?: unknown;
|
||||
errorPrefix: string;
|
||||
handleResponseError?: (response: Response) => Error | undefined;
|
||||
}) {
|
||||
const fetchImpl = options.fetchImpl ?? globalThis.fetch;
|
||||
|
||||
if (!fetchImpl) {
|
||||
throw new Error("Fetch API is not available in this runtime");
|
||||
}
|
||||
|
||||
const response = await fetchImpl(options.url, {
|
||||
method: options.method,
|
||||
headers: createJsonHeaders(options.config, options.body !== undefined),
|
||||
...(options.body === undefined ? {} : { body: JSON.stringify(options.body) }),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
const customError = options.handleResponseError?.(response);
|
||||
|
||||
if (customError) {
|
||||
throw customError;
|
||||
}
|
||||
|
||||
throw new Error(`${options.errorPrefix}: ${response.status} ${response.statusText}`);
|
||||
}
|
||||
|
||||
return parseResponse(response);
|
||||
}
|
||||
264
skills/atlassian/cursor/scripts/src/jira.ts
Normal file
264
skills/atlassian/cursor/scripts/src/jira.ts
Normal file
@@ -0,0 +1,264 @@
|
||||
import { markdownToAdf } from "./adf.js";
|
||||
import { sendJsonRequest } from "./http.js";
|
||||
import type { AtlassianConfig, CommandOutput, FetchLike, JiraIssueSummary } from "./types.js";
|
||||
|
||||
// Fields requested from Jira for both search and single-issue reads;
// keep in sync with normalizeIssue below, which flattens exactly these.
const ISSUE_FIELDS = ["summary", "issuetype", "status", "assignee", "created", "updated"] as const;

/** Dependencies for createJiraClient; fetchImpl is injectable for tests. */
type JiraClientOptions = {
  config: AtlassianConfig;
  fetchImpl?: FetchLike;
};

/** Arguments for the jira-search command. */
type SearchInput = {
  jql: string;
  maxResults: number;
  startAt: number;
};

/** Arguments for jira-create; project falls back to the configured default. */
type CreateInput = {
  project?: string;
  type: string;
  summary: string;
  description?: string;
  dryRun?: boolean;
};

/** Arguments for jira-update; at least one of summary/description must be set. */
type UpdateInput = {
  issue: string;
  summary?: string;
  description?: string;
  dryRun?: boolean;
};

/** Arguments for jira-comment; body is markdown, converted to ADF on send. */
type CommentInput = {
  issue: string;
  body: string;
  dryRun?: boolean;
};

/** Arguments for jira-transition; transition is the id sent as transition.id. */
type TransitionInput = {
  issue: string;
  transition: string;
  dryRun?: boolean;
};
|
||||
|
||||
function normalizeIssue(config: AtlassianConfig, issue: Record<string, unknown>): JiraIssueSummary {
|
||||
const fields = (issue.fields ?? {}) as Record<string, unknown>;
|
||||
const issueType = (fields.issuetype ?? {}) as Record<string, unknown>;
|
||||
const status = (fields.status ?? {}) as Record<string, unknown>;
|
||||
const assignee = (fields.assignee ?? {}) as Record<string, unknown>;
|
||||
|
||||
return {
|
||||
key: String(issue.key ?? ""),
|
||||
summary: String(fields.summary ?? ""),
|
||||
issueType: String(issueType.name ?? ""),
|
||||
status: String(status.name ?? ""),
|
||||
assignee: assignee.displayName ? String(assignee.displayName) : undefined,
|
||||
created: String(fields.created ?? ""),
|
||||
updated: String(fields.updated ?? ""),
|
||||
url: `${config.baseUrl}/browse/${issue.key ?? ""}`,
|
||||
};
|
||||
}
|
||||
|
||||
function createRequest(config: AtlassianConfig, method: "GET" | "POST" | "PUT", path: string, body?: unknown) {
|
||||
const url = new URL(path, `${config.jiraBaseUrl}/`);
|
||||
|
||||
return {
|
||||
method,
|
||||
url: url.toString(),
|
||||
...(body === undefined ? {} : { body }),
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Create a thin Jira Cloud REST v3 client bound to one config/fetch pair.
 * Every method resolves to a CommandOutput envelope; all mutating methods
 * honor dryRun by returning the prepared request instead of sending it.
 */
export function createJiraClient(options: JiraClientOptions) {
  // Resolve the fetch implementation once so every call shares it.
  const fetchImpl = options.fetchImpl ?? globalThis.fetch;

  if (!fetchImpl) {
    throw new Error("Fetch API is not available in this runtime");
  }

  // Shared transport: resolve the path against jiraBaseUrl and send JSON.
  async function send(method: "GET" | "POST" | "PUT", path: string, body?: unknown) {
    const request = createRequest(options.config, method, path, body);
    return sendJsonRequest({
      config: options.config,
      fetchImpl,
      url: request.url,
      method,
      body,
      errorPrefix: "Jira request failed",
    });
  }

  return {
    // JQL search normalized to flat issue summaries plus paging metadata.
    // NOTE(review): Atlassian has deprecated POST /rest/api/3/search in
    // favor of /rest/api/3/search/jql — confirm the target sites still
    // accept this endpoint.
    async searchIssues(input: SearchInput): Promise<CommandOutput<unknown>> {
      const raw = (await send("POST", "/rest/api/3/search", {
        jql: input.jql,
        maxResults: input.maxResults,
        startAt: input.startAt,
        fields: [...ISSUE_FIELDS],
      })) as Record<string, unknown>;

      // Defend against payloads that omit or mistype the issues array.
      const issues = Array.isArray(raw.issues) ? raw.issues : [];

      return {
        ok: true,
        data: {
          issues: issues.map((issue) => normalizeIssue(options.config, issue as Record<string, unknown>)),
          startAt: Number(raw.startAt ?? input.startAt),
          maxResults: Number(raw.maxResults ?? input.maxResults),
          total: Number(raw.total ?? issues.length),
        },
      };
    },

    // Fetch one issue (summary fields only) and return it both normalized
    // and raw.
    async getIssue(issue: string): Promise<CommandOutput<unknown>> {
      // URL is built only to encode the fields query string; send() then
      // re-resolves pathname+search against the same base.
      const url = new URL(`/rest/api/3/issue/${issue}`, `${options.config.jiraBaseUrl}/`);
      url.searchParams.set("fields", ISSUE_FIELDS.join(","));

      const raw = (await send("GET", `${url.pathname}${url.search}`)) as Record<string, unknown>;

      return {
        ok: true,
        data: {
          issue: normalizeIssue(options.config, raw),
        },
        raw,
      };
    },

    // List the workflow transitions currently available on an issue.
    async getTransitions(issue: string): Promise<CommandOutput<unknown>> {
      const raw = (await send(
        "GET",
        `/rest/api/3/issue/${issue}/transitions`,
      )) as { transitions?: Array<Record<string, unknown>> };

      return {
        ok: true,
        data: {
          transitions: (raw.transitions ?? []).map((transition) => ({
            id: String(transition.id ?? ""),
            name: String(transition.name ?? ""),
            toStatus: String(((transition.to ?? {}) as Record<string, unknown>).name ?? ""),
            hasScreen: Boolean(transition.hasScreen),
          })),
        },
      };
    },

    // Create an issue; description markdown is converted to ADF.
    async createIssue(input: CreateInput): Promise<CommandOutput<unknown>> {
      const project = input.project || options.config.defaultProject;

      if (!project) {
        throw new Error("jira-create requires --project or ATLASSIAN_DEFAULT_PROJECT");
      }

      const request = createRequest(options.config, "POST", "/rest/api/3/issue", {
        fields: {
          project: { key: project },
          issuetype: { name: input.type },
          summary: input.summary,
          ...(input.description ? { description: markdownToAdf(input.description) } : {}),
        },
      });

      // Dry run: echo the prepared request without touching the network.
      if (input.dryRun) {
        return {
          ok: true,
          dryRun: true,
          data: request,
        };
      }

      const raw = await send("POST", "/rest/api/3/issue", request.body);
      return { ok: true, data: raw };
    },

    // Update summary and/or description; rejects a no-op call explicitly.
    async updateIssue(input: UpdateInput): Promise<CommandOutput<unknown>> {
      const fields: Record<string, unknown> = {};

      if (input.summary) {
        fields.summary = input.summary;
      }

      if (input.description) {
        fields.description = markdownToAdf(input.description);
      }

      if (Object.keys(fields).length === 0) {
        throw new Error("jira-update requires --summary and/or --description-file");
      }

      const request = createRequest(options.config, "PUT", `/rest/api/3/issue/${input.issue}`, {
        fields,
      });

      if (input.dryRun) {
        return {
          ok: true,
          dryRun: true,
          data: request,
        };
      }

      // Jira answers 204 here, so report success synthetically.
      await send("PUT", `/rest/api/3/issue/${input.issue}`, request.body);
      return {
        ok: true,
        data: {
          issue: input.issue,
          updated: true,
        },
      };
    },

    // Add a comment; body markdown is converted to ADF.
    async commentIssue(input: CommentInput): Promise<CommandOutput<unknown>> {
      const request = createRequest(options.config, "POST", `/rest/api/3/issue/${input.issue}/comment`, {
        body: markdownToAdf(input.body),
      });

      if (input.dryRun) {
        return {
          ok: true,
          dryRun: true,
          data: request,
        };
      }

      const raw = await send("POST", `/rest/api/3/issue/${input.issue}/comment`, request.body);
      return {
        ok: true,
        data: raw,
      };
    },

    // Execute a workflow transition by id (see getTransitions for ids).
    async transitionIssue(input: TransitionInput): Promise<CommandOutput<unknown>> {
      const request = createRequest(
        options.config,
        "POST",
        `/rest/api/3/issue/${input.issue}/transitions`,
        {
          transition: {
            id: input.transition,
          },
        },
      );

      if (input.dryRun) {
        return {
          ok: true,
          dryRun: true,
          data: request,
        };
      }

      // Jira answers 204 here, so report success synthetically.
      await send("POST", `/rest/api/3/issue/${input.issue}/transitions`, request.body);
      return {
        ok: true,
        data: {
          issue: input.issue,
          transitioned: true,
          transition: input.transition,
        },
      };
    },
  };
}
|
||||
44
skills/atlassian/cursor/scripts/src/output.ts
Normal file
44
skills/atlassian/cursor/scripts/src/output.ts
Normal file
@@ -0,0 +1,44 @@
|
||||
import type { CommandOutput, OutputFormat, Writer } from "./types.js";
|
||||
|
||||
function renderText(payload: CommandOutput<unknown>) {
|
||||
const data = payload.data as Record<string, unknown>;
|
||||
|
||||
if (Array.isArray(data?.issues)) {
|
||||
return data.issues
|
||||
.map((issue) => {
|
||||
const item = issue as Record<string, string>;
|
||||
return `${item.key} [${item.status}] ${item.issueType} - ${item.summary}`;
|
||||
})
|
||||
.join("\n");
|
||||
}
|
||||
|
||||
if (data?.issue && typeof data.issue === "object") {
|
||||
const issue = data.issue as Record<string, string>;
|
||||
return [
|
||||
issue.key,
|
||||
`${issue.issueType} | ${issue.status}`,
|
||||
issue.summary,
|
||||
issue.url,
|
||||
].join("\n");
|
||||
}
|
||||
|
||||
if (Array.isArray(data?.transitions)) {
|
||||
return data.transitions
|
||||
.map((transition) => {
|
||||
const item = transition as Record<string, string>;
|
||||
return `${item.id} ${item.name} -> ${item.toStatus}`;
|
||||
})
|
||||
.join("\n");
|
||||
}
|
||||
|
||||
return JSON.stringify(payload, null, 2);
|
||||
}
|
||||
|
||||
export function writeOutput(
|
||||
writer: Writer,
|
||||
payload: CommandOutput<unknown>,
|
||||
format: OutputFormat = "json",
|
||||
) {
|
||||
const body = format === "text" ? renderText(payload) : JSON.stringify(payload, null, 2);
|
||||
writer.write(`${body}\n`);
|
||||
}
|
||||
85
skills/atlassian/cursor/scripts/src/raw.ts
Normal file
85
skills/atlassian/cursor/scripts/src/raw.ts
Normal file
@@ -0,0 +1,85 @@
|
||||
import { readWorkspaceFile } from "./files.js";
|
||||
import { sendJsonRequest } from "./http.js";
|
||||
import type { AtlassianConfig, CommandOutput, FetchLike } from "./types.js";
|
||||
|
||||
const JIRA_ALLOWED_PREFIXES = ["/rest/api/3/"] as const;
|
||||
const CONFLUENCE_ALLOWED_PREFIXES = ["/wiki/api/v2/", "/wiki/rest/api/"] as const;
|
||||
|
||||
type RawInput = {
|
||||
product: "jira" | "confluence";
|
||||
method: string;
|
||||
path: string;
|
||||
bodyFile?: string;
|
||||
cwd: string;
|
||||
dryRun?: boolean;
|
||||
};
|
||||
|
||||
function getAllowedPrefixes(product: RawInput["product"]) {
|
||||
return product === "jira" ? JIRA_ALLOWED_PREFIXES : CONFLUENCE_ALLOWED_PREFIXES;
|
||||
}
|
||||
|
||||
function buildUrl(config: AtlassianConfig, product: RawInput["product"], path: string) {
|
||||
const baseUrl = product === "jira" ? config.jiraBaseUrl : config.confluenceBaseUrl;
|
||||
return new URL(path, `${baseUrl}/`).toString();
|
||||
}
|
||||
|
||||
function validateMethod(method: string): asserts method is "GET" | "POST" | "PUT" {
|
||||
if (!["GET", "POST", "PUT"].includes(method)) {
|
||||
throw new Error("raw only allows GET, POST, and PUT");
|
||||
}
|
||||
}
|
||||
|
||||
function validatePath(product: RawInput["product"], path: string) {
|
||||
const allowedPrefixes = getAllowedPrefixes(product);
|
||||
|
||||
if (!allowedPrefixes.some((prefix) => path.startsWith(prefix))) {
|
||||
throw new Error(`raw path is not allowed for ${product}: ${path}`);
|
||||
}
|
||||
}
|
||||
|
||||
async function readRawBody(bodyFile: string | undefined, cwd: string) {
|
||||
if (!bodyFile) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const contents = await readWorkspaceFile(bodyFile, cwd);
|
||||
return JSON.parse(contents) as unknown;
|
||||
}
|
||||
|
||||
export async function runRawCommand(
|
||||
config: AtlassianConfig,
|
||||
fetchImpl: FetchLike | undefined,
|
||||
input: RawInput,
|
||||
): Promise<CommandOutput<unknown>> {
|
||||
validateMethod(input.method);
|
||||
validatePath(input.product, input.path);
|
||||
|
||||
const body = await readRawBody(input.bodyFile, input.cwd);
|
||||
const request = {
|
||||
method: input.method,
|
||||
url: buildUrl(config, input.product, input.path),
|
||||
...(body === undefined ? {} : { body }),
|
||||
};
|
||||
|
||||
if (input.dryRun) {
|
||||
return {
|
||||
ok: true,
|
||||
dryRun: true,
|
||||
data: request,
|
||||
};
|
||||
}
|
||||
|
||||
const data = await sendJsonRequest({
|
||||
config,
|
||||
fetchImpl,
|
||||
url: request.url,
|
||||
method: input.method,
|
||||
body,
|
||||
errorPrefix: "Raw request failed",
|
||||
});
|
||||
|
||||
return {
|
||||
ok: true,
|
||||
data,
|
||||
};
|
||||
}
|
||||
35
skills/atlassian/cursor/scripts/src/types.ts
Normal file
35
skills/atlassian/cursor/scripts/src/types.ts
Normal file
@@ -0,0 +1,35 @@
|
||||
/** Resolved Atlassian Cloud connection settings produced by the config loader. */
export type AtlassianConfig = {
  // Site root used for browse links (see JiraIssueSummary.url).
  baseUrl: string;
  // Product-specific API roots that REST paths are resolved against.
  jiraBaseUrl: string;
  confluenceBaseUrl: string;
  // Basic-auth credentials (email + API token).
  email: string;
  apiToken: string;
  // Optional fallbacks used when a command omits --project / --space.
  defaultProject?: string;
  defaultSpace?: string;
};

/** Uniform success envelope every CLI command emits. */
export type CommandOutput<T> = {
  ok: true;
  data: T;
  // Set when the command only prepared a request without sending it.
  dryRun?: boolean;
  // Unprocessed API payload, attached by commands that also normalize data.
  raw?: unknown;
};

/** Flattened Jira issue fields used for normalized output. */
export type JiraIssueSummary = {
  key: string;
  summary: string;
  issueType: string;
  status: string;
  // Undefined when Jira reports no assignee display name.
  assignee?: string;
  created: string;
  updated: string;
  // Browse link built from AtlassianConfig.baseUrl.
  url: string;
};

/** Minimal sink interface satisfied by process.stdout/stderr and test buffers. */
export type Writer = {
  write(chunk: string | Uint8Array): unknown;
};

/** Fetch-compatible function type, injectable for tests. */
export type FetchLike = typeof fetch;

/** Supported values for the --format option. */
export type OutputFormat = "json" | "text";
|
||||
15
skills/atlassian/cursor/scripts/tsconfig.json
Normal file
15
skills/atlassian/cursor/scripts/tsconfig.json
Normal file
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2022",
|
||||
"module": "NodeNext",
|
||||
"moduleResolution": "NodeNext",
|
||||
"esModuleInterop": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"strict": true,
|
||||
"skipLibCheck": true,
|
||||
"resolveJsonModule": true,
|
||||
"types": ["node"],
|
||||
"outDir": "dist"
|
||||
},
|
||||
"include": ["src/**/*.ts", "scripts/**/*.ts", "tests/**/*.ts"]
|
||||
}
|
||||
20
skills/atlassian/opencode/scripts/package.json
Normal file
20
skills/atlassian/opencode/scripts/package.json
Normal file
@@ -0,0 +1,20 @@
|
||||
{
|
||||
"name": "atlassian-skill-scripts",
|
||||
"version": "1.0.0",
|
||||
"description": "Shared runtime for the Atlassian skill",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"atlassian": "tsx src/cli.ts",
|
||||
"typecheck": "tsc --noEmit"
|
||||
},
|
||||
"dependencies": {
|
||||
"commander": "^13.1.0",
|
||||
"dotenv": "^16.4.7"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^24.3.0",
|
||||
"tsx": "^4.20.5",
|
||||
"typescript": "^5.9.2"
|
||||
},
|
||||
"packageManager": "pnpm@10.18.1+sha512.77a884a165cbba2d8d1c19e3b4880eee6d2fcabd0d879121e282196b80042351d5eb3ca0935fa599da1dc51265cc68816ad2bddd2a2de5ea9fdf92adbec7cd34"
|
||||
}
|
||||
361
skills/atlassian/opencode/scripts/pnpm-lock.yaml
generated
Normal file
361
skills/atlassian/opencode/scripts/pnpm-lock.yaml
generated
Normal file
@@ -0,0 +1,361 @@
|
||||
lockfileVersion: '9.0'
|
||||
|
||||
settings:
|
||||
autoInstallPeers: true
|
||||
excludeLinksFromLockfile: false
|
||||
|
||||
importers:
|
||||
|
||||
.:
|
||||
dependencies:
|
||||
commander:
|
||||
specifier: ^13.1.0
|
||||
version: 13.1.0
|
||||
dotenv:
|
||||
specifier: ^16.4.7
|
||||
version: 16.6.1
|
||||
devDependencies:
|
||||
'@types/node':
|
||||
specifier: ^24.3.0
|
||||
version: 24.12.0
|
||||
tsx:
|
||||
specifier: ^4.20.5
|
||||
version: 4.21.0
|
||||
typescript:
|
||||
specifier: ^5.9.2
|
||||
version: 5.9.3
|
||||
|
||||
packages:
|
||||
|
||||
'@esbuild/aix-ppc64@0.27.3':
|
||||
resolution: {integrity: sha512-9fJMTNFTWZMh5qwrBItuziu834eOCUcEqymSH7pY+zoMVEZg3gcPuBNxH1EvfVYe9h0x/Ptw8KBzv7qxb7l8dg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [ppc64]
|
||||
os: [aix]
|
||||
|
||||
'@esbuild/android-arm64@0.27.3':
|
||||
resolution: {integrity: sha512-YdghPYUmj/FX2SYKJ0OZxf+iaKgMsKHVPF1MAq/P8WirnSpCStzKJFjOjzsW0QQ7oIAiccHdcqjbHmJxRb/dmg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [android]
|
||||
|
||||
'@esbuild/android-arm@0.27.3':
|
||||
resolution: {integrity: sha512-i5D1hPY7GIQmXlXhs2w8AWHhenb00+GxjxRncS2ZM7YNVGNfaMxgzSGuO8o8SJzRc/oZwU2bcScvVERk03QhzA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm]
|
||||
os: [android]
|
||||
|
||||
'@esbuild/android-x64@0.27.3':
|
||||
resolution: {integrity: sha512-IN/0BNTkHtk8lkOM8JWAYFg4ORxBkZQf9zXiEOfERX/CzxW3Vg1ewAhU7QSWQpVIzTW+b8Xy+lGzdYXV6UZObQ==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [android]
|
||||
|
||||
'@esbuild/darwin-arm64@0.27.3':
|
||||
resolution: {integrity: sha512-Re491k7ByTVRy0t3EKWajdLIr0gz2kKKfzafkth4Q8A5n1xTHrkqZgLLjFEHVD+AXdUGgQMq+Godfq45mGpCKg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [darwin]
|
||||
|
||||
'@esbuild/darwin-x64@0.27.3':
|
||||
resolution: {integrity: sha512-vHk/hA7/1AckjGzRqi6wbo+jaShzRowYip6rt6q7VYEDX4LEy1pZfDpdxCBnGtl+A5zq8iXDcyuxwtv3hNtHFg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [darwin]
|
||||
|
||||
'@esbuild/freebsd-arm64@0.27.3':
|
||||
resolution: {integrity: sha512-ipTYM2fjt3kQAYOvo6vcxJx3nBYAzPjgTCk7QEgZG8AUO3ydUhvelmhrbOheMnGOlaSFUoHXB6un+A7q4ygY9w==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [freebsd]
|
||||
|
||||
'@esbuild/freebsd-x64@0.27.3':
|
||||
resolution: {integrity: sha512-dDk0X87T7mI6U3K9VjWtHOXqwAMJBNN2r7bejDsc+j03SEjtD9HrOl8gVFByeM0aJksoUuUVU9TBaZa2rgj0oA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [freebsd]
|
||||
|
||||
'@esbuild/linux-arm64@0.27.3':
|
||||
resolution: {integrity: sha512-sZOuFz/xWnZ4KH3YfFrKCf1WyPZHakVzTiqji3WDc0BCl2kBwiJLCXpzLzUBLgmp4veFZdvN5ChW4Eq/8Fc2Fg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-arm@0.27.3':
|
||||
resolution: {integrity: sha512-s6nPv2QkSupJwLYyfS+gwdirm0ukyTFNl3KTgZEAiJDd+iHZcbTPPcWCcRYH+WlNbwChgH2QkE9NSlNrMT8Gfw==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-ia32@0.27.3':
|
||||
resolution: {integrity: sha512-yGlQYjdxtLdh0a3jHjuwOrxQjOZYD/C9PfdbgJJF3TIZWnm/tMd/RcNiLngiu4iwcBAOezdnSLAwQDPqTmtTYg==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [ia32]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-loong64@0.27.3':
|
||||
resolution: {integrity: sha512-WO60Sn8ly3gtzhyjATDgieJNet/KqsDlX5nRC5Y3oTFcS1l0KWba+SEa9Ja1GfDqSF1z6hif/SkpQJbL63cgOA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [loong64]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-mips64el@0.27.3':
|
||||
resolution: {integrity: sha512-APsymYA6sGcZ4pD6k+UxbDjOFSvPWyZhjaiPyl/f79xKxwTnrn5QUnXR5prvetuaSMsb4jgeHewIDCIWljrSxw==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [mips64el]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-ppc64@0.27.3':
|
||||
resolution: {integrity: sha512-eizBnTeBefojtDb9nSh4vvVQ3V9Qf9Df01PfawPcRzJH4gFSgrObw+LveUyDoKU3kxi5+9RJTCWlj4FjYXVPEA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [ppc64]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-riscv64@0.27.3':
|
||||
resolution: {integrity: sha512-3Emwh0r5wmfm3ssTWRQSyVhbOHvqegUDRd0WhmXKX2mkHJe1SFCMJhagUleMq+Uci34wLSipf8Lagt4LlpRFWQ==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [riscv64]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-s390x@0.27.3':
|
||||
resolution: {integrity: sha512-pBHUx9LzXWBc7MFIEEL0yD/ZVtNgLytvx60gES28GcWMqil8ElCYR4kvbV2BDqsHOvVDRrOxGySBM9Fcv744hw==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [s390x]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/linux-x64@0.27.3':
|
||||
resolution: {integrity: sha512-Czi8yzXUWIQYAtL/2y6vogER8pvcsOsk5cpwL4Gk5nJqH5UZiVByIY8Eorm5R13gq+DQKYg0+JyQoytLQas4dA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [linux]
|
||||
|
||||
'@esbuild/netbsd-arm64@0.27.3':
|
||||
resolution: {integrity: sha512-sDpk0RgmTCR/5HguIZa9n9u+HVKf40fbEUt+iTzSnCaGvY9kFP0YKBWZtJaraonFnqef5SlJ8/TiPAxzyS+UoA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [netbsd]
|
||||
|
||||
'@esbuild/netbsd-x64@0.27.3':
|
||||
resolution: {integrity: sha512-P14lFKJl/DdaE00LItAukUdZO5iqNH7+PjoBm+fLQjtxfcfFE20Xf5CrLsmZdq5LFFZzb5JMZ9grUwvtVYzjiA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [netbsd]
|
||||
|
||||
'@esbuild/openbsd-arm64@0.27.3':
|
||||
resolution: {integrity: sha512-AIcMP77AvirGbRl/UZFTq5hjXK+2wC7qFRGoHSDrZ5v5b8DK/GYpXW3CPRL53NkvDqb9D+alBiC/dV0Fb7eJcw==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [openbsd]
|
||||
|
||||
'@esbuild/openbsd-x64@0.27.3':
|
||||
resolution: {integrity: sha512-DnW2sRrBzA+YnE70LKqnM3P+z8vehfJWHXECbwBmH/CU51z6FiqTQTHFenPlHmo3a8UgpLyH3PT+87OViOh1AQ==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [openbsd]
|
||||
|
||||
'@esbuild/openharmony-arm64@0.27.3':
|
||||
resolution: {integrity: sha512-NinAEgr/etERPTsZJ7aEZQvvg/A6IsZG/LgZy+81wON2huV7SrK3e63dU0XhyZP4RKGyTm7aOgmQk0bGp0fy2g==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [openharmony]
|
||||
|
||||
'@esbuild/sunos-x64@0.27.3':
|
||||
resolution: {integrity: sha512-PanZ+nEz+eWoBJ8/f8HKxTTD172SKwdXebZ0ndd953gt1HRBbhMsaNqjTyYLGLPdoWHy4zLU7bDVJztF5f3BHA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [sunos]
|
||||
|
||||
'@esbuild/win32-arm64@0.27.3':
|
||||
resolution: {integrity: sha512-B2t59lWWYrbRDw/tjiWOuzSsFh1Y/E95ofKz7rIVYSQkUYBjfSgf6oeYPNWHToFRr2zx52JKApIcAS/D5TUBnA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [arm64]
|
||||
os: [win32]
|
||||
|
||||
'@esbuild/win32-ia32@0.27.3':
|
||||
resolution: {integrity: sha512-QLKSFeXNS8+tHW7tZpMtjlNb7HKau0QDpwm49u0vUp9y1WOF+PEzkU84y9GqYaAVW8aH8f3GcBck26jh54cX4Q==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [ia32]
|
||||
os: [win32]
|
||||
|
||||
'@esbuild/win32-x64@0.27.3':
|
||||
resolution: {integrity: sha512-4uJGhsxuptu3OcpVAzli+/gWusVGwZZHTlS63hh++ehExkVT8SgiEf7/uC/PclrPPkLhZqGgCTjd0VWLo6xMqA==}
|
||||
engines: {node: '>=18'}
|
||||
cpu: [x64]
|
||||
os: [win32]
|
||||
|
||||
'@types/node@24.12.0':
|
||||
resolution: {integrity: sha512-GYDxsZi3ChgmckRT9HPU0WEhKLP08ev/Yfcq2AstjrDASOYCSXeyjDsHg4v5t4jOj7cyDX3vmprafKlWIG9MXQ==}
|
||||
|
||||
commander@13.1.0:
|
||||
resolution: {integrity: sha512-/rFeCpNJQbhSZjGVwO9RFV3xPqbnERS8MmIQzCtD/zl6gpJuV/bMLuN92oG3F7d8oDEHHRrujSXNUr8fpjntKw==}
|
||||
engines: {node: '>=18'}
|
||||
|
||||
dotenv@16.6.1:
|
||||
resolution: {integrity: sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==}
|
||||
engines: {node: '>=12'}
|
||||
|
||||
esbuild@0.27.3:
|
||||
resolution: {integrity: sha512-8VwMnyGCONIs6cWue2IdpHxHnAjzxnw2Zr7MkVxB2vjmQ2ivqGFb4LEG3SMnv0Gb2F/G/2yA8zUaiL1gywDCCg==}
|
||||
engines: {node: '>=18'}
|
||||
hasBin: true
|
||||
|
||||
fsevents@2.3.3:
|
||||
resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==}
|
||||
engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
|
||||
os: [darwin]
|
||||
|
||||
get-tsconfig@4.13.6:
|
||||
resolution: {integrity: sha512-shZT/QMiSHc/YBLxxOkMtgSid5HFoauqCE3/exfsEcwg1WkeqjG+V40yBbBrsD+jW2HDXcs28xOfcbm2jI8Ddw==}
|
||||
|
||||
resolve-pkg-maps@1.0.0:
|
||||
resolution: {integrity: sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==}
|
||||
|
||||
tsx@4.21.0:
|
||||
resolution: {integrity: sha512-5C1sg4USs1lfG0GFb2RLXsdpXqBSEhAaA/0kPL01wxzpMqLILNxIxIOKiILz+cdg/pLnOUxFYOR5yhHU666wbw==}
|
||||
engines: {node: '>=18.0.0'}
|
||||
hasBin: true
|
||||
|
||||
typescript@5.9.3:
|
||||
resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==}
|
||||
engines: {node: '>=14.17'}
|
||||
hasBin: true
|
||||
|
||||
undici-types@7.16.0:
|
||||
resolution: {integrity: sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==}
|
||||
|
||||
snapshots:
|
||||
|
||||
'@esbuild/aix-ppc64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/android-arm64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/android-arm@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/android-x64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/darwin-arm64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/darwin-x64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/freebsd-arm64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/freebsd-x64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-arm64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-arm@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-ia32@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-loong64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-mips64el@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-ppc64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-riscv64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-s390x@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/linux-x64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/netbsd-arm64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/netbsd-x64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/openbsd-arm64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/openbsd-x64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/openharmony-arm64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/sunos-x64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/win32-arm64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/win32-ia32@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@esbuild/win32-x64@0.27.3':
|
||||
optional: true
|
||||
|
||||
'@types/node@24.12.0':
|
||||
dependencies:
|
||||
undici-types: 7.16.0
|
||||
|
||||
commander@13.1.0: {}
|
||||
|
||||
dotenv@16.6.1: {}
|
||||
|
||||
esbuild@0.27.3:
|
||||
optionalDependencies:
|
||||
'@esbuild/aix-ppc64': 0.27.3
|
||||
'@esbuild/android-arm': 0.27.3
|
||||
'@esbuild/android-arm64': 0.27.3
|
||||
'@esbuild/android-x64': 0.27.3
|
||||
'@esbuild/darwin-arm64': 0.27.3
|
||||
'@esbuild/darwin-x64': 0.27.3
|
||||
'@esbuild/freebsd-arm64': 0.27.3
|
||||
'@esbuild/freebsd-x64': 0.27.3
|
||||
'@esbuild/linux-arm': 0.27.3
|
||||
'@esbuild/linux-arm64': 0.27.3
|
||||
'@esbuild/linux-ia32': 0.27.3
|
||||
'@esbuild/linux-loong64': 0.27.3
|
||||
'@esbuild/linux-mips64el': 0.27.3
|
||||
'@esbuild/linux-ppc64': 0.27.3
|
||||
'@esbuild/linux-riscv64': 0.27.3
|
||||
'@esbuild/linux-s390x': 0.27.3
|
||||
'@esbuild/linux-x64': 0.27.3
|
||||
'@esbuild/netbsd-arm64': 0.27.3
|
||||
'@esbuild/netbsd-x64': 0.27.3
|
||||
'@esbuild/openbsd-arm64': 0.27.3
|
||||
'@esbuild/openbsd-x64': 0.27.3
|
||||
'@esbuild/openharmony-arm64': 0.27.3
|
||||
'@esbuild/sunos-x64': 0.27.3
|
||||
'@esbuild/win32-arm64': 0.27.3
|
||||
'@esbuild/win32-ia32': 0.27.3
|
||||
'@esbuild/win32-x64': 0.27.3
|
||||
|
||||
fsevents@2.3.3:
|
||||
optional: true
|
||||
|
||||
get-tsconfig@4.13.6:
|
||||
dependencies:
|
||||
resolve-pkg-maps: 1.0.0
|
||||
|
||||
resolve-pkg-maps@1.0.0: {}
|
||||
|
||||
tsx@4.21.0:
|
||||
dependencies:
|
||||
esbuild: 0.27.3
|
||||
get-tsconfig: 4.13.6
|
||||
optionalDependencies:
|
||||
fsevents: 2.3.3
|
||||
|
||||
typescript@5.9.3: {}
|
||||
|
||||
undici-types@7.16.0: {}
|
||||
92
skills/atlassian/opencode/scripts/src/adf.ts
Normal file
92
skills/atlassian/opencode/scripts/src/adf.ts
Normal file
@@ -0,0 +1,92 @@
|
||||
const TEXT_NODE = "text";
|
||||
|
||||
function textNode(text: string) {
|
||||
return {
|
||||
type: TEXT_NODE,
|
||||
text,
|
||||
};
|
||||
}
|
||||
|
||||
function paragraphNode(lines: string[]) {
|
||||
const content: Array<{ type: string; text?: string }> = [];
|
||||
|
||||
lines.forEach((line, index) => {
|
||||
if (index > 0) {
|
||||
content.push({ type: "hardBreak" });
|
||||
}
|
||||
|
||||
if (line.length > 0) {
|
||||
content.push(textNode(line));
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
type: "paragraph",
|
||||
...(content.length > 0 ? { content } : {}),
|
||||
};
|
||||
}
|
||||
|
||||
export function markdownToAdf(input: string) {
|
||||
const lines = input.replace(/\r\n/g, "\n").split("\n");
|
||||
const content: Array<Record<string, unknown>> = [];
|
||||
let index = 0;
|
||||
|
||||
while (index < lines.length) {
|
||||
const current = lines[index]?.trimEnd() ?? "";
|
||||
|
||||
if (current.trim().length === 0) {
|
||||
index += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
const heading = current.match(/^(#{1,6})\s+(.*)$/);
|
||||
|
||||
if (heading) {
|
||||
content.push({
|
||||
type: "heading",
|
||||
attrs: { level: heading[1].length },
|
||||
content: [textNode(heading[2])],
|
||||
});
|
||||
index += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (/^[-*]\s+/.test(current)) {
|
||||
const items: Array<Record<string, unknown>> = [];
|
||||
|
||||
while (index < lines.length && /^[-*]\s+/.test(lines[index] ?? "")) {
|
||||
items.push({
|
||||
type: "listItem",
|
||||
content: [
|
||||
{
|
||||
type: "paragraph",
|
||||
content: [textNode((lines[index] ?? "").replace(/^[-*]\s+/, ""))],
|
||||
},
|
||||
],
|
||||
});
|
||||
index += 1;
|
||||
}
|
||||
|
||||
content.push({
|
||||
type: "bulletList",
|
||||
content: items,
|
||||
});
|
||||
continue;
|
||||
}
|
||||
|
||||
const paragraphLines: string[] = [];
|
||||
|
||||
while (index < lines.length && (lines[index]?.trim().length ?? 0) > 0) {
|
||||
paragraphLines.push(lines[index] ?? "");
|
||||
index += 1;
|
||||
}
|
||||
|
||||
content.push(paragraphNode(paragraphLines));
|
||||
}
|
||||
|
||||
return {
|
||||
type: "doc",
|
||||
version: 1,
|
||||
content,
|
||||
};
|
||||
}
|
||||
339
skills/atlassian/opencode/scripts/src/cli.ts
Normal file
339
skills/atlassian/opencode/scripts/src/cli.ts
Normal file
@@ -0,0 +1,339 @@
|
||||
import process from "node:process";
|
||||
import { pathToFileURL } from "node:url";
|
||||
|
||||
import { Command } from "commander";
|
||||
|
||||
import { createConfluenceClient } from "./confluence.js";
|
||||
import { loadConfig } from "./config.js";
|
||||
import { readWorkspaceFile } from "./files.js";
|
||||
import { createJiraClient } from "./jira.js";
|
||||
import { writeOutput } from "./output.js";
|
||||
import { runRawCommand } from "./raw.js";
|
||||
import type { FetchLike, OutputFormat, Writer } from "./types.js";
|
||||
|
||||
/** Injection points for tests; every field falls back to the real process. */
type CliContext = {
  cwd?: string;
  env?: NodeJS.ProcessEnv;
  // Custom fetch implementation, forwarded to both product clients.
  fetchImpl?: FetchLike;
  stdout?: Writer;
  stderr?: Writer;
};
|
||||
|
||||
function resolveFormat(format: string | undefined): OutputFormat {
|
||||
return format === "text" ? "text" : "json";
|
||||
}
|
||||
|
||||
/**
 * Build the per-invocation runtime: resolves cwd/env/stdio from the CLI
 * context (falling back to the real process) and lazily constructs the
 * configuration and product clients so that commands which never hit the
 * network (e.g. --help) never require credentials.
 */
function createRuntime(context: CliContext) {
  const cwd = context.cwd ?? process.cwd();
  const env = context.env ?? process.env;
  const stdout = context.stdout ?? process.stdout;
  const stderr = context.stderr ?? process.stderr;
  // Lazy singletons: created on first use, reused for the whole run.
  let configCache: ReturnType<typeof loadConfig> | undefined;
  let jiraCache: ReturnType<typeof createJiraClient> | undefined;
  let confluenceCache: ReturnType<typeof createConfluenceClient> | undefined;

  // Load and cache configuration from the environment on first access.
  function getConfig() {
    configCache ??= loadConfig(env, { cwd });
    return configCache;
  }

  // Jira client bound to the cached config and the injected fetch, if any.
  function getJiraClient() {
    jiraCache ??= createJiraClient({
      config: getConfig(),
      fetchImpl: context.fetchImpl,
    });
    return jiraCache;
  }

  // Confluence client, same lazy pattern as the Jira client above.
  function getConfluenceClient() {
    confluenceCache ??= createConfluenceClient({
      config: getConfig(),
      fetchImpl: context.fetchImpl,
    });
    return confluenceCache;
  }

  // Optional body-file reader; delegates workspace containment checks to
  // readWorkspaceFile.
  async function readBodyFile(filePath: string | undefined) {
    if (!filePath) {
      return undefined;
    }

    return readWorkspaceFile(filePath, cwd);
  }

  return {
    cwd,
    stdout,
    stderr,
    readBodyFile,
    getConfig,
    getJiraClient,
    getConfluenceClient,
    fetchImpl: context.fetchImpl,
  };
}
|
||||
|
||||
export function buildProgram(context: CliContext = {}) {
|
||||
const runtime = createRuntime(context);
|
||||
const program = new Command()
|
||||
.name("atlassian")
|
||||
.description("Portable Atlassian CLI for multi-agent skills")
|
||||
.version("0.1.0");
|
||||
|
||||
program
|
||||
.command("health")
|
||||
.description("Validate configuration and Atlassian connectivity")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action((options) => {
|
||||
writeOutput(
|
||||
runtime.stdout,
|
||||
{
|
||||
ok: true,
|
||||
data: {
|
||||
baseUrl: runtime.getConfig().baseUrl,
|
||||
jiraBaseUrl: runtime.getConfig().jiraBaseUrl,
|
||||
confluenceBaseUrl: runtime.getConfig().confluenceBaseUrl,
|
||||
defaultProject: runtime.getConfig().defaultProject,
|
||||
defaultSpace: runtime.getConfig().defaultSpace,
|
||||
},
|
||||
},
|
||||
resolveFormat(options.format),
|
||||
);
|
||||
});
|
||||
|
||||
program
|
||||
.command("conf-search")
|
||||
.requiredOption("--query <query>", "CQL search query")
|
||||
.option("--max-results <number>", "Maximum results to return", "50")
|
||||
.option("--start-at <number>", "Result offset", "0")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action(async (options) => {
|
||||
const payload = await runtime.getConfluenceClient().searchPages({
|
||||
query: options.query,
|
||||
maxResults: Number(options.maxResults),
|
||||
startAt: Number(options.startAt),
|
||||
});
|
||||
|
||||
writeOutput(runtime.stdout, payload, resolveFormat(options.format));
|
||||
});
|
||||
|
||||
program
|
||||
.command("conf-get")
|
||||
.requiredOption("--page <page>", "Confluence page ID")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action(async (options) => {
|
||||
const payload = await runtime.getConfluenceClient().getPage(options.page);
|
||||
writeOutput(runtime.stdout, payload, resolveFormat(options.format));
|
||||
});
|
||||
|
||||
program
|
||||
.command("conf-create")
|
||||
.requiredOption("--title <title>", "Confluence page title")
|
||||
.requiredOption("--body-file <path>", "Workspace-relative storage-format body file")
|
||||
.option("--space <space>", "Confluence space ID")
|
||||
.option("--dry-run", "Print the request without sending it")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action(async (options) => {
|
||||
const payload = await runtime.getConfluenceClient().createPage({
|
||||
space: options.space,
|
||||
title: options.title,
|
||||
body: (await runtime.readBodyFile(options.bodyFile)) as string,
|
||||
dryRun: Boolean(options.dryRun),
|
||||
});
|
||||
|
||||
writeOutput(runtime.stdout, payload, resolveFormat(options.format));
|
||||
});
|
||||
|
||||
program
|
||||
.command("conf-update")
|
||||
.requiredOption("--page <page>", "Confluence page ID")
|
||||
.requiredOption("--title <title>", "Confluence page title")
|
||||
.requiredOption("--body-file <path>", "Workspace-relative storage-format body file")
|
||||
.option("--dry-run", "Print the request without sending it")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action(async (options) => {
|
||||
const payload = await runtime.getConfluenceClient().updatePage({
|
||||
pageId: options.page,
|
||||
title: options.title,
|
||||
body: (await runtime.readBodyFile(options.bodyFile)) as string,
|
||||
dryRun: Boolean(options.dryRun),
|
||||
});
|
||||
|
||||
writeOutput(runtime.stdout, payload, resolveFormat(options.format));
|
||||
});
|
||||
|
||||
program
|
||||
.command("conf-comment")
|
||||
.requiredOption("--page <page>", "Confluence page ID")
|
||||
.requiredOption("--body-file <path>", "Workspace-relative storage-format body file")
|
||||
.option("--dry-run", "Print the request without sending it")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action(async (options) => {
|
||||
const payload = await runtime.getConfluenceClient().commentPage({
|
||||
pageId: options.page,
|
||||
body: (await runtime.readBodyFile(options.bodyFile)) as string,
|
||||
dryRun: Boolean(options.dryRun),
|
||||
});
|
||||
|
||||
writeOutput(runtime.stdout, payload, resolveFormat(options.format));
|
||||
});
|
||||
|
||||
program
|
||||
.command("conf-children")
|
||||
.requiredOption("--page <page>", "Confluence page ID")
|
||||
.option("--max-results <number>", "Maximum results to return", "50")
|
||||
.option("--start-at <number>", "Cursor/start token", "0")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action(async (options) => {
|
||||
const payload = await runtime.getConfluenceClient().listChildren(
|
||||
options.page,
|
||||
Number(options.maxResults),
|
||||
Number(options.startAt),
|
||||
);
|
||||
|
||||
writeOutput(runtime.stdout, payload, resolveFormat(options.format));
|
||||
});
|
||||
|
||||
program
|
||||
.command("raw")
|
||||
.requiredOption("--product <product>", "jira or confluence")
|
||||
.requiredOption("--method <method>", "GET, POST, or PUT")
|
||||
.requiredOption("--path <path>", "Validated API path")
|
||||
.option("--body-file <path>", "Workspace-relative JSON file")
|
||||
.option("--dry-run", "Print the request without sending it")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action(async (options) => {
|
||||
const payload = await runRawCommand(runtime.getConfig(), runtime.fetchImpl, {
|
||||
product: options.product,
|
||||
method: String(options.method).toUpperCase(),
|
||||
path: options.path,
|
||||
bodyFile: options.bodyFile,
|
||||
cwd: runtime.cwd,
|
||||
dryRun: Boolean(options.dryRun),
|
||||
});
|
||||
|
||||
writeOutput(runtime.stdout, payload, resolveFormat(options.format));
|
||||
});
|
||||
|
||||
program
|
||||
.command("jira-search")
|
||||
.requiredOption("--jql <jql>", "JQL expression to execute")
|
||||
.option("--max-results <number>", "Maximum results to return", "50")
|
||||
.option("--start-at <number>", "Result offset", "0")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action(async (options) => {
|
||||
const payload = await runtime.getJiraClient().searchIssues({
|
||||
jql: options.jql,
|
||||
maxResults: Number(options.maxResults),
|
||||
startAt: Number(options.startAt),
|
||||
});
|
||||
|
||||
writeOutput(runtime.stdout, payload, resolveFormat(options.format));
|
||||
});
|
||||
|
||||
program
|
||||
.command("jira-get")
|
||||
.requiredOption("--issue <issue>", "Issue key")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action(async (options) => {
|
||||
const payload = await runtime.getJiraClient().getIssue(options.issue);
|
||||
writeOutput(runtime.stdout, payload, resolveFormat(options.format));
|
||||
});
|
||||
|
||||
program
|
||||
.command("jira-create")
|
||||
.requiredOption("--type <type>", "Issue type name")
|
||||
.requiredOption("--summary <summary>", "Issue summary")
|
||||
.option("--project <project>", "Project key")
|
||||
.option("--description-file <path>", "Workspace-relative markdown/text file")
|
||||
.option("--dry-run", "Print the request without sending it")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action(async (options) => {
|
||||
const payload = await runtime.getJiraClient().createIssue({
|
||||
project: options.project,
|
||||
type: options.type,
|
||||
summary: options.summary,
|
||||
description: await runtime.readBodyFile(options.descriptionFile),
|
||||
dryRun: Boolean(options.dryRun),
|
||||
});
|
||||
|
||||
writeOutput(runtime.stdout, payload, resolveFormat(options.format));
|
||||
});
|
||||
|
||||
program
|
||||
.command("jira-update")
|
||||
.requiredOption("--issue <issue>", "Issue key")
|
||||
.option("--summary <summary>", "Updated summary")
|
||||
.option("--description-file <path>", "Workspace-relative markdown/text file")
|
||||
.option("--dry-run", "Print the request without sending it")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action(async (options) => {
|
||||
const payload = await runtime.getJiraClient().updateIssue({
|
||||
issue: options.issue,
|
||||
summary: options.summary,
|
||||
description: await runtime.readBodyFile(options.descriptionFile),
|
||||
dryRun: Boolean(options.dryRun),
|
||||
});
|
||||
|
||||
writeOutput(runtime.stdout, payload, resolveFormat(options.format));
|
||||
});
|
||||
|
||||
program
|
||||
.command("jira-comment")
|
||||
.requiredOption("--issue <issue>", "Issue key")
|
||||
.requiredOption("--body-file <path>", "Workspace-relative markdown/text file")
|
||||
.option("--dry-run", "Print the request without sending it")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action(async (options) => {
|
||||
const payload = await runtime.getJiraClient().commentIssue({
|
||||
issue: options.issue,
|
||||
body: (await runtime.readBodyFile(options.bodyFile)) as string,
|
||||
dryRun: Boolean(options.dryRun),
|
||||
});
|
||||
|
||||
writeOutput(runtime.stdout, payload, resolveFormat(options.format));
|
||||
});
|
||||
|
||||
program
|
||||
.command("jira-transitions")
|
||||
.requiredOption("--issue <issue>", "Issue key")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action(async (options) => {
|
||||
const payload = await runtime.getJiraClient().getTransitions(options.issue);
|
||||
writeOutput(runtime.stdout, payload, resolveFormat(options.format));
|
||||
});
|
||||
|
||||
program
|
||||
.command("jira-transition")
|
||||
.requiredOption("--issue <issue>", "Issue key")
|
||||
.requiredOption("--transition <transition>", "Transition ID")
|
||||
.option("--dry-run", "Print the request without sending it")
|
||||
.option("--format <format>", "Output format", "json")
|
||||
.action(async (options) => {
|
||||
const payload = await runtime.getJiraClient().transitionIssue({
|
||||
issue: options.issue,
|
||||
transition: options.transition,
|
||||
dryRun: Boolean(options.dryRun),
|
||||
});
|
||||
|
||||
writeOutput(runtime.stdout, payload, resolveFormat(options.format));
|
||||
});
|
||||
|
||||
return program;
|
||||
}
|
||||
|
||||
export async function runCli(argv = process.argv, context: CliContext = {}) {
|
||||
const program = buildProgram(context);
|
||||
await program.parseAsync(argv);
|
||||
}
|
||||
|
||||
// True only when this module is the Node entry script (its file URL matches
// process.argv[1]), so importing the module for tests never triggers parsing.
const isDirectExecution =
  Boolean(process.argv[1]) && import.meta.url === pathToFileURL(process.argv[1]).href;

if (isDirectExecution) {
  // Report CLI failures on stderr and mark the process as failed via
  // exitCode (rather than process.exit) so pending stream writes can flush.
  runCli().catch((error: unknown) => {
    const message = error instanceof Error ? error.message : String(error);
    process.stderr.write(`${message}\n`);
    process.exitCode = 1;
  });
}
|
||||
52
skills/atlassian/opencode/scripts/src/config.ts
Normal file
52
skills/atlassian/opencode/scripts/src/config.ts
Normal file
@@ -0,0 +1,52 @@
|
||||
import path from "node:path";
|
||||
|
||||
import { config as loadDotEnv } from "dotenv";
|
||||
|
||||
import type { AtlassianConfig } from "./types.js";
|
||||
|
||||
function normalizeBaseUrl(value: string) {
|
||||
return value.replace(/\/+$/, "");
|
||||
}
|
||||
|
||||
function readRequired(env: NodeJS.ProcessEnv, key: string) {
|
||||
const value = env[key]?.trim();
|
||||
|
||||
if (!value) {
|
||||
throw new Error(`Missing required environment variable: ${key}`);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
export function loadConfig(
|
||||
env: NodeJS.ProcessEnv = process.env,
|
||||
options?: {
|
||||
cwd?: string;
|
||||
},
|
||||
): AtlassianConfig {
|
||||
loadDotEnv({
|
||||
path: path.resolve(options?.cwd ?? process.cwd(), ".env"),
|
||||
processEnv: env as Record<string, string>,
|
||||
override: false,
|
||||
});
|
||||
|
||||
const baseUrl = normalizeBaseUrl(readRequired(env, "ATLASSIAN_BASE_URL"));
|
||||
|
||||
return {
|
||||
baseUrl,
|
||||
jiraBaseUrl: normalizeBaseUrl(env.ATLASSIAN_JIRA_BASE_URL?.trim() || baseUrl),
|
||||
confluenceBaseUrl: normalizeBaseUrl(env.ATLASSIAN_CONFLUENCE_BASE_URL?.trim() || baseUrl),
|
||||
email: readRequired(env, "ATLASSIAN_EMAIL"),
|
||||
apiToken: readRequired(env, "ATLASSIAN_API_TOKEN"),
|
||||
defaultProject: env.ATLASSIAN_DEFAULT_PROJECT?.trim() || undefined,
|
||||
defaultSpace: env.ATLASSIAN_DEFAULT_SPACE?.trim() || undefined,
|
||||
};
|
||||
}
|
||||
|
||||
export function createBasicAuthHeader(config: {
|
||||
email: string;
|
||||
apiToken: string;
|
||||
[key: string]: unknown;
|
||||
}) {
|
||||
return `Basic ${Buffer.from(`${config.email}:${config.apiToken}`).toString("base64")}`;
|
||||
}
|
||||
292
skills/atlassian/opencode/scripts/src/confluence.ts
Normal file
292
skills/atlassian/opencode/scripts/src/confluence.ts
Normal file
@@ -0,0 +1,292 @@
|
||||
import { sendJsonRequest } from "./http.js";
|
||||
import type { AtlassianConfig, CommandOutput, FetchLike } from "./types.js";
|
||||
|
||||
// Dependencies for building a Confluence client; fetchImpl is injectable for tests.
type ConfluenceClientOptions = {
  config: AtlassianConfig;
  fetchImpl?: FetchLike;
};

// Inputs for the CQL search command.
type SearchInput = {
  query: string;
  maxResults: number;
  startAt: number;
};

// Inputs for page creation; `space` falls back to the configured default space.
type CreateInput = {
  space?: string;
  title: string;
  body: string;
  dryRun?: boolean;
};

// Inputs for replacing a page's title/body; the version bump is handled by
// the client (it re-reads the current version before sending the PUT).
type UpdateInput = {
  pageId: string;
  title: string;
  body: string;
  dryRun?: boolean;
};

// Inputs for adding a footer comment (storage-format body).
type CommentInput = {
  pageId: string;
  body: string;
  dryRun?: boolean;
};

// Shape of the normalized page summaries this module emits.
// NOTE(review): this alias is not referenced anywhere in this file — confirm
// it is still needed, or use it as normalizePage's return type.
type PageSummary = {
  id: string;
  title: string;
  type: string;
  status?: string;
  spaceId?: string;
  url?: string;
};
|
||||
|
||||
function buildUrl(baseUrl: string, path: string) {
|
||||
return new URL(path, `${baseUrl}/`).toString();
|
||||
}
|
||||
|
||||
function normalizePage(baseUrl: string, page: Record<string, unknown>, excerpt?: string) {
|
||||
const links = (page._links ?? {}) as Record<string, unknown>;
|
||||
|
||||
return {
|
||||
id: String(page.id ?? ""),
|
||||
title: String(page.title ?? ""),
|
||||
type: String(page.type ?? "page"),
|
||||
...(page.status ? { status: String(page.status) } : {}),
|
||||
...(page.spaceId ? { spaceId: String(page.spaceId) } : {}),
|
||||
...(excerpt ? { excerpt } : {}),
|
||||
...(links.webui ? { url: `${baseUrl}${String(links.webui)}` } : {}),
|
||||
};
|
||||
}
|
||||
|
||||
export function createConfluenceClient(options: ConfluenceClientOptions) {
|
||||
const config = options.config;
|
||||
|
||||
async function getPageForUpdate(pageId: string) {
|
||||
return (await sendJsonRequest({
|
||||
config,
|
||||
fetchImpl: options.fetchImpl,
|
||||
url: buildUrl(config.confluenceBaseUrl, `/wiki/api/v2/pages/${pageId}?body-format=storage`),
|
||||
method: "GET",
|
||||
errorPrefix: "Confluence request failed",
|
||||
})) as Record<string, unknown>;
|
||||
}
|
||||
|
||||
return {
|
||||
async searchPages(input: SearchInput): Promise<CommandOutput<unknown>> {
|
||||
const url = new URL("/wiki/rest/api/search", `${config.confluenceBaseUrl}/`);
|
||||
url.searchParams.set("cql", input.query);
|
||||
url.searchParams.set("limit", String(input.maxResults));
|
||||
url.searchParams.set("start", String(input.startAt));
|
||||
|
||||
const raw = (await sendJsonRequest({
|
||||
config,
|
||||
fetchImpl: options.fetchImpl,
|
||||
url: url.toString(),
|
||||
method: "GET",
|
||||
errorPrefix: "Confluence request failed",
|
||||
})) as Record<string, unknown>;
|
||||
|
||||
const results = Array.isArray(raw.results) ? raw.results : [];
|
||||
|
||||
return {
|
||||
ok: true,
|
||||
data: {
|
||||
pages: results.map((entry) => {
|
||||
const result = entry as Record<string, unknown>;
|
||||
return normalizePage(
|
||||
config.baseUrl,
|
||||
(result.content ?? {}) as Record<string, unknown>,
|
||||
result.excerpt ? String(result.excerpt) : undefined,
|
||||
);
|
||||
}),
|
||||
startAt: Number(raw.start ?? input.startAt),
|
||||
maxResults: Number(raw.limit ?? input.maxResults),
|
||||
total: Number(raw.totalSize ?? raw.size ?? results.length),
|
||||
},
|
||||
};
|
||||
},
|
||||
|
||||
async getPage(pageId: string): Promise<CommandOutput<unknown>> {
|
||||
const raw = (await sendJsonRequest({
|
||||
config,
|
||||
fetchImpl: options.fetchImpl,
|
||||
url: buildUrl(config.confluenceBaseUrl, `/wiki/api/v2/pages/${pageId}?body-format=storage`),
|
||||
method: "GET",
|
||||
errorPrefix: "Confluence request failed",
|
||||
})) as Record<string, unknown>;
|
||||
|
||||
const body = ((raw.body ?? {}) as Record<string, unknown>).storage as Record<string, unknown> | undefined;
|
||||
|
||||
return {
|
||||
ok: true,
|
||||
data: {
|
||||
page: {
|
||||
...normalizePage(config.baseUrl, raw),
|
||||
version: Number((((raw.version ?? {}) as Record<string, unknown>).number ?? 0)),
|
||||
body: body?.value ? String(body.value) : "",
|
||||
},
|
||||
},
|
||||
raw,
|
||||
};
|
||||
},
|
||||
|
||||
async listChildren(pageId: string, maxResults: number, startAt: number): Promise<CommandOutput<unknown>> {
|
||||
const url = new URL(`/wiki/api/v2/pages/${pageId}/direct-children`, `${config.confluenceBaseUrl}/`);
|
||||
url.searchParams.set("limit", String(maxResults));
|
||||
url.searchParams.set("cursor", String(startAt));
|
||||
|
||||
const raw = (await sendJsonRequest({
|
||||
config,
|
||||
fetchImpl: options.fetchImpl,
|
||||
url: url.toString(),
|
||||
method: "GET",
|
||||
errorPrefix: "Confluence request failed",
|
||||
})) as Record<string, unknown>;
|
||||
|
||||
const results = Array.isArray(raw.results) ? raw.results : [];
|
||||
const links = (raw._links ?? {}) as Record<string, unknown>;
|
||||
|
||||
return {
|
||||
ok: true,
|
||||
data: {
|
||||
pages: results.map((page) => normalizePage(config.baseUrl, page as Record<string, unknown>)),
|
||||
nextCursor: links.next ? String(links.next) : null,
|
||||
},
|
||||
};
|
||||
},
|
||||
|
||||
async createPage(input: CreateInput): Promise<CommandOutput<unknown>> {
|
||||
const spaceId = input.space || config.defaultSpace;
|
||||
|
||||
if (!spaceId) {
|
||||
throw new Error("conf-create requires --space or ATLASSIAN_DEFAULT_SPACE");
|
||||
}
|
||||
|
||||
const request = {
|
||||
method: "POST" as const,
|
||||
url: buildUrl(config.confluenceBaseUrl, "/wiki/api/v2/pages"),
|
||||
body: {
|
||||
spaceId,
|
||||
title: input.title,
|
||||
status: "current",
|
||||
body: {
|
||||
representation: "storage",
|
||||
value: input.body,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
if (input.dryRun) {
|
||||
return {
|
||||
ok: true,
|
||||
dryRun: true,
|
||||
data: request,
|
||||
};
|
||||
}
|
||||
|
||||
const raw = await sendJsonRequest({
|
||||
config,
|
||||
fetchImpl: options.fetchImpl,
|
||||
url: request.url,
|
||||
method: request.method,
|
||||
body: request.body,
|
||||
errorPrefix: "Confluence request failed",
|
||||
});
|
||||
|
||||
return {
|
||||
ok: true,
|
||||
data: raw,
|
||||
};
|
||||
},
|
||||
|
||||
async updatePage(input: UpdateInput): Promise<CommandOutput<unknown>> {
|
||||
const currentPage = await getPageForUpdate(input.pageId);
|
||||
const version = (((currentPage.version ?? {}) as Record<string, unknown>).number ?? 0) as number;
|
||||
const spaceId = String(currentPage.spaceId ?? "");
|
||||
|
||||
const request = {
|
||||
method: "PUT" as const,
|
||||
url: buildUrl(config.confluenceBaseUrl, `/wiki/api/v2/pages/${input.pageId}`),
|
||||
body: {
|
||||
id: input.pageId,
|
||||
status: String(currentPage.status ?? "current"),
|
||||
title: input.title,
|
||||
spaceId,
|
||||
version: {
|
||||
number: Number(version) + 1,
|
||||
},
|
||||
body: {
|
||||
representation: "storage",
|
||||
value: input.body,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
if (input.dryRun) {
|
||||
return {
|
||||
ok: true,
|
||||
dryRun: true,
|
||||
data: request,
|
||||
};
|
||||
}
|
||||
|
||||
const raw = await sendJsonRequest({
|
||||
config,
|
||||
fetchImpl: options.fetchImpl,
|
||||
url: request.url,
|
||||
method: request.method,
|
||||
body: request.body,
|
||||
errorPrefix: "Confluence request failed",
|
||||
handleResponseError(response) {
|
||||
if (response.status === 409) {
|
||||
return new Error(`Confluence update conflict: page ${input.pageId} was updated by someone else`);
|
||||
}
|
||||
|
||||
return undefined;
|
||||
},
|
||||
});
|
||||
|
||||
return {
|
||||
ok: true,
|
||||
data: raw,
|
||||
};
|
||||
},
|
||||
|
||||
async commentPage(input: CommentInput): Promise<CommandOutput<unknown>> {
|
||||
const request = {
|
||||
method: "POST" as const,
|
||||
url: buildUrl(config.confluenceBaseUrl, "/wiki/api/v2/footer-comments"),
|
||||
body: {
|
||||
pageId: input.pageId,
|
||||
body: {
|
||||
representation: "storage",
|
||||
value: input.body,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
if (input.dryRun) {
|
||||
return {
|
||||
ok: true,
|
||||
dryRun: true,
|
||||
data: request,
|
||||
};
|
||||
}
|
||||
|
||||
const raw = await sendJsonRequest({
|
||||
config,
|
||||
fetchImpl: options.fetchImpl,
|
||||
url: request.url,
|
||||
method: request.method,
|
||||
body: request.body,
|
||||
errorPrefix: "Confluence request failed",
|
||||
});
|
||||
|
||||
return {
|
||||
ok: true,
|
||||
data: raw,
|
||||
};
|
||||
},
|
||||
};
|
||||
}
|
||||
13
skills/atlassian/opencode/scripts/src/files.ts
Normal file
13
skills/atlassian/opencode/scripts/src/files.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
import { readFile } from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
|
||||
export async function readWorkspaceFile(filePath: string, cwd: string) {
|
||||
const resolved = path.resolve(cwd, filePath);
|
||||
const relative = path.relative(cwd, resolved);
|
||||
|
||||
if (relative.startsWith("..") || path.isAbsolute(relative)) {
|
||||
throw new Error(`--body-file must stay within the active workspace: ${filePath}`);
|
||||
}
|
||||
|
||||
return readFile(resolved, "utf8");
|
||||
}
|
||||
65
skills/atlassian/opencode/scripts/src/http.ts
Normal file
65
skills/atlassian/opencode/scripts/src/http.ts
Normal file
@@ -0,0 +1,65 @@
|
||||
import { createBasicAuthHeader } from "./config.js";
|
||||
import type { AtlassianConfig, FetchLike } from "./types.js";
|
||||
|
||||
// The only HTTP verbs this CLI issues (no DELETE/PATCH variants are defined).
export type HttpMethod = "GET" | "POST" | "PUT";
|
||||
|
||||
export function createJsonHeaders(config: AtlassianConfig, includeJsonBody: boolean) {
|
||||
const headers: Array<[string, string]> = [
|
||||
["Accept", "application/json"],
|
||||
["Authorization", createBasicAuthHeader(config)],
|
||||
];
|
||||
|
||||
if (includeJsonBody) {
|
||||
headers.push(["Content-Type", "application/json"]);
|
||||
}
|
||||
|
||||
return headers;
|
||||
}
|
||||
|
||||
export async function parseResponse(response: Response) {
|
||||
if (response.status === 204) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const contentType = response.headers.get("content-type") ?? "";
|
||||
|
||||
if (contentType.includes("application/json")) {
|
||||
return response.json();
|
||||
}
|
||||
|
||||
return response.text();
|
||||
}
|
||||
|
||||
export async function sendJsonRequest(options: {
|
||||
config: AtlassianConfig;
|
||||
fetchImpl?: FetchLike;
|
||||
url: string;
|
||||
method: HttpMethod;
|
||||
body?: unknown;
|
||||
errorPrefix: string;
|
||||
handleResponseError?: (response: Response) => Error | undefined;
|
||||
}) {
|
||||
const fetchImpl = options.fetchImpl ?? globalThis.fetch;
|
||||
|
||||
if (!fetchImpl) {
|
||||
throw new Error("Fetch API is not available in this runtime");
|
||||
}
|
||||
|
||||
const response = await fetchImpl(options.url, {
|
||||
method: options.method,
|
||||
headers: createJsonHeaders(options.config, options.body !== undefined),
|
||||
...(options.body === undefined ? {} : { body: JSON.stringify(options.body) }),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
const customError = options.handleResponseError?.(response);
|
||||
|
||||
if (customError) {
|
||||
throw customError;
|
||||
}
|
||||
|
||||
throw new Error(`${options.errorPrefix}: ${response.status} ${response.statusText}`);
|
||||
}
|
||||
|
||||
return parseResponse(response);
|
||||
}
|
||||
264
skills/atlassian/opencode/scripts/src/jira.ts
Normal file
264
skills/atlassian/opencode/scripts/src/jira.ts
Normal file
@@ -0,0 +1,264 @@
|
||||
import { markdownToAdf } from "./adf.js";
|
||||
import { sendJsonRequest } from "./http.js";
|
||||
import type { AtlassianConfig, CommandOutput, FetchLike, JiraIssueSummary } from "./types.js";
|
||||
|
||||
// Fields requested from Jira on search/get so responses stay small and carry
// exactly what normalizeIssue reads.
const ISSUE_FIELDS = ["summary", "issuetype", "status", "assignee", "created", "updated"] as const;

// Dependencies for building a Jira client; fetchImpl is injectable for tests.
type JiraClientOptions = {
  config: AtlassianConfig;
  fetchImpl?: FetchLike;
};

// Inputs for the JQL search command.
type SearchInput = {
  jql: string;
  maxResults: number;
  startAt: number;
};

// Inputs for issue creation; `project` falls back to the configured default,
// and `description` is markdown that gets converted to ADF.
type CreateInput = {
  project?: string;
  type: string;
  summary: string;
  description?: string;
  dryRun?: boolean;
};

// Inputs for issue update; at least one of summary/description must be set.
type UpdateInput = {
  issue: string;
  summary?: string;
  description?: string;
  dryRun?: boolean;
};

// Inputs for adding a comment (markdown body, converted to ADF).
type CommentInput = {
  issue: string;
  body: string;
  dryRun?: boolean;
};

// Inputs for applying a workflow transition by its numeric ID.
type TransitionInput = {
  issue: string;
  transition: string;
  dryRun?: boolean;
};
|
||||
|
||||
function normalizeIssue(config: AtlassianConfig, issue: Record<string, unknown>): JiraIssueSummary {
|
||||
const fields = (issue.fields ?? {}) as Record<string, unknown>;
|
||||
const issueType = (fields.issuetype ?? {}) as Record<string, unknown>;
|
||||
const status = (fields.status ?? {}) as Record<string, unknown>;
|
||||
const assignee = (fields.assignee ?? {}) as Record<string, unknown>;
|
||||
|
||||
return {
|
||||
key: String(issue.key ?? ""),
|
||||
summary: String(fields.summary ?? ""),
|
||||
issueType: String(issueType.name ?? ""),
|
||||
status: String(status.name ?? ""),
|
||||
assignee: assignee.displayName ? String(assignee.displayName) : undefined,
|
||||
created: String(fields.created ?? ""),
|
||||
updated: String(fields.updated ?? ""),
|
||||
url: `${config.baseUrl}/browse/${issue.key ?? ""}`,
|
||||
};
|
||||
}
|
||||
|
||||
function createRequest(config: AtlassianConfig, method: "GET" | "POST" | "PUT", path: string, body?: unknown) {
|
||||
const url = new URL(path, `${config.jiraBaseUrl}/`);
|
||||
|
||||
return {
|
||||
method,
|
||||
url: url.toString(),
|
||||
...(body === undefined ? {} : { body }),
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Create a Jira Cloud client over the shared JSON HTTP helper.
 *
 * Every method resolves to a CommandOutput envelope. Write operations accept
 * a dryRun flag and, when set, return the fully built request instead of
 * sending it. Markdown descriptions/comments are converted to ADF before
 * being sent.
 */
export function createJiraClient(options: JiraClientOptions) {
  const fetchImpl = options.fetchImpl ?? globalThis.fetch;

  // Fail at construction time (not per-request) when no Fetch is available.
  if (!fetchImpl) {
    throw new Error("Fetch API is not available in this runtime");
  }

  // Shared transport: resolve the path against the Jira base URL and send it
  // with the standard auth headers and error prefix.
  async function send(method: "GET" | "POST" | "PUT", path: string, body?: unknown) {
    const request = createRequest(options.config, method, path, body);
    return sendJsonRequest({
      config: options.config,
      fetchImpl,
      url: request.url,
      method,
      body,
      errorPrefix: "Jira request failed",
    });
  }

  return {
    // Run a JQL search, requesting only ISSUE_FIELDS, and normalize the hits.
    // NOTE(review): POST /rest/api/3/search has been deprecated by Atlassian
    // in favor of /rest/api/3/search/jql — confirm the migration timeline
    // before relying on this endpoint long-term.
    async searchIssues(input: SearchInput): Promise<CommandOutput<unknown>> {
      const raw = (await send("POST", "/rest/api/3/search", {
        jql: input.jql,
        maxResults: input.maxResults,
        startAt: input.startAt,
        fields: [...ISSUE_FIELDS],
      })) as Record<string, unknown>;

      const issues = Array.isArray(raw.issues) ? raw.issues : [];

      return {
        ok: true,
        data: {
          issues: issues.map((issue) => normalizeIssue(options.config, issue as Record<string, unknown>)),
          startAt: Number(raw.startAt ?? input.startAt),
          maxResults: Number(raw.maxResults ?? input.maxResults),
          total: Number(raw.total ?? issues.length),
        },
      };
    },

    // Fetch a single issue (summary fields only) plus the raw payload.
    async getIssue(issue: string): Promise<CommandOutput<unknown>> {
      // The URL object is used only to build a correctly-encoded path+query;
      // send() re-resolves it against the Jira base URL.
      const url = new URL(`/rest/api/3/issue/${issue}`, `${options.config.jiraBaseUrl}/`);
      url.searchParams.set("fields", ISSUE_FIELDS.join(","));

      const raw = (await send("GET", `${url.pathname}${url.search}`)) as Record<string, unknown>;

      return {
        ok: true,
        data: {
          issue: normalizeIssue(options.config, raw),
        },
        raw,
      };
    },

    // List the workflow transitions currently available on an issue.
    async getTransitions(issue: string): Promise<CommandOutput<unknown>> {
      const raw = (await send(
        "GET",
        `/rest/api/3/issue/${issue}/transitions`,
      )) as { transitions?: Array<Record<string, unknown>> };

      return {
        ok: true,
        data: {
          transitions: (raw.transitions ?? []).map((transition) => ({
            id: String(transition.id ?? ""),
            name: String(transition.name ?? ""),
            toStatus: String(((transition.to ?? {}) as Record<string, unknown>).name ?? ""),
            hasScreen: Boolean(transition.hasScreen),
          })),
        },
      };
    },

    // Create an issue; the project key falls back to the configured default.
    async createIssue(input: CreateInput): Promise<CommandOutput<unknown>> {
      const project = input.project || options.config.defaultProject;

      if (!project) {
        throw new Error("jira-create requires --project or ATLASSIAN_DEFAULT_PROJECT");
      }

      const request = createRequest(options.config, "POST", "/rest/api/3/issue", {
        fields: {
          project: { key: project },
          issuetype: { name: input.type },
          summary: input.summary,
          // Description is optional; when present it is converted from markdown to ADF.
          ...(input.description ? { description: markdownToAdf(input.description) } : {}),
        },
      });

      if (input.dryRun) {
        return {
          ok: true,
          dryRun: true,
          data: request,
        };
      }

      const raw = await send("POST", "/rest/api/3/issue", request.body);
      return { ok: true, data: raw };
    },

    // Update summary and/or description; rejects a no-op update up front.
    async updateIssue(input: UpdateInput): Promise<CommandOutput<unknown>> {
      const fields: Record<string, unknown> = {};

      if (input.summary) {
        fields.summary = input.summary;
      }

      if (input.description) {
        fields.description = markdownToAdf(input.description);
      }

      if (Object.keys(fields).length === 0) {
        throw new Error("jira-update requires --summary and/or --description-file");
      }

      const request = createRequest(options.config, "PUT", `/rest/api/3/issue/${input.issue}`, {
        fields,
      });

      if (input.dryRun) {
        return {
          ok: true,
          dryRun: true,
          data: request,
        };
      }

      // The PUT returns no useful body; report a synthetic confirmation instead.
      await send("PUT", `/rest/api/3/issue/${input.issue}`, request.body);
      return {
        ok: true,
        data: {
          issue: input.issue,
          updated: true,
        },
      };
    },

    // Add a comment (markdown converted to ADF).
    async commentIssue(input: CommentInput): Promise<CommandOutput<unknown>> {
      const request = createRequest(options.config, "POST", `/rest/api/3/issue/${input.issue}/comment`, {
        body: markdownToAdf(input.body),
      });

      if (input.dryRun) {
        return {
          ok: true,
          dryRun: true,
          data: request,
        };
      }

      const raw = await send("POST", `/rest/api/3/issue/${input.issue}/comment`, request.body);
      return {
        ok: true,
        data: raw,
      };
    },

    // Apply a workflow transition by ID.
    async transitionIssue(input: TransitionInput): Promise<CommandOutput<unknown>> {
      const request = createRequest(
        options.config,
        "POST",
        `/rest/api/3/issue/${input.issue}/transitions`,
        {
          transition: {
            id: input.transition,
          },
        },
      );

      if (input.dryRun) {
        return {
          ok: true,
          dryRun: true,
          data: request,
        };
      }

      // The transitions endpoint returns 204; report a synthetic confirmation.
      await send("POST", `/rest/api/3/issue/${input.issue}/transitions`, request.body);
      return {
        ok: true,
        data: {
          issue: input.issue,
          transitioned: true,
          transition: input.transition,
        },
      };
    },
  };
}
|
||||
44
skills/atlassian/opencode/scripts/src/output.ts
Normal file
44
skills/atlassian/opencode/scripts/src/output.ts
Normal file
@@ -0,0 +1,44 @@
|
||||
import type { CommandOutput, OutputFormat, Writer } from "./types.js";
|
||||
|
||||
function renderText(payload: CommandOutput<unknown>) {
|
||||
const data = payload.data as Record<string, unknown>;
|
||||
|
||||
if (Array.isArray(data?.issues)) {
|
||||
return data.issues
|
||||
.map((issue) => {
|
||||
const item = issue as Record<string, string>;
|
||||
return `${item.key} [${item.status}] ${item.issueType} - ${item.summary}`;
|
||||
})
|
||||
.join("\n");
|
||||
}
|
||||
|
||||
if (data?.issue && typeof data.issue === "object") {
|
||||
const issue = data.issue as Record<string, string>;
|
||||
return [
|
||||
issue.key,
|
||||
`${issue.issueType} | ${issue.status}`,
|
||||
issue.summary,
|
||||
issue.url,
|
||||
].join("\n");
|
||||
}
|
||||
|
||||
if (Array.isArray(data?.transitions)) {
|
||||
return data.transitions
|
||||
.map((transition) => {
|
||||
const item = transition as Record<string, string>;
|
||||
return `${item.id} ${item.name} -> ${item.toStatus}`;
|
||||
})
|
||||
.join("\n");
|
||||
}
|
||||
|
||||
return JSON.stringify(payload, null, 2);
|
||||
}
|
||||
|
||||
export function writeOutput(
|
||||
writer: Writer,
|
||||
payload: CommandOutput<unknown>,
|
||||
format: OutputFormat = "json",
|
||||
) {
|
||||
const body = format === "text" ? renderText(payload) : JSON.stringify(payload, null, 2);
|
||||
writer.write(`${body}\n`);
|
||||
}
|
||||
85
skills/atlassian/opencode/scripts/src/raw.ts
Normal file
85
skills/atlassian/opencode/scripts/src/raw.ts
Normal file
@@ -0,0 +1,85 @@
|
||||
import { readWorkspaceFile } from "./files.js";
|
||||
import { sendJsonRequest } from "./http.js";
|
||||
import type { AtlassianConfig, CommandOutput, FetchLike } from "./types.js";
|
||||
|
||||
// Server-relative path prefixes the raw command accepts, per product.
// Requests outside these prefixes are rejected before any I/O happens.
const JIRA_ALLOWED_PREFIXES = ["/rest/api/3/"] as const;
const CONFLUENCE_ALLOWED_PREFIXES = ["/wiki/api/v2/", "/wiki/rest/api/"] as const;

// Parsed arguments for the raw escape-hatch command.
type RawInput = {
  product: "jira" | "confluence";
  method: string; // validated later against GET/POST/PUT
  path: string; // must start with one of the product's allowed prefixes
  bodyFile?: string; // optional JSON request body, read via readWorkspaceFile
  cwd: string; // base directory used to resolve bodyFile
  dryRun?: boolean; // when true, return the built request without sending it
};
|
||||
|
||||
function getAllowedPrefixes(product: RawInput["product"]) {
|
||||
return product === "jira" ? JIRA_ALLOWED_PREFIXES : CONFLUENCE_ALLOWED_PREFIXES;
|
||||
}
|
||||
|
||||
function buildUrl(config: AtlassianConfig, product: RawInput["product"], path: string) {
|
||||
const baseUrl = product === "jira" ? config.jiraBaseUrl : config.confluenceBaseUrl;
|
||||
return new URL(path, `${baseUrl}/`).toString();
|
||||
}
|
||||
|
||||
function validateMethod(method: string): asserts method is "GET" | "POST" | "PUT" {
|
||||
if (!["GET", "POST", "PUT"].includes(method)) {
|
||||
throw new Error("raw only allows GET, POST, and PUT");
|
||||
}
|
||||
}
|
||||
|
||||
function validatePath(product: RawInput["product"], path: string) {
|
||||
const allowedPrefixes = getAllowedPrefixes(product);
|
||||
|
||||
if (!allowedPrefixes.some((prefix) => path.startsWith(prefix))) {
|
||||
throw new Error(`raw path is not allowed for ${product}: ${path}`);
|
||||
}
|
||||
}
|
||||
|
||||
async function readRawBody(bodyFile: string | undefined, cwd: string) {
|
||||
if (!bodyFile) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const contents = await readWorkspaceFile(bodyFile, cwd);
|
||||
return JSON.parse(contents) as unknown;
|
||||
}
|
||||
|
||||
export async function runRawCommand(
|
||||
config: AtlassianConfig,
|
||||
fetchImpl: FetchLike | undefined,
|
||||
input: RawInput,
|
||||
): Promise<CommandOutput<unknown>> {
|
||||
validateMethod(input.method);
|
||||
validatePath(input.product, input.path);
|
||||
|
||||
const body = await readRawBody(input.bodyFile, input.cwd);
|
||||
const request = {
|
||||
method: input.method,
|
||||
url: buildUrl(config, input.product, input.path),
|
||||
...(body === undefined ? {} : { body }),
|
||||
};
|
||||
|
||||
if (input.dryRun) {
|
||||
return {
|
||||
ok: true,
|
||||
dryRun: true,
|
||||
data: request,
|
||||
};
|
||||
}
|
||||
|
||||
const data = await sendJsonRequest({
|
||||
config,
|
||||
fetchImpl,
|
||||
url: request.url,
|
||||
method: input.method,
|
||||
body,
|
||||
errorPrefix: "Raw request failed",
|
||||
});
|
||||
|
||||
return {
|
||||
ok: true,
|
||||
data,
|
||||
};
|
||||
}
|
||||
35
skills/atlassian/opencode/scripts/src/types.ts
Normal file
35
skills/atlassian/opencode/scripts/src/types.ts
Normal file
@@ -0,0 +1,35 @@
|
||||
/** Resolved Atlassian Cloud connection settings shared by all commands. */
export type AtlassianConfig = {
  baseUrl: string;
  jiraBaseUrl: string;
  confluenceBaseUrl: string;
  email: string;
  apiToken: string;
  // Optional fallbacks — presumably used when a command omits an explicit
  // project/space flag (see the jira-create default handling); TODO confirm.
  defaultProject?: string;
  defaultSpace?: string;
};

/** Uniform success envelope that every command resolves with. */
export type CommandOutput<T> = {
  ok: true;
  data: T;
  // Set when the command only built a request without sending it.
  dryRun?: boolean;
  // NOTE(review): purpose not visible from here — presumably the untrimmed
  // API response alongside a trimmed `data` view; confirm against callers.
  raw?: unknown;
};

/** Flattened Jira issue fields surfaced by CLI output. */
export type JiraIssueSummary = {
  key: string;
  summary: string;
  issueType: string;
  status: string;
  assignee?: string;
  created: string;
  updated: string;
  url: string;
};

/** Minimal writable sink for command output (e.g. a stream-like object). */
export type Writer = {
  write(chunk: string | Uint8Array): unknown;
};

/** Fetch-compatible function, injectable for testing. */
export type FetchLike = typeof fetch;

/** Supported serialization formats for command output. */
export type OutputFormat = "json" | "text";
|
||||
15
skills/atlassian/opencode/scripts/tsconfig.json
Normal file
15
skills/atlassian/opencode/scripts/tsconfig.json
Normal file
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2022",
|
||||
"module": "NodeNext",
|
||||
"moduleResolution": "NodeNext",
|
||||
"esModuleInterop": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"strict": true,
|
||||
"skipLibCheck": true,
|
||||
"resolveJsonModule": true,
|
||||
"types": ["node"],
|
||||
"outDir": "dist"
|
||||
},
|
||||
"include": ["src/**/*.ts", "scripts/**/*.ts", "tests/**/*.ts"]
|
||||
}
|
||||
@@ -1 +1,60 @@
|
||||
console.log("sync-agents not implemented yet");
|
||||
import { cp, mkdir, readFile, rm, writeFile } from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
|
||||
// Recreate CommonJS-style __filename/__dirname under ESM.
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

// Canonical runtime sources live one level above this script's directory.
const SHARED_SCRIPTS_DIR = path.resolve(__dirname, "..");
// Skill root assumed two levels above the shared scripts dir — TODO confirm layout.
const ATLASSIAN_SKILL_DIR = path.resolve(SHARED_SCRIPTS_DIR, "..", "..");
// Agent variants that each receive a copy of the runtime bundle.
const AGENTS = ["codex", "claude-code", "cursor", "opencode"] as const;
// Entries copied verbatim; package.json is handled separately by syncAgent.
const ENTRIES_TO_COPY = ["pnpm-lock.yaml", "tsconfig.json", "src"] as const;
|
||||
|
||||
async function replaceEntry(source: string, target: string) {
|
||||
await rm(target, { recursive: true, force: true });
|
||||
await cp(source, target, { recursive: true });
|
||||
}
|
||||
|
||||
async function syncAgent(agent: (typeof AGENTS)[number]) {
|
||||
const targetScriptsDir = path.join(ATLASSIAN_SKILL_DIR, agent, "scripts");
|
||||
await mkdir(targetScriptsDir, { recursive: true });
|
||||
|
||||
for (const entry of ENTRIES_TO_COPY) {
|
||||
await replaceEntry(
|
||||
path.join(SHARED_SCRIPTS_DIR, entry),
|
||||
path.join(targetScriptsDir, entry),
|
||||
);
|
||||
}
|
||||
|
||||
const sourcePackageJson = JSON.parse(
|
||||
await readFile(path.join(SHARED_SCRIPTS_DIR, "package.json"), "utf8"),
|
||||
) as {
|
||||
scripts?: Record<string, string>;
|
||||
[key: string]: unknown;
|
||||
};
|
||||
|
||||
sourcePackageJson.scripts = {
|
||||
atlassian: sourcePackageJson.scripts?.atlassian ?? "tsx src/cli.ts",
|
||||
typecheck: sourcePackageJson.scripts?.typecheck ?? "tsc --noEmit",
|
||||
};
|
||||
|
||||
await writeFile(
|
||||
path.join(targetScriptsDir, "package.json"),
|
||||
`${JSON.stringify(sourcePackageJson, null, 2)}\n`,
|
||||
"utf8",
|
||||
);
|
||||
}
|
||||
|
||||
async function main() {
|
||||
for (const agent of AGENTS) {
|
||||
await syncAgent(agent);
|
||||
}
|
||||
|
||||
console.log(`Synced runtime bundle into ${AGENTS.length} agent script directories.`);
|
||||
}
|
||||
|
||||
main().catch((error: unknown) => {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
console.error(message);
|
||||
process.exitCode = 1;
|
||||
});
|
||||
|
||||
Reference in New Issue
Block a user