Remove pi agent infrastructure

This commit is contained in:
2026-03-31 21:25:31 +00:00
parent 66ff22f9e6
commit 813fd347d5
24 changed files with 709 additions and 4344 deletions

View File

@@ -1,25 +0,0 @@
---
description: Review code changes (working-copy, bookmark, change, PR, or folder)
agent: review
subtask: true
---
Review the following code changes. $ARGUMENTS
Current repository state:
```
!`jj log -r '::@ ~ ::trunk()' -n 15 --no-graph -T 'change_id.shortest(8) ++ " " ++ coalesce(bookmarks, "") ++ " " ++ description.first_line() ++ "\n"' 2>/dev/null || echo "Not a jj repository or no divergence from trunk"`
```
Working copy status:
```
!`jj diff --summary 2>/dev/null || echo "No working-copy changes"`
```
Available bookmarks:
```
!`jj bookmark list --all-remotes -T 'name ++ if(remote, "@" ++ remote, "") ++ "\n"' 2>/dev/null | head -20 || echo "No bookmarks"`
```

View File

@@ -1,4 +1,79 @@
import { type Plugin, tool } from "@opencode-ai/plugin" import type {
TuiPlugin,
TuiDialogSelectOption,
} from "@opencode-ai/plugin/tui"
// A jj bookmark, optionally qualified by the remote it lives on.
type BookmarkRef = { name: string; remote?: string }
// One jj change: short change id plus the first line of its description.
type Change = { changeId: string; title: string }
// Everything the plugin can review, modeled as a discriminated union.
type ReviewTarget =
  | { type: "workingCopy" }
  | { type: "baseBookmark"; bookmark: string; remote?: string }
  | { type: "change"; changeId: string; title?: string }
  | {
      type: "pullRequest"
      prNumber: number
      baseBookmark: string
      baseRemote?: string
      title: string
    }
  | { type: "folder"; paths: string[] }
/** Render a bookmark for display: `name` for local, `name@remote` for remote. */
function bookmarkLabel(b: BookmarkRef): string {
  if (b.remote) {
    return `${b.name}@${b.remote}`
  }
  return b.name
}
/**
 * Build the jj revset that selects this bookmark.
 * JSON.stringify provides safe double-quoting of the names.
 */
function bookmarkRevset(b: BookmarkRef): string {
  const name = JSON.stringify(b.name)
  return b.remote
    ? `remote_bookmarks(exact:${name}, exact:${JSON.stringify(b.remote)})`
    : `bookmarks(exact:${name})`
}
/**
 * Parse `jj bookmark list` output (one "name<TAB>remote" per line) into
 * deduplicated BookmarkRefs. Git-tracking refs (remote "git") are dropped,
 * and an empty remote column becomes a local bookmark (remote undefined).
 */
function parseBookmarks(stdout: string): BookmarkRef[] {
  const refs: BookmarkRef[] = []
  const seen = new Set<string>()
  for (const raw of stdout.trim().split("\n")) {
    const line = raw.trim()
    if (!line) continue
    const [namePart, remotePart = ""] = line.split("\t")
    const name = namePart.trim()
    const remote = remotePart.trim() || undefined
    if (!name || remote === "git") continue
    const key = `${name}@${remote ?? ""}`
    if (seen.has(key)) continue
    seen.add(key)
    refs.push({ name, remote })
  }
  return refs
}
/**
 * Parse `jj log` output (one "changeId<TAB>title" per line) into Changes.
 * Blank lines are skipped; any further tab-separated pieces are folded
 * back into the title with spaces.
 */
function parseChanges(stdout: string): Change[] {
  const changes: Change[] = []
  for (const raw of stdout.trim().split("\n")) {
    const line = raw.trim()
    if (!line) continue
    const [changeId, ...titleParts] = line.split("\t")
    changes.push({ changeId, title: titleParts.join(" ") })
  }
  return changes
}
/**
 * Parse a user-supplied PR reference into a PR number.
 * Accepts a bare number ("123"), a '#'-prefixed number ("#123"), or a
 * GitHub pull-request URL. Returns null for anything else.
 *
 * Uses a strict full-string match instead of bare parseInt, which would
 * silently accept trailing garbage ("12abc" -> 12).
 */
function parsePrRef(ref: string): number | null {
  const trimmed = ref.trim()
  const numMatch = trimmed.match(/^#?(\d+)$/)
  if (numMatch) {
    const num = parseInt(numMatch[1], 10)
    if (num > 0) return num
    return null
  }
  const urlMatch = trimmed.match(/github\.com\/[^/]+\/[^/]+\/pull\/(\d+)/)
  if (urlMatch) return parseInt(urlMatch[1], 10)
  return null
}
function normalizeRemoteUrl(value: string): string { function normalizeRemoteUrl(value: string): string {
return value return value
@@ -10,228 +85,620 @@ function normalizeRemoteUrl(value: string): string {
} }
function sanitizeRemoteName(value: string): string { function sanitizeRemoteName(value: string): string {
const sanitized = value.replace(/[^a-zA-Z0-9._-]+/g, "-").replace(/^-+|-+$/g, "") return (
return sanitized || "gh-pr" value.replace(/[^a-zA-Z0-9._-]+/g, "-").replace(/^-+|-+$/g, "") ||
"gh-pr"
)
} }
export const ReviewPlugin: Plugin = async ({ $ }) => { export const tui: TuiPlugin = async (api) => {
return { const cwd = api.state.path.directory
tool: {
review_materialize_pr: tool({
description:
"Materialize a GitHub pull request locally using jj for code review. " +
"Fetches the PR branch, creates a new jj change on top of it, and returns " +
"metadata needed for the review. Handles cross-repository (forked) PRs. " +
"Call this before reviewing a PR to set up the local state.",
args: {
prNumber: tool.schema
.number()
.describe("The PR number to materialize (e.g. 123)"),
},
async execute(args, context) {
const prNumber = args.prNumber
// Check for pending working-copy changes // -- shell helpers -------------------------------------------------------
const statusResult =
await $`jj diff --summary 2>/dev/null`.nothrow().quiet()
if (
statusResult.exitCode === 0 &&
statusResult.stdout.toString().trim().length > 0
) {
return JSON.stringify({
success: false,
error:
"Cannot materialize PR: you have local jj changes. Please snapshot or discard them first.",
})
}
// Save current position for later restoration async function exec(
const currentChangeResult = cmd: string,
await $`jj log -r @ --no-graph -T 'change_id.shortest(8)'` args: string[],
.nothrow() ): Promise<{ stdout: string; exitCode: number; stderr: string }> {
.quiet() const proc = Bun.spawn([cmd, ...args], {
const savedChangeId = currentChangeResult.stdout.toString().trim() cwd,
stdout: "pipe",
// Get PR info from GitHub CLI stderr: "pipe",
const prInfoResult = })
await $`gh pr view ${prNumber} --json baseRefName,title,headRefName,isCrossRepository,headRepository,headRepositoryOwner` const [stdout, stderr] = await Promise.all([
.nothrow() new Response(proc.stdout).text(),
.quiet() new Response(proc.stderr).text(),
if (prInfoResult.exitCode !== 0) { ])
return JSON.stringify({ const exitCode = await proc.exited
success: false, return { stdout, exitCode, stderr }
error: `Could not find PR #${prNumber}. Make sure gh is authenticated and the PR exists.`,
})
}
let prInfo: {
baseRefName: string
title: string
headRefName: string
isCrossRepository: boolean
headRepository?: { name: string; url: string }
headRepositoryOwner?: { login: string }
}
try {
prInfo = JSON.parse(prInfoResult.stdout.toString())
} catch {
return JSON.stringify({
success: false,
error: "Failed to parse PR info from gh CLI",
})
}
// Determine the remote to use
const remotesResult =
await $`jj git remote list`.nothrow().quiet()
const remotes = remotesResult.stdout
.toString()
.trim()
.split("\n")
.filter(Boolean)
.map((line: string) => {
const [name, ...urlParts] = line.split(/\s+/)
return { name, url: urlParts.join(" ") }
})
.filter(
(r: { name: string; url: string }) => r.name && r.url,
)
const defaultRemote =
remotes.find(
(r: { name: string; url: string }) =>
r.name === "origin",
) ?? remotes[0]
if (!defaultRemote) {
return JSON.stringify({
success: false,
error: "No jj remotes are configured for this repository",
})
}
let remoteName = defaultRemote.name
let addedTemporaryRemote = false
if (prInfo.isCrossRepository) {
const repoSlug =
prInfo.headRepositoryOwner?.login &&
prInfo.headRepository?.name
? `${prInfo.headRepositoryOwner.login}/${prInfo.headRepository.name}`.toLowerCase()
: undefined
const forkUrl = prInfo.headRepository?.url
// Check if we already have a remote for this fork
const existingRemote = remotes.find(
(r: { name: string; url: string }) => {
if (
forkUrl &&
normalizeRemoteUrl(r.url) ===
normalizeRemoteUrl(forkUrl)
) {
return true
}
return repoSlug
? normalizeRemoteUrl(r.url).includes(
`github.com/${repoSlug}`,
)
: false
},
)
if (existingRemote) {
remoteName = existingRemote.name
} else if (forkUrl) {
const remoteBaseName = sanitizeRemoteName(
`gh-pr-${prInfo.headRepositoryOwner?.login ?? "remote"}-${prInfo.headRepository?.name ?? prNumber}`,
)
const existingNames = new Set(
remotes.map(
(r: { name: string; url: string }) =>
r.name,
),
)
remoteName = remoteBaseName
let suffix = 2
while (existingNames.has(remoteName)) {
remoteName = `${remoteBaseName}-${suffix}`
suffix += 1
}
const addResult =
await $`jj git remote add ${remoteName} ${forkUrl}`
.nothrow()
.quiet()
if (addResult.exitCode !== 0) {
return JSON.stringify({
success: false,
error:
addResult.stderr.toString() ||
"Failed to add PR remote",
})
}
addedTemporaryRemote = true
} else {
return JSON.stringify({
success: false,
error: "PR head repository URL is unavailable",
})
}
}
// Fetch the PR branch
const fetchResult =
await $`jj git fetch --remote ${remoteName} --branch ${prInfo.headRefName}`
.nothrow()
.quiet()
if (fetchResult.exitCode !== 0) {
if (addedTemporaryRemote) {
await $`jj git remote remove ${remoteName}`
.nothrow()
.quiet()
}
return JSON.stringify({
success: false,
error:
fetchResult.stderr.toString() ||
"Failed to fetch PR branch",
})
}
// Create a new change on top of the PR branch
const bookmarkRevset = `remote_bookmarks(exact:"${prInfo.headRefName}", exact:"${remoteName}")`
const editResult =
await $`jj new ${bookmarkRevset}`.nothrow().quiet()
if (editResult.exitCode !== 0) {
if (addedTemporaryRemote) {
await $`jj git remote remove ${remoteName}`
.nothrow()
.quiet()
}
return JSON.stringify({
success: false,
error:
editResult.stderr.toString() ||
"Failed to create change on PR branch",
})
}
// Clean up temporary remote
if (addedTemporaryRemote) {
await $`jj git remote remove ${remoteName}`
.nothrow()
.quiet()
}
return JSON.stringify({
success: true,
prNumber,
title: prInfo.title,
baseBookmark: prInfo.baseRefName,
headBookmark: prInfo.headRefName,
remote: remoteName,
savedChangeId,
})
},
}),
},
} }
// Run `jj` with the given arguments; report success plus captured stdout.
async function jj(
  ...args: string[]
): Promise<{ stdout: string; ok: boolean }> {
  const result = await exec("jj", args)
  return { ok: result.exitCode === 0, stdout: result.stdout }
}
// Run the GitHub CLI; stderr is included so callers can surface errors.
async function gh(
  ...args: string[]
): Promise<{ stdout: string; ok: boolean; stderr: string }> {
  const result = await exec("gh", args)
  return {
    ok: result.exitCode === 0,
    stdout: result.stdout,
    stderr: result.stderr,
  }
}
// -- jj helpers ----------------------------------------------------------
// True when the working directory is inside a jj repository.
async function isJjRepo(): Promise<boolean> {
  const result = await jj("root")
  return result.ok
}
// True when `jj diff --summary` reports any working-copy modifications.
async function hasWorkingCopyChanges(): Promise<boolean> {
  const { ok, stdout } = await jj("diff", "--summary")
  return ok && stdout.trim() !== ""
}
// List every bookmark (local and remote) known to the repository.
async function getBookmarks(): Promise<BookmarkRef[]> {
  const result = await jj(
    "bookmark",
    "list",
    "--all-remotes",
    "-T",
    'name ++ "\\t" ++ remote ++ "\\n"',
  )
  return result.ok ? parseBookmarks(result.stdout) : []
}
// Bookmarks sitting on the current head: @ when the working copy is
// dirty, otherwise its parent @-.
async function getCurrentBookmarks(): Promise<BookmarkRef[]> {
  const rev = (await hasWorkingCopyChanges()) ? "@" : "@-"
  const result = await jj(
    "bookmark",
    "list",
    "--all-remotes",
    "-r",
    rev,
    "-T",
    'name ++ "\\t" ++ remote ++ "\\n"',
  )
  return result.ok ? parseBookmarks(result.stdout) : []
}
// Pick the default review base: whatever jj resolves as trunk(), falling
// back to a local main/master, then any bookmark at all.
async function getDefaultBookmark(): Promise<BookmarkRef | null> {
  const trunk = await jj(
    "bookmark",
    "list",
    "--all-remotes",
    "-r",
    "trunk()",
    "-T",
    'name ++ "\\t" ++ remote ++ "\\n"',
  )
  if (trunk.ok) {
    const [first] = parseBookmarks(trunk.stdout)
    if (first) return first
  }
  const all = await getBookmarks()
  const local = (name: string) =>
    all.find((b) => !b.remote && b.name === name)
  return local("main") ?? local("master") ?? all[0] ?? null
}
// Most recent `limit` changes from jj log, newest first.
async function getRecentChanges(limit = 20): Promise<Change[]> {
  const result = await jj(
    "log",
    "-n",
    String(limit),
    "--no-graph",
    "-T",
    'change_id.shortest(8) ++ "\\t" ++ description.first_line() ++ "\\n"',
  )
  return result.ok ? parseChanges(result.stdout) : []
}
// Find the unique merge-base change between @ and the given bookmark.
// Returns its short change id, or null when jj fails or the revset does
// not resolve to exactly one change (e.g. an ambiguous criss-cross merge).
async function getMergeBase(
  bookmark: string,
  remote?: string,
): Promise<string | null> {
  const ref: BookmarkRef = { name: bookmark, remote }
  // heads(::@ & ::bookmark) = the head(s) of the common-ancestor set.
  const r = await jj(
    "log",
    "-r",
    `heads(::@ & ::${bookmarkRevset(ref)})`,
    "--no-graph",
    "-T",
    'change_id.shortest(8) ++ "\\n"',
  )
  if (!r.ok) return null
  const lines = r.stdout
    .trim()
    .split("\n")
    .filter((l) => l.trim())
  // Only a single unambiguous merge-base is usable as a diff endpoint.
  return lines.length === 1 ? lines[0].trim() : null
}
// -- PR materialization --------------------------------------------------
// Fetch a GitHub PR and check out a fresh jj change on top of its head.
// Side effects: may add (and always removes) a temporary git remote for
// cross-repository (fork) PRs, and moves the working copy via `jj new`.
// Refuses to run when the working copy is dirty so nothing is lost.
async function materializePr(prNumber: number): Promise<
  | {
      ok: true
      title: string
      baseBookmark: string
      baseRemote?: string
      savedChangeId: string
    }
  | { ok: false; error: string }
> {
  if (await hasWorkingCopyChanges()) {
    return {
      ok: false,
      error: "You have local jj changes. Snapshot or discard them first.",
    }
  }
  // Remember the current change so the caller can navigate back later.
  const savedR = await jj(
    "log",
    "-r",
    "@",
    "--no-graph",
    "-T",
    "change_id.shortest(8)",
  )
  const savedChangeId = savedR.stdout.trim()
  const prR = await gh(
    "pr",
    "view",
    String(prNumber),
    "--json",
    "baseRefName,title,headRefName,isCrossRepository,headRepository,headRepositoryOwner",
  )
  if (!prR.ok) {
    return {
      ok: false,
      error: `Could not find PR #${prNumber}. Check gh auth and that the PR exists.`,
    }
  }
  let prInfo: {
    baseRefName: string
    title: string
    headRefName: string
    isCrossRepository: boolean
    headRepository?: { name: string; url: string }
    headRepositoryOwner?: { login: string }
  }
  try {
    prInfo = JSON.parse(prR.stdout)
  } catch {
    return { ok: false, error: "Failed to parse PR info" }
  }
  // Parse `jj git remote list` output: "name url" per line.
  const remotesR = await jj("git", "remote", "list")
  const remotes = remotesR.stdout
    .trim()
    .split("\n")
    .filter(Boolean)
    .map((line) => {
      const [name, ...urlParts] = line.split(/\s+/)
      return { name, url: urlParts.join(" ") }
    })
    .filter((r) => r.name && r.url)
  const defaultRemote =
    remotes.find((r) => r.name === "origin") ?? remotes[0]
  if (!defaultRemote) {
    return { ok: false, error: "No jj remotes configured" }
  }
  let remoteName = defaultRemote.name
  let addedTempRemote = false
  if (prInfo.isCrossRepository) {
    // Fork PR: reuse an existing remote that already points at the fork
    // (matched by URL or owner/name slug), or add a temporary one.
    const repoSlug =
      prInfo.headRepositoryOwner?.login && prInfo.headRepository?.name
        ? `${prInfo.headRepositoryOwner.login}/${prInfo.headRepository.name}`.toLowerCase()
        : undefined
    const forkUrl = prInfo.headRepository?.url
    const existingRemote = remotes.find((r) => {
      if (
        forkUrl &&
        normalizeRemoteUrl(r.url) === normalizeRemoteUrl(forkUrl)
      )
        return true
      return repoSlug
        ? normalizeRemoteUrl(r.url).includes(
            `github.com/${repoSlug}`,
          )
        : false
    })
    if (existingRemote) {
      remoteName = existingRemote.name
    } else if (forkUrl) {
      const baseName = sanitizeRemoteName(
        `gh-pr-${prInfo.headRepositoryOwner?.login ?? "remote"}-${prInfo.headRepository?.name ?? prNumber}`,
      )
      // Suffix the generated name until it no longer clashes.
      const names = new Set(remotes.map((r) => r.name))
      remoteName = baseName
      let suffix = 2
      while (names.has(remoteName)) {
        remoteName = `${baseName}-${suffix++}`
      }
      const addR = await jj(
        "git",
        "remote",
        "add",
        remoteName,
        forkUrl,
      )
      if (!addR.ok) return { ok: false, error: "Failed to add PR remote" }
      addedTempRemote = true
    } else {
      return { ok: false, error: "PR fork URL is unavailable" }
    }
  }
  const fetchR = await jj(
    "git",
    "fetch",
    "--remote",
    remoteName,
    "--branch",
    prInfo.headRefName,
  )
  if (!fetchR.ok) {
    // Clean up the temporary remote on every failure path.
    if (addedTempRemote)
      await jj("git", "remote", "remove", remoteName)
    return { ok: false, error: "Failed to fetch PR branch" }
  }
  // JSON.stringify gives safe double-quoting inside the revset.
  const revset = `remote_bookmarks(exact:${JSON.stringify(prInfo.headRefName)}, exact:${JSON.stringify(remoteName)})`
  const newR = await jj("new", revset)
  if (!newR.ok) {
    if (addedTempRemote)
      await jj("git", "remote", "remove", remoteName)
    return { ok: false, error: "Failed to create change on PR branch" }
  }
  if (addedTempRemote) await jj("git", "remote", "remove", remoteName)
  // Resolve base bookmark remote
  const baseBms = await getBookmarks()
  const baseRef = baseBms.find((b) => b.name === prInfo.baseRefName)
  return {
    ok: true,
    title: prInfo.title,
    baseBookmark: prInfo.baseRefName,
    baseRemote: baseRef?.remote,
    savedChangeId,
  }
}
// -- prompt building -----------------------------------------------------
// Turn a ReviewTarget into the natural-language instructions handed to
// the review agent, including concrete jj commands where possible.
async function buildPrompt(target: ReviewTarget): Promise<string> {
  switch (target.type) {
    case "workingCopy":
      return "Review the current working-copy changes (including new files). Use `jj status`, `jj diff --summary`, and `jj diff` to inspect."
    case "baseBookmark": {
      const label = bookmarkLabel({
        name: target.bookmark,
        remote: target.remote,
      })
      // Resolve the merge-base up front so the agent gets an exact diff
      // command; otherwise instruct it to find the merge-base itself.
      const mergeBase = await getMergeBase(
        target.bookmark,
        target.remote,
      )
      if (mergeBase) {
        return `Review code changes against the base bookmark '${label}'. The merge-base change is ${mergeBase}. Run \`jj diff --from ${mergeBase} --to @\` to inspect the changes. Also check for local working-copy changes with \`jj diff --summary\`.`
      }
      return `Review code changes against the base bookmark '${label}'. Find the merge-base between @ and ${label}, then run \`jj diff --from <merge-base> --to @\`. Also check for local working-copy changes.`
    }
    case "change":
      return target.title
        ? `Review the code changes introduced by change ${target.changeId} ("${target.title}"). Use \`jj show ${target.changeId}\` to inspect.`
        : `Review the code changes introduced by change ${target.changeId}. Use \`jj show ${target.changeId}\` to inspect.`
    case "pullRequest": {
      const label = bookmarkLabel({
        name: target.baseBookmark,
        remote: target.baseRemote,
      })
      const mergeBase = await getMergeBase(
        target.baseBookmark,
        target.baseRemote,
      )
      if (mergeBase) {
        return `Review pull request #${target.prNumber} ("${target.title}") against '${label}'. Merge-base is ${mergeBase}. Run \`jj diff --from ${mergeBase} --to @\` to inspect.`
      }
      return `Review pull request #${target.prNumber} ("${target.title}") against '${label}'. Find the merge-base and run \`jj diff --from <merge-base> --to @\`.`
    }
    case "folder":
      // Snapshot review: no diff endpoints, just read the paths as-is.
      return `Review the code in the following paths: ${target.paths.join(", ")}. This is a snapshot review (not a diff). Read the files directly.`
  }
}
// -- review execution ----------------------------------------------------
// Push a review prompt into the TUI input and submit it to the @review agent.
async function startReview(target: ReviewTarget): Promise<void> {
  const text = `@review ${await buildPrompt(target)}`
  const tui = api.client.tui
  await tui.clearPrompt()
  await tui.appendPrompt({ body: { text } })
  await tui.submitPrompt()
}
// -- dialogs -------------------------------------------------------------
// Top-level review menu: choose what kind of target to review, then hand
// off to the matching selector or input dialog.
function showReviewSelector(): void {
  const options: TuiDialogSelectOption<string>[] = [
    {
      title: "Working-copy changes",
      value: "workingCopy",
      description: "Review uncommitted changes",
    },
    {
      title: "Against a bookmark",
      value: "baseBookmark",
      description: "PR-style review against a base",
    },
    {
      title: "A specific change",
      value: "change",
      description: "Review a single jj change",
    },
    {
      title: "A pull request",
      value: "pullRequest",
      description: "Materialize and review a GitHub PR",
    },
    {
      title: "A folder (snapshot)",
      value: "folder",
      description: "Review files directly, no diff",
    },
  ]
  api.ui.dialog.replace(
    () =>
      api.ui.DialogSelect({
        title: "Review",
        options,
        onSelect: (option) => {
          api.ui.dialog.clear()
          switch (option.value) {
            case "workingCopy":
              // Needs no further input — start immediately.
              void startReview({ type: "workingCopy" })
              break
            case "baseBookmark":
              void showBookmarkSelector()
              break
            case "change":
              void showChangeSelector()
              break
            case "pullRequest":
              void showPrInput()
              break
            case "folder":
              showFolderInput()
              break
          }
        },
      }),
    () => api.ui.dialog.clear(),
  )
}
// Pick a base bookmark for a PR-style review. Bookmarks already on the
// current head are excluded (diffing against them would be empty).
async function showBookmarkSelector(): Promise<void> {
  api.ui.toast({ message: "Loading bookmarks...", variant: "info" })
  const allBookmarks = await getBookmarks()
  const currentBookmarks = await getCurrentBookmarks()
  const defaultBookmark = await getDefaultBookmark()
  const currentKeys = new Set(
    currentBookmarks.map((b) => `${b.name}@${b.remote ?? ""}`),
  )
  const candidates = allBookmarks.filter(
    (b) => !currentKeys.has(`${b.name}@${b.remote ?? ""}`),
  )
  if (candidates.length === 0) {
    api.ui.toast({
      message: "No other bookmarks found",
      variant: "error",
    })
    return
  }
  // Sort: default first, then local before remote
  const defaultKey = defaultBookmark
    ? `${defaultBookmark.name}@${defaultBookmark.remote ?? ""}`
    : null
  const sorted = candidates.sort((a, b) => {
    const aKey = `${a.name}@${a.remote ?? ""}`
    const bKey = `${b.name}@${b.remote ?? ""}`
    if (aKey === defaultKey) return -1
    if (bKey === defaultKey) return 1
    if (!!a.remote !== !!b.remote) return a.remote ? 1 : -1
    return bookmarkLabel(a).localeCompare(bookmarkLabel(b))
  })
  const options: TuiDialogSelectOption<BookmarkRef>[] = sorted.map(
    (b) => ({
      title: bookmarkLabel(b),
      value: b,
      // Annotate the default base and remote-only bookmarks.
      description:
        `${b.name}@${b.remote ?? ""}` === defaultKey
          ? "(default)"
          : b.remote
            ? `remote: ${b.remote}`
            : undefined,
    }),
  )
  api.ui.dialog.replace(
    () =>
      api.ui.DialogSelect({
        title: "Base bookmark",
        placeholder: "Filter bookmarks...",
        options,
        onSelect: (option) => {
          api.ui.dialog.clear()
          void startReview({
            type: "baseBookmark",
            bookmark: option.value.name,
            remote: option.value.remote,
          })
        },
      }),
    () => api.ui.dialog.clear(),
  )
}
// Let the user pick one recent jj change to review.
async function showChangeSelector(): Promise<void> {
  api.ui.toast({ message: "Loading changes...", variant: "info" })
  const changes = await getRecentChanges()
  if (changes.length === 0) {
    api.ui.toast({ message: "No changes found", variant: "error" })
    return
  }
  const options: TuiDialogSelectOption<Change>[] = []
  for (const change of changes) {
    options.push({
      title: `${change.changeId} ${change.title}`,
      value: change,
    })
  }
  api.ui.dialog.replace(
    () =>
      api.ui.DialogSelect({
        title: "Change to review",
        placeholder: "Filter changes...",
        options,
        onSelect: (option) => {
          api.ui.dialog.clear()
          const { changeId, title } = option.value
          void startReview({ type: "change", changeId, title })
        },
      }),
    () => api.ui.dialog.clear(),
  )
}
// Prompt for a PR number or GitHub URL, validate it, then materialize
// and review the PR.
function showPrInput(): void {
  api.ui.dialog.replace(
    () =>
      api.ui.DialogPrompt({
        title: "PR number or URL",
        placeholder:
          "123 or https://github.com/owner/repo/pull/123",
        onConfirm: (value) => {
          const prNumber = parsePrRef(value)
          if (!prNumber) {
            // Keep the dialog open so the user can correct the input.
            api.ui.toast({
              message:
                "Invalid PR reference. Enter a number or GitHub PR URL.",
              variant: "error",
            })
            return
          }
          api.ui.dialog.clear()
          void handlePrReview(prNumber)
        },
        onCancel: () => api.ui.dialog.clear(),
      }),
    () => api.ui.dialog.clear(),
  )
}
// Materialize the PR locally, report progress via toasts, then kick off
// the review against its base bookmark.
async function handlePrReview(prNumber: number): Promise<void> {
  api.ui.toast({
    message: `Materializing PR #${prNumber}...`,
    variant: "info",
    duration: 10000,
  })
  const result = await materializePr(prNumber)
  if (!result.ok) {
    api.ui.toast({ message: result.error, variant: "error" })
    return
  }
  api.ui.toast({
    message: `PR #${prNumber} materialized: ${result.title}`,
    variant: "success",
  })
  const { baseBookmark, baseRemote, title } = result
  await startReview({
    type: "pullRequest",
    prNumber,
    baseBookmark,
    baseRemote,
    title,
  })
}
// Ask for whitespace-separated paths and start a snapshot (non-diff) review.
function showFolderInput(): void {
  api.ui.dialog.replace(
    () =>
      api.ui.DialogPrompt({
        title: "Paths to review",
        placeholder: "src docs lib/utils.ts",
        onConfirm: (value) => {
          const paths: string[] = []
          for (const part of value.split(/\s+/)) {
            const trimmed = part.trim()
            if (trimmed) paths.push(trimmed)
          }
          if (paths.length === 0) {
            api.ui.toast({
              message: "No paths provided",
              variant: "error",
            })
            return
          }
          api.ui.dialog.clear()
          void startReview({ type: "folder", paths })
        },
        onCancel: () => api.ui.dialog.clear(),
      }),
    () => api.ui.dialog.clear(),
  )
}
// -- jj repo check -------------------------------------------------------
// Checked once at plugin load; the /review command only appears in jj repos.
const inJjRepo = await isJjRepo()
// -- command registration ------------------------------------------------
api.command.register(() =>
  inJjRepo
    ? [
        {
          title: "Review code changes",
          value: "review",
          description:
            "Working-copy, bookmark, change, PR, or folder",
          slash: { name: "review" },
          onSelect: () => showReviewSelector(),
        },
      ]
    : [],
)
} }

View File

@@ -1,12 +0,0 @@
{
"id": "95b075f0",
"title": "Fix Wipr 2 mas installation failure in nixos-config",
"tags": [
"bugfix",
"mas",
"nix-darwin"
],
"status": "in_progress",
"created_at": "2026-03-29T18:55:14.812Z",
"assigned_to_session": "8318f7d4-ccd1-4467-b7c9-fb05e53e4a1d"
}

View File

@@ -1,15 +0,0 @@
{
"id": "e3f0bbbb",
"title": "Restore opencode alongside pi in nixos-config",
"tags": [
"history",
"nix",
"pi",
"opencode"
],
"status": "in_progress",
"created_at": "2026-03-31T20:49:44.402Z",
"assigned_to_session": "5607879f-b81a-4343-aa36-826e15630afc"
}
Find the repository change that replaced opencode with pi, then restore opencode while keeping pi side-by-side. Validate formatting/build/checks as required by AGENTS.md.

85
flake.lock generated
View File

@@ -850,86 +850,6 @@
"type": "github" "type": "github"
} }
}, },
"pi-agent-stuff": {
"flake": false,
"locked": {
"lastModified": 1774868285,
"narHash": "sha256-JKMqt5ionfF/aBFTSQe9BD49wAErNtEnf3Mnekk3nzk=",
"owner": "mitsuhiko",
"repo": "agent-stuff",
"rev": "80e1e96fa563ffc0c9d60422eac6dc9e67440385",
"type": "github"
},
"original": {
"owner": "mitsuhiko",
"repo": "agent-stuff",
"type": "github"
}
},
"pi-elixir": {
"flake": false,
"locked": {
"lastModified": 1772900407,
"narHash": "sha256-QoCPVdN5CYGe5288cJQmB10ds/UOucHIyG9z9E/4hsw=",
"owner": "dannote",
"repo": "pi-elixir",
"rev": "3b8f667beb696ce6ed456e762bfcf61e7326f5c4",
"type": "github"
},
"original": {
"owner": "dannote",
"repo": "pi-elixir",
"type": "github"
}
},
"pi-harness": {
"flake": false,
"locked": {
"lastModified": 1774881866,
"narHash": "sha256-d92ZkKIDQuI8a6WVTIedusmANn0nSQ2iteg8EQkdHmI=",
"owner": "aliou",
"repo": "pi-harness",
"rev": "ea8a2be4156f16761ee508fd538d526d2fca674f",
"type": "github"
},
"original": {
"owner": "aliou",
"repo": "pi-harness",
"type": "github"
}
},
"pi-mcp-adapter": {
"flake": false,
"locked": {
"lastModified": 1774247177,
"narHash": "sha256-HTexm+b+UUbJD4qwIqlNcVPhF/G7/MtBtXa0AdeztbY=",
"owner": "nicobailon",
"repo": "pi-mcp-adapter",
"rev": "c0919a29d263c2058c302641ddb04769c21be262",
"type": "github"
},
"original": {
"owner": "nicobailon",
"repo": "pi-mcp-adapter",
"type": "github"
}
},
"pi-rose-pine": {
"flake": false,
"locked": {
"lastModified": 1770936151,
"narHash": "sha256-6TzuWJPAn8zz+lUjZ3slFCNdPVd/Z2C+WoXFsLopk1g=",
"owner": "zenobi-us",
"repo": "pi-rose-pine",
"rev": "9b342f6e16d6b28c00c2f888ba2f050273981bdb",
"type": "github"
},
"original": {
"owner": "zenobi-us",
"repo": "pi-rose-pine",
"type": "github"
}
},
"pimalaya": { "pimalaya": {
"flake": false, "flake": false,
"locked": { "locked": {
@@ -994,11 +914,6 @@
"nixpkgs" "nixpkgs"
], ],
"nixvim": "nixvim", "nixvim": "nixvim",
"pi-agent-stuff": "pi-agent-stuff",
"pi-elixir": "pi-elixir",
"pi-harness": "pi-harness",
"pi-mcp-adapter": "pi-mcp-adapter",
"pi-rose-pine": "pi-rose-pine",
"qmd": "qmd", "qmd": "qmd",
"sops-nix": "sops-nix", "sops-nix": "sops-nix",
"zjstatus": "zjstatus" "zjstatus": "zjstatus"

View File

@@ -68,26 +68,6 @@
nixpkgs.url = "github:nixos/nixpkgs/master"; nixpkgs.url = "github:nixos/nixpkgs/master";
nixpkgs-lib.follows = "nixpkgs"; nixpkgs-lib.follows = "nixpkgs";
nixvim.url = "github:nix-community/nixvim"; nixvim.url = "github:nix-community/nixvim";
pi-agent-stuff = {
url = "github:mitsuhiko/agent-stuff";
flake = false;
};
pi-elixir = {
url = "github:dannote/pi-elixir";
flake = false;
};
pi-harness = {
url = "github:aliou/pi-harness";
flake = false;
};
pi-mcp-adapter = {
url = "github:nicobailon/pi-mcp-adapter";
flake = false;
};
pi-rose-pine = {
url = "github:zenobi-us/pi-rose-pine";
flake = false;
};
qmd.url = "github:tobi/qmd"; qmd.url = "github:tobi/qmd";
sops-nix = { sops-nix = {
url = "github:Mic92/sops-nix"; url = "github:Mic92/sops-nix";

View File

@@ -1,27 +0,0 @@
# AGENTS.md
## Version Control
- Use `jj` for version control, not `git`.
- `jj tug` is an alias for `jj bookmark move --from closest_bookmark(@-) --to @-`.
- Never attempt historically destructive Git commands.
- Make small, frequent commits.
## Scripting
- Use Nushell (`nu`) for scripting.
- Do not use Python, Perl, Lua, awk, or any other scripting language. You are programmatically blocked from doing so.
## Workflow
- Always complete the requested work.
- If there is any ambiguity about what to do next, do NOT make a decision yourself. Stop your work and ask.
- Do not end with “If you want me to…” or “I can…”; take the next necessary step and finish the job without waiting for additional confirmation.
- Do not future-proof things. Stick to the original plan.
- Do not add fallbacks or backward compatibility unless explicitly required by the user. By default, replace the previous implementation with the new one entirely.
## Validation
- Do not ignore failing tests or checks, even if they appear unrelated to your changes.
- After completing and validating your work, the final step is to run the project's full validation and test commands and ensure they all pass.

View File

@@ -1,190 +0,0 @@
/**
* No Git Extension
*
* Blocks direct git invocations and tells the LLM to use jj (Jujutsu) instead.
* Mentions of the word "git" in search patterns, strings, comments, etc. are allowed.
*/
import type { ExtensionAPI } from "@mariozechner/pi-coding-agent";
import { isToolCallEventType } from "@mariozechner/pi-coding-agent";
// Token stream produced by tokenizeShell: plain words plus the operators
// that separate command segments (;, |, &&, ||, newline, parens).
type ShellToken =
  | { type: "word"; value: string }
  | { type: "operator"; value: string };
// Wrappers that are transparent to the real command (e.g. `sudo git ...`).
const COMMAND_PREFIXES = new Set(["env", "command", "builtin", "time", "sudo", "nohup", "nice"]);
// Shell keywords that may precede the command word in control structures.
const SHELL_KEYWORDS = new Set(["if", "then", "elif", "else", "do", "while", "until", "case", "in"]);
// Interpreters whose -c/--command payload must be scanned recursively.
const SHELL_INTERPRETERS = new Set(["bash", "sh", "zsh", "fish", "nu"]);
// Matches NAME=value environment assignments that may precede a command.
function isAssignmentWord(value: string): boolean {
  const assignment = /^[A-Za-z_][A-Za-z0-9_]*=.*/;
  return assignment.test(value);
}
// Minimal shell lexer: splits a command string into word and operator
// tokens, honoring single quotes, double quotes, and backslash escapes.
// It performs no expansion (variables, globs) — just enough structure to
// find command-segment boundaries.
function tokenizeShell(command: string): ShellToken[] {
  const tokens: ShellToken[] = [];
  let current = "";
  // Currently open quote character, or null when outside quotes.
  let quote: "'" | '"' | null = null;
  const pushWord = () => {
    if (!current) return;
    tokens.push({ type: "word", value: current });
    current = "";
  };
  for (let i = 0; i < command.length; i++) {
    const char = command[i];
    if (quote) {
      if (quote === "'") {
        // Inside single quotes everything is literal until the closing '.
        if (char === "'") {
          quote = null;
        } else {
          current += char;
        }
        continue;
      }
      // Inside double quotes: a closing " ends the region; backslash
      // escapes the following character; anything else is literal.
      if (char === '"') {
        quote = null;
        continue;
      }
      if (char === "\\") {
        if (i + 1 < command.length) {
          current += command[i + 1];
          i += 1;
        }
        continue;
      }
      current += char;
      continue;
    }
    if (char === "'" || char === '"') {
      quote = char;
      continue;
    }
    if (char === "\\") {
      // Unquoted backslash escapes the next character.
      if (i + 1 < command.length) {
        current += command[i + 1];
        i += 1;
      }
      continue;
    }
    if (/\s/.test(char)) {
      pushWord();
      // Newlines separate commands, so they are kept as operators.
      if (char === "\n") {
        tokens.push({ type: "operator", value: "\n" });
      }
      continue;
    }
    // Two-character operators must be checked before single-character ones.
    const twoCharOperator = command.slice(i, i + 2);
    if (twoCharOperator === "&&" || twoCharOperator === "||") {
      pushWord();
      tokens.push({ type: "operator", value: twoCharOperator });
      i += 1;
      continue;
    }
    if (char === ";" || char === "|" || char === "(" || char === ")") {
      pushWord();
      tokens.push({ type: "operator", value: char });
      continue;
    }
    current += char;
  }
  // Flush any trailing word left in the buffer.
  pushWord();
  return tokens;
}
// Locate the effective command word in one segment, skipping shell
// keywords, VAR=value assignments, and transparent prefixes like `sudo`.
// Returns { index: words.length } with no word when none is found.
function findCommandWord(words: string[]): { word?: string; index: number } {
  for (let i = 0; i < words.length; i++) {
    const candidate = words[i];
    const skip =
      SHELL_KEYWORDS.has(candidate) ||
      isAssignmentWord(candidate) ||
      COMMAND_PREFIXES.has(candidate);
    if (!skip) {
      return { word: candidate, index: i };
    }
  }
  return { index: words.length };
}
// After an interpreter word, find a -c/--command flag and return the
// script argument that follows it (null when there is none).
function getInlineShellCommand(words: string[], commandIndex: number): string | null {
  const flagPattern = /^(?:-[A-Za-z]*c[A-Za-z]*|--command)$/;
  const rest = words.slice(commandIndex + 1);
  for (const [offset, word] of rest.entries()) {
    if (flagPattern.test(word)) {
      return rest[offset + 1] ?? null;
    }
  }
  return null;
}
// Decide whether one pipeline segment is a blocked direct git invocation.
function segmentContainsBlockedGit(words: string[]): boolean {
  const { word, index } = findCommandWord(words);
  if (!word) {
    return false;
  }
  if (word === "git") {
    return true;
  }
  if (word === "jj") {
    return false;
  }
  if (!SHELL_INTERPRETERS.has(word)) {
    return false;
  }
  // Recurse into `bash -c '...'`-style inline scripts.
  const inline = getInlineShellCommand(words, index);
  return !!inline && containsBlockedGitInvocation(inline);
}
// Scan a full shell command line; true when any segment invokes git
// directly. Operators (;, |, &&, ||, newline, parens) end a segment.
function containsBlockedGitInvocation(command: string): boolean {
  let segment: string[] = [];
  for (const token of tokenizeShell(command)) {
    if (token.type === "word") {
      segment.push(token.value);
      continue;
    }
    if (segmentContainsBlockedGit(segment)) {
      return true;
    }
    segment = [];
  }
  // The final segment has no trailing operator; check it too.
  return segmentContainsBlockedGit(segment);
}
/**
 * Registers a bash tool-call hook that blocks direct `git` invocations
 * and steers the model toward jj instead. Only the command position is
 * checked, so mentions of "git" inside strings or patterns still pass.
 */
export default function (pi: ExtensionAPI) {
  pi.on("tool_call", async (event, _ctx) => {
    // Only bash tool calls are inspected.
    if (!isToolCallEventType("bash", event)) return;
    const command = event.input.command.trim();
    if (containsBlockedGitInvocation(command)) {
      return {
        block: true,
        reason: "git is not used in this project. Use jj (Jujutsu) instead.",
      };
    }
  });
}

View File

@@ -1,28 +0,0 @@
/**
* No Scripting Extension
*
* Blocks python, perl, ruby, php, lua, node -e, and inline bash/sh scripts.
* Tells the LLM to use `nu -c` instead.
*/
import type { ExtensionAPI } from "@mariozechner/pi-coding-agent";
import { isToolCallEventType } from "@mariozechner/pi-coding-agent";
// Matches a blocked interpreter in command position: at the start of the
// command or immediately after ;, &, |, &&, ||, $( or a backtick. `node -e`
// and inline `bash -c`/`sh -c` are blocked; plain `node file.js` is not.
const SCRIPTING_PATTERN =
  /(?:^|[;&|]\s*|&&\s*|\|\|\s*|\$\(\s*|`\s*)(?:python[23]?|perl|ruby|php|lua|node\s+-e|bash\s+-c|sh\s+-c)\s/;
// Registers a bash tool-call hook that blocks scripting-language
// invocations and points the model at `nu -c` instead.
export default function (pi: ExtensionAPI) {
  pi.on("tool_call", async (event, _ctx) => {
    if (!isToolCallEventType("bash", event)) return;
    const command = event.input.command.trim();
    if (SCRIPTING_PATTERN.test(command)) {
      return {
        block: true,
        reason:
          "Do not use python, perl, ruby, php, lua, node -e, or inline bash/sh for scripting. Use `nu -c` instead.",
      };
    }
  });
}

View File

@@ -1,687 +0,0 @@
import { readFile, writeFile, mkdir, readdir } from "node:fs/promises";
import * as fs from "node:fs";
import * as os from "node:os";
import * as path from "node:path";
import * as crypto from "node:crypto";
import { Box, Text } from "@mariozechner/pi-tui";
import type { ExtensionAPI, ExtensionContext, ExtensionCommandContext, Model } from "@mariozechner/pi-coding-agent";
import {
createAgentSession,
DefaultResourceLoader,
getAgentDir,
SessionManager,
SettingsManager,
} from "@mariozechner/pi-coding-agent";
// Describes one Notability ingest job; parsed from a JSON manifest file
// (see ingestManifest / the /note-ingest command). Paths are used verbatim.
interface IngestManifest {
  version: number;
  job_id: string;
  note_id: string; // stable note identity; matched against frontmatter `note_id`
  operation: string;
  requested_at: string; // ISO timestamp; first 10 chars become `created` in frontmatter
  title: string; // fallback title when none can be extracted from the note
  source_relpath: string; // note path relative to the Notability source, recorded in frontmatter
  source_path: string;
  input_path: string; // file fed to transcription (.pdf or .png)
  archive_path: string; // archived copy, recorded as `source_file` in frontmatter
  output_path: string; // preferred markdown destination for the generated note
  transcript_path: string; // where the raw transcription is written
  result_path: string; // where the IngestResult JSON is written
  session_dir: string; // per-job scratch directory (created if missing)
  source_hash: string; // sha256 of the source, recorded in frontmatter
  last_generated_output_hash?: string | null; // hash of the previously generated output; enables safe overwrite
  force_overwrite_generated?: boolean; // overwrite a managed file even if edited out-of-band
  source_transport?: string; // transport label for frontmatter; defaults to "webdav"
}
// Outcome of one ingest job, persisted as JSON at `result_path`.
interface IngestResult {
  success: boolean;
  job_id: string;
  note_id: string;
  archive_path: string;
  source_hash: string;
  session_dir: string;
  output_path?: string; // resolved managed output (may differ from the manifest's output_path)
  output_hash?: string; // sha256 of the written file when the main output was updated
  conflict_path?: string; // set when the write was diverted to a conflict file
  write_mode?: "create" | "overwrite" | "force-overwrite" | "conflict";
  updated_main_output?: boolean;
  transcript_path?: string;
  error?: string; // present on failure
}
// Frontmatter key/value pairs plus the document body following them.
interface FrontmatterInfo {
  values: Record<string, string>;
  body: string;
}
// A rasterized note page: its on-disk path plus the base64 payload shape the
// model API expects for image input.
interface RenderedPage {
  path: string;
  image: {
    type: "image";
    source: {
      type: "base64";
      mediaType: string;
      data: string;
    };
  };
}
// Skill names resolved under <agentDir>/skills/<name>/SKILL.md.
// NOTE(review): TRANSCRIBE_SKILL is not referenced by the visible code in
// this file (the transcription pass uses an inline prompt) — confirm before
// removing.
const TRANSCRIBE_SKILL = "notability-transcribe";
const NORMALIZE_SKILL = "notability-normalize";
// Custom message type handled by this extension's status renderer.
const STATUS_TYPE = "notability-status";
// Thinking levels for the two model passes.
const DEFAULT_TRANSCRIBE_THINKING = "low" as const;
const DEFAULT_NORMALIZE_THINKING = "off" as const;
// [provider, id] pair tried first when an image-capable model is required.
const PREFERRED_VISION_MODEL: [string, string] = ["openai-codex", "gpt-5.4"];
// Root of the markdown notebook; overridable via NOTABILITY_NOTES_DIR.
function getNotesRoot(): string {
  const override = process.env.NOTABILITY_NOTES_DIR;
  return override ?? path.join(os.homedir(), "Notes");
}

// Root for ingest state; overridable via NOTABILITY_DATA_ROOT.
function getDataRoot(): string {
  const override = process.env.NOTABILITY_DATA_ROOT;
  return override ?? path.join(os.homedir(), ".local", "share", "notability-ingest");
}

// Cache directory for rendered page PNGs; overridable via NOTABILITY_RENDER_ROOT.
function getRenderRoot(): string {
  const override = process.env.NOTABILITY_RENDER_ROOT;
  return override ?? path.join(getDataRoot(), "rendered-pages");
}

// Directory holding the notability helper scripts (status.nu, reingest.nu).
function getNotabilityScriptDir(): string {
  return path.join(getAgentDir(), "notability");
}

// Absolute path of a skill's SKILL.md under the agent directory.
function getSkillPath(skillName: string): string {
  return path.join(getAgentDir(), "skills", skillName, "SKILL.md");
}
// Drops a leading `---` frontmatter block and returns the trimmed body.
// Text without a complete (opened AND closed) frontmatter block is returned
// trimmed but otherwise untouched.
function stripFrontmatterBlock(text: string): string {
  const content = text.trim();
  const hasOpener = content.startsWith("---\n");
  const closerAt = hasOpener ? content.indexOf("\n---\n", 4) : -1;
  if (!hasOpener || closerAt === -1) {
    return content;
  }
  return content.slice(closerAt + 5).trim();
}
// Removes a surrounding ``` / ```markdown / ```md fence when the entire text
// is a single fenced block; otherwise returns the trimmed text unchanged.
function stripCodeFence(text: string): string {
  const content = text.trim();
  const fenced = /^```(?:markdown|md)?\n([\s\S]*?)\n```$/i.exec(content);
  return fenced ? fenced[1].trim() : content;
}

// Splits a markdown document into frontmatter key/value pairs and the body.
// A surrounding code fence is stripped first. Frontmatter lines without a
// colon are skipped; values keep any surrounding quotes.
function parseFrontmatter(text: string): FrontmatterInfo {
  const content = stripCodeFence(text);
  const opensWithDashes = content.startsWith("---\n");
  const closerAt = opensWithDashes ? content.indexOf("\n---\n", 4) : -1;
  if (!opensWithDashes || closerAt === -1) {
    // No complete frontmatter block: everything is body.
    return { values: {}, body: content };
  }
  const values: Record<string, string> = {};
  for (const line of content.slice(4, closerAt).split("\n")) {
    const colon = line.indexOf(":");
    if (colon === -1) continue;
    values[line.slice(0, colon).trim()] = line.slice(colon + 1).trim();
  }
  return { values, body: content.slice(closerAt + 5).trim() };
}
// YAML-safe string quoting: JSON escaping is a valid YAML double-quoted scalar.
function quoteYaml(value: string): string {
  return JSON.stringify(value);
}

// Hex-encoded SHA-256 digest of a string or buffer.
function sha256(content: string | Buffer): string {
  const hasher = crypto.createHash("sha256");
  hasher.update(content);
  return hasher.digest("hex");
}

// Hex-encoded SHA-256 digest of a file's contents.
async function sha256File(filePath: string): Promise<string> {
  return sha256(await readFile(filePath));
}
// Best-effort title: frontmatter `title` (quotes stripped), else the first
// top-level `# ` heading in the body, else the provided fallback.
function extractTitle(normalized: string, fallbackTitle: string): string {
  const { values, body } = parseFrontmatter(normalized);
  const fromFrontmatter = values.title?.replace(/^['"]|['"]$/g, "").trim();
  if (fromFrontmatter) {
    return fromFrontmatter;
  }
  for (const rawLine of body.split("\n")) {
    const line = rawLine.trim();
    if (line.startsWith("# ")) {
      return line.replace(/^#\s+/, "").trim();
    }
  }
  return fallbackTitle;
}
// Short format tag derived from a file extension ("pdf", "png", ...);
// "unknown" when the path has no extension.
function sourceFormat(filePath: string): string {
  const tag = path.extname(filePath).toLowerCase().replace(/^\./, "");
  switch (tag) {
    case "pdf":
      return "pdf";
    case "png":
      return "png";
    default:
      return tag || "unknown";
  }
}
// Assembles the final managed markdown document: generated provenance
// frontmatter followed by the normalized body.
function buildMarkdown(manifest: IngestManifest, normalized: string): string {
  const parsed = parseFrontmatter(normalized);
  const title = extractTitle(normalized, manifest.title);
  // Second-precision UTC timestamp (milliseconds stripped).
  const now = new Date().toISOString().replace(/\.\d{3}Z$/, "Z");
  // `created` is the date portion of the original enqueue request.
  const created = manifest.requested_at.slice(0, 10);
  const body = parsed.body.trim();
  // Never emit an empty document: fall back to a bare title heading.
  const outputBody = body.length > 0 ? body : `# ${title}\n`;
  return [
    "---",
    `title: ${quoteYaml(title)}`,
    `created: ${quoteYaml(created)}`,
    `updated: ${quoteYaml(now.slice(0, 10))}`,
    `source: ${quoteYaml("notability")}`,
    `source_transport: ${quoteYaml(manifest.source_transport ?? "webdav")}`,
    `source_relpath: ${quoteYaml(manifest.source_relpath)}`,
    `note_id: ${quoteYaml(manifest.note_id)}`,
    `managed_by: ${quoteYaml("notability-ingest")}`,
    `source_file: ${quoteYaml(manifest.archive_path)}`,
    `source_file_hash: ${quoteYaml(`sha256:${manifest.source_hash}`)}`,
    `source_format: ${quoteYaml(sourceFormat(manifest.archive_path))}`,
    `status: ${quoteYaml("active")}`,
    "tags:",
    "  - handwritten",
    "  - notability",
    "---",
    "",
    outputBody,
    "",
  ].join("\n");
}
// Sibling path used when the managed output was modified out-of-band:
// `<name>.conflict-<timestamp><ext>` in the same directory. The timestamp is
// made filesystem-safe (colons replaced, milliseconds stripped).
function conflictPathFor(outputPath: string): string {
  const { dir, name, ext } = path.parse(outputPath);
  const stamp = new Date()
    .toISOString()
    .replace(/[:]/g, "-")
    .replace(/\.\d{3}Z$/, "Z");
  return path.join(dir, `${name}.conflict-${stamp}${ext}`);
}
// Creates the parent directory of a file path (recursive, idempotent).
async function ensureParent(filePath: string): Promise<void> {
  const parentDir = path.dirname(filePath);
  await mkdir(parentDir, { recursive: true });
}

// Reads a skill's SKILL.md and returns its body with frontmatter removed.
async function loadSkillText(skillName: string): Promise<string> {
  const raw = await readFile(getSkillPath(skillName), "utf8");
  return stripFrontmatterBlock(raw).trim();
}
// Strips the optional leading "@" used for file-reference command arguments.
function normalizePathArg(arg: string): string {
  if (arg.startsWith("@")) {
    return arg.slice(1);
  }
  return arg;
}
// Picks the model for a skill run.
//
// Without image input: the session's current model when set, otherwise the
// first available model.
// With image input, in priority order: the preferred vision model, any
// subscription (non-opencode) image-capable model, the session's current
// model if it accepts images, otherwise the first image-capable model.
//
// Throws when no model (image-capable, if required) is configured.
//
// NOTE(review): rewritten from a version that evaluated
// `ctx.model && (!requireImage || ctx.model.input.includes("image"))` twice;
// the second occurrence's `!requireImage` arm was unreachable. Selection
// order is preserved.
function resolveModel(ctx: ExtensionContext, requireImage = false): Model {
  const available = ctx.modelRegistry.getAvailable();
  const matching = requireImage ? available.filter((model) => model.input.includes("image")) : available;
  if (matching.length === 0) {
    throw new Error(
      requireImage
        ? "No image-capable model configured for pi note ingestion"
        : "No available model configured for pi note ingestion",
    );
  }
  if (!requireImage) {
    return ctx.model ?? matching[0];
  }
  // Vision path: preferred model first.
  const [provider, id] = PREFERRED_VISION_MODEL;
  const preferred = matching.find((model) => model.provider === provider && model.id === id);
  if (preferred) {
    return preferred;
  }
  // Then any subscription (non-opencode) image-capable model.
  const subscriptionModel = matching.find(
    (model) => model.provider !== "opencode" && model.provider !== "opencode-go",
  );
  if (subscriptionModel) {
    return subscriptionModel;
  }
  // Then the session's current model, if it can take images.
  if (ctx.model?.input.includes("image")) {
    return ctx.model;
  }
  return matching[0];
}
// Runs a one-shot skill prompt against a model and returns its text output
// (code fences stripped).
//
// Two execution paths:
// - With images: shells out to the `pi` CLI (wrapped in `timeout 45s`) so
//   page images can be attached as @-file references; the prompt is staged in
//   a temp file that is always cleaned up.
// - Without images: runs an isolated in-memory agent session whose entire
//   system prompt is `systemPrompt`, accumulating streamed text deltas.
async function runSkillPrompt(
  ctx: ExtensionContext,
  systemPrompt: string,
  prompt: string,
  images: RenderedPage[] = [],
  thinkingLevel: "off" | "low" = "off",
): Promise<string> {
  if (images.length > 0) {
    const model = resolveModel(ctx, true);
    const { execFile } = await import("node:child_process");
    // Stage the prompt on disk so it can be passed as an @file argument.
    const promptPath = path.join(os.tmpdir(), `pi-note-ingest-${crypto.randomUUID()}.md`);
    await writeFile(promptPath, `${prompt}\n`);
    const args = [
      "45s", // wall-clock limit enforced by the `timeout` wrapper
      "pi",
      "--model",
      `${model.provider}/${model.id}`,
      "--thinking",
      thinkingLevel,
      "--no-tools",
      "--no-session",
      "-p",
      ...images.map((page) => `@${page.path}`),
      `@${promptPath}`,
    ];
    try {
      const output = await new Promise<string>((resolve, reject) => {
        // Non-empty stdout counts as success even when the process errors
        // (e.g. exits non-zero after printing its answer).
        execFile("timeout", args, { cwd: ctx.cwd, env: process.env, maxBuffer: 10 * 1024 * 1024 }, (error, stdout, stderr) => {
          if ((stdout ?? "").trim().length > 0) {
            resolve(stdout);
            return;
          }
          if (error) {
            reject(new Error(stderr || stdout || error.message));
            return;
          }
          resolve(stdout);
        });
      });
      return stripCodeFence(output).trim();
    } finally {
      try {
        fs.unlinkSync(promptPath);
      } catch {
        // Ignore temp file cleanup failures.
      }
    }
  }
  // Text-only path: isolated session with no extensions, templates, themes,
  // skills, tools, or agents files.
  const agentDir = getAgentDir();
  const settingsManager = SettingsManager.create(ctx.cwd, agentDir);
  const resourceLoader = new DefaultResourceLoader({
    cwd: ctx.cwd,
    agentDir,
    settingsManager,
    noExtensions: true,
    noPromptTemplates: true,
    noThemes: true,
    noSkills: true,
    systemPromptOverride: () => systemPrompt,
    appendSystemPromptOverride: () => [],
    agentsFilesOverride: () => ({ agentsFiles: [] }),
  });
  await resourceLoader.reload();
  const { session } = await createAgentSession({
    // images.length is always 0 here (the image path returned above).
    model: resolveModel(ctx, images.length > 0),
    thinkingLevel,
    sessionManager: SessionManager.inMemory(),
    modelRegistry: ctx.modelRegistry,
    resourceLoader,
    tools: [],
  });
  let output = "";
  // Accumulate streamed text deltas as they arrive.
  const unsubscribe = session.subscribe((event) => {
    if (event.type === "message_update" && event.assistantMessageEvent.type === "text_delta") {
      output += event.assistantMessageEvent.delta;
    }
  });
  try {
    await session.prompt(prompt, {
      images: images.map((page) => page.image),
    });
  } finally {
    unsubscribe();
  }
  // Fallback when streaming produced nothing: scrape the text parts of the
  // final assistant message.
  if (!output.trim()) {
    const assistantMessages = session.messages.filter((message) => message.role === "assistant");
    const lastAssistant = assistantMessages.at(-1);
    if (lastAssistant && Array.isArray(lastAssistant.content)) {
      output = lastAssistant.content
        .filter((part) => part.type === "text")
        .map((part) => part.text)
        .join("");
    }
  }
  session.dispose();
  return stripCodeFence(output).trim();
}
// Rasterizes a PDF to per-page PNGs (200 dpi) via `pdftoppm` under
// <renderRoot>/<jobId>/ and loads each page as a base64 image.
// Throws if pdftoppm fails or produces no pages.
async function renderPdfPages(pdfPath: string, jobId: string): Promise<RenderedPage[]> {
  const renderDir = path.join(getRenderRoot(), jobId);
  await mkdir(renderDir, { recursive: true });
  const prefix = path.join(renderDir, "page");
  const args = ["-png", "-r", "200", pdfPath, prefix];
  const { execFile } = await import("node:child_process");
  await new Promise<void>((resolve, reject) => {
    execFile("pdftoppm", args, (error) => {
      if (error) reject(error);
      else resolve();
    });
  });
  const entries = await readdir(renderDir);
  // Numeric-aware sort keeps page-2 before page-10.
  const pngs = entries
    .filter((entry) => entry.endsWith(".png"))
    .sort((left, right) => left.localeCompare(right, undefined, { numeric: true }));
  if (pngs.length === 0) {
    throw new Error(`No rendered pages produced for ${pdfPath}`);
  }
  const pages: RenderedPage[] = [];
  for (const entry of pngs) {
    const pagePath = path.join(renderDir, entry);
    const buffer = await readFile(pagePath);
    pages.push({
      path: pagePath,
      image: {
        type: "image",
        source: {
          type: "base64",
          mediaType: "image/png",
          data: buffer.toString("base64"),
        },
      },
    });
  }
  return pages;
}
// Loads a single PNG from disk as a base64 page image. Only .png input is
// supported; anything else throws.
async function loadImagePage(imagePath: string): Promise<RenderedPage> {
  if (path.extname(imagePath).toLowerCase() !== ".png") {
    throw new Error(`Unsupported image input format for ${imagePath}`);
  }
  const encoded = (await readFile(imagePath)).toString("base64");
  return {
    path: imagePath,
    image: {
      type: "image",
      source: {
        type: "base64",
        mediaType: "image/png",
        data: encoded,
      },
    },
  };
}
// Renders a job input into page images: PDFs are rasterized page-by-page,
// PNGs are loaded directly; anything else is rejected.
async function renderInputPages(inputPath: string, jobId: string): Promise<RenderedPage[]> {
  switch (path.extname(inputPath).toLowerCase()) {
    case ".pdf":
      return await renderPdfPages(inputPath, jobId);
    case ".png":
      return [await loadImagePage(inputPath)];
    default:
      throw new Error(`Unsupported Notability input format: ${inputPath}`);
  }
}
// Scans the notes tree for markdown files managed by this pipeline
// (frontmatter `managed_by: notability-ingest`) with a matching `note_id`.
// Hidden entries are skipped; unreadable or malformed files are ignored.
// Returns matches sorted by path.
async function findManagedOutputs(noteId: string): Promise<string[]> {
  const matches: string[] = [];
  // Iterative depth-first walk of the notes root.
  const stack = [getNotesRoot()];
  while (stack.length > 0) {
    const currentDir = stack.pop();
    if (!currentDir || !fs.existsSync(currentDir)) continue;
    const entries = await readdir(currentDir, { withFileTypes: true });
    for (const entry of entries) {
      if (entry.name.startsWith(".")) continue;
      const fullPath = path.join(currentDir, entry.name);
      if (entry.isDirectory()) {
        stack.push(fullPath);
        continue;
      }
      if (!entry.isFile() || !entry.name.endsWith(".md")) continue;
      try {
        const parsed = parseFrontmatter(await readFile(fullPath, "utf8"));
        // Frontmatter values may be quoted; strip one layer of quotes.
        const managedBy = parsed.values.managed_by?.replace(/^['"]|['"]$/g, "");
        const frontmatterNoteId = parsed.values.note_id?.replace(/^['"]|['"]$/g, "");
        if (managedBy === "notability-ingest" && frontmatterNoteId === noteId) {
          matches.push(fullPath);
        }
      } catch {
        // Ignore unreadable or malformed files while scanning the notebook.
      }
    }
  }
  return matches.sort();
}
// Resolves where this note's managed markdown actually lives.
// Prefers the configured path when it already holds the note's managed file;
// otherwise falls back to scanning the notes tree (the file may have been
// moved). Throws when the scan finds more than one match, since the right
// target is then ambiguous.
async function resolveManagedOutputPath(noteId: string, configuredOutputPath: string): Promise<string> {
  if (fs.existsSync(configuredOutputPath)) {
    const parsed = parseFrontmatter(await readFile(configuredOutputPath, "utf8"));
    const managedBy = parsed.values.managed_by?.replace(/^['"]|['"]$/g, "");
    const frontmatterNoteId = parsed.values.note_id?.replace(/^['"]|['"]$/g, "");
    if (managedBy === "notability-ingest" && frontmatterNoteId === noteId) {
      return configuredOutputPath;
    }
  }
  const discovered = await findManagedOutputs(noteId);
  if (discovered.length === 0) return configuredOutputPath;
  if (discovered.length === 1) return discovered[0];
  throw new Error(
    `Multiple managed note files found for ${noteId}: ${discovered.join(", ")}`,
  );
}
// Decides where the generated markdown may be written and how:
// - "create": target does not exist yet
// - "overwrite": target is byte-identical to the last generated output, so
//   overwriting cannot lose user edits
// - "force-overwrite": manifest explicitly allows clobbering AND the target
//   is this note's managed file
// - "conflict": anything else; divert to a timestamped sibling file
// NOTE(review): the `markdown` parameter is unused — the decision rests only
// on the existing file's hash and frontmatter. Confirm before removing (it
// would change the call signature).
async function determineWriteTarget(manifest: IngestManifest, markdown: string): Promise<{
  outputPath: string;
  writePath: string;
  writeMode: "create" | "overwrite" | "force-overwrite" | "conflict";
  updatedMainOutput: boolean;
}> {
  const outputPath = await resolveManagedOutputPath(manifest.note_id, manifest.output_path);
  if (!fs.existsSync(outputPath)) {
    return { outputPath, writePath: outputPath, writeMode: "create", updatedMainOutput: true };
  }
  const existing = await readFile(outputPath, "utf8");
  const existingHash = sha256(existing);
  const parsed = parseFrontmatter(existing);
  const isManaged = parsed.values.managed_by?.replace(/^['"]|['"]$/g, "") === "notability-ingest";
  const sameNoteId = parsed.values.note_id?.replace(/^['"]|['"]$/g, "") === manifest.note_id;
  if (manifest.last_generated_output_hash && existingHash === manifest.last_generated_output_hash) {
    return { outputPath, writePath: outputPath, writeMode: "overwrite", updatedMainOutput: true };
  }
  if (manifest.force_overwrite_generated && isManaged && sameNoteId) {
    return { outputPath, writePath: outputPath, writeMode: "force-overwrite", updatedMainOutput: true };
  }
  return {
    outputPath,
    writePath: conflictPathFor(outputPath),
    writeMode: "conflict",
    updatedMainOutput: false,
  };
}
// Persists the job result as pretty-printed JSON, creating parent directories
// as needed.
async function writeIngestResult(resultPath: string, payload: IngestResult): Promise<void> {
  await mkdir(path.dirname(resultPath), { recursive: true });
  await writeFile(resultPath, JSON.stringify(payload, null, 2));
}
// Executes one ingest job end to end:
// 1. render the input into page images
// 2. transcription pass (vision model) -> transcript file
// 3. normalization pass (text model + normalize skill) -> clean markdown
// 4. wrap in managed frontmatter and write to the resolved target (or a
//    conflict file), then persist an IngestResult at manifest.result_path.
// Throws on empty model output; the caller records the failure result.
async function ingestManifest(manifestPath: string, ctx: ExtensionContext): Promise<IngestResult> {
  const manifest = JSON.parse(await readFile(manifestPath, "utf8")) as IngestManifest;
  await ensureParent(manifest.transcript_path);
  await ensureParent(manifest.result_path);
  await mkdir(manifest.session_dir, { recursive: true });
  const normalizeSkill = await loadSkillText(NORMALIZE_SKILL);
  const pages = await renderInputPages(manifest.input_path, manifest.job_id);
  const pageSummary = pages.map((page, index) => `- page ${index + 1}: ${page.path}`).join("\n");
  // Transcription prompt is inline (empty system prompt); pages are attached
  // as images.
  const transcriptPrompt = [
    "Transcribe this note into clean Markdown.",
    "Read it like a human and preserve the intended reading order and visible structure.",
    "Keep headings, lists, and paragraphs when they are visible.",
    "Do not summarize. Do not add commentary. Return Markdown only.",
    "Rendered pages:",
    pageSummary,
  ].join("\n\n");
  let transcript = await runSkillPrompt(
    ctx,
    "",
    transcriptPrompt,
    pages,
    DEFAULT_TRANSCRIBE_THINKING,
  );
  if (!transcript.trim()) {
    throw new Error("Transcription skill returned empty output");
  }
  await writeFile(manifest.transcript_path, `${transcript.trim()}\n`);
  // Normalization runs text-only, with the normalize skill as system prompt.
  const normalizePrompt = [
    `Note ID: ${manifest.note_id}`,
    `Source path: ${manifest.source_relpath}`,
    `Preferred output path: ${manifest.output_path}`,
    "Normalize the following transcription into clean Markdown.",
    "Restore natural prose formatting and intended reading order when the transcription contains OCR or layout artifacts.",
    "If words are split across separate lines but clearly belong to the same phrase or sentence, merge them.",
    "Return only Markdown. No code fences.",
    "",
    "<transcription>",
    transcript.trim(),
    "</transcription>",
  ].join("\n");
  const normalized = await runSkillPrompt(
    ctx,
    normalizeSkill,
    normalizePrompt,
    [],
    DEFAULT_NORMALIZE_THINKING,
  );
  if (!normalized.trim()) {
    throw new Error("Normalization skill returned empty output");
  }
  const markdown = buildMarkdown(manifest, normalized);
  const target = await determineWriteTarget(manifest, markdown);
  await ensureParent(target.writePath);
  await writeFile(target.writePath, markdown);
  const result: IngestResult = {
    success: true,
    job_id: manifest.job_id,
    note_id: manifest.note_id,
    archive_path: manifest.archive_path,
    source_hash: manifest.source_hash,
    session_dir: manifest.session_dir,
    output_path: target.outputPath,
    // Only hash the written file when it is the real managed output (not a
    // conflict sibling).
    output_hash: target.updatedMainOutput ? await sha256File(target.writePath) : undefined,
    conflict_path: target.writeMode === "conflict" ? target.writePath : undefined,
    write_mode: target.writeMode,
    updated_main_output: target.updatedMainOutput,
    transcript_path: manifest.transcript_path,
  };
  await writeIngestResult(manifest.result_path, result);
  return result;
}
// Runs one of the bundled nushell helper scripts and resolves with its
// trimmed stdout. On failure the rejection carries stderr (or stdout) for
// context.
async function runScript(scriptName: string, args: string[]): Promise<string> {
  const { execFile } = await import("node:child_process");
  const script = path.join(getNotabilityScriptDir(), scriptName);
  return await new Promise<string>((resolve, reject) => {
    execFile("nu", [script, ...args], (error, stdout, stderr) => {
      if (!error) {
        resolve(stdout.trim());
        return;
      }
      reject(new Error(stderr || stdout || error.message));
    });
  });
}
// Whitespace-splits a raw argument string, yielding no fragments for
// empty/blank input.
function splitArgs(input: string): string[] {
  const trimmed = input.trim();
  if (trimmed.length === 0) {
    return [];
  }
  return trimmed.split(/\s+/);
}
// Posts a visible status message into the session using the custom
// notability-status message renderer.
function postStatus(pi: ExtensionAPI, content: string): void {
  const statusMessage = {
    customType: STATUS_TYPE,
    content,
    display: true,
  };
  pi.sendMessage(statusMessage);
}
// Extension entry point: registers the status-message renderer and the three
// /note-* commands (status, reingest, ingest).
export default function noteIngestExtension(pi: ExtensionAPI) {
  // Render notability-status messages inside a highlighted box.
  pi.registerMessageRenderer(STATUS_TYPE, (message, _options, theme) => {
    const box = new Box(1, 1, (text) => theme.bg("customMessageBg", text));
    box.addChild(new Text(message.content, 0, 0));
    return box;
  });
  pi.registerCommand("note-status", {
    description: "Show Notability ingest status",
    handler: async (args, _ctx) => {
      const output = await runScript("status.nu", splitArgs(args));
      postStatus(pi, output.length > 0 ? output : "No status output");
    },
  });
  pi.registerCommand("note-reingest", {
    description: "Enqueue a note for reingestion",
    handler: async (args, _ctx) => {
      const trimmed = args.trim();
      if (!trimmed) {
        postStatus(pi, "Usage: /note-reingest <note-id> [--latest-source|--latest-archive] [--force-overwrite-generated]");
        return;
      }
      const output = await runScript("reingest.nu", splitArgs(trimmed));
      postStatus(pi, output.length > 0 ? output : "Reingest enqueued");
    },
  });
  pi.registerCommand("note-ingest", {
    description: "Ingest a queued Notability job manifest",
    handler: async (args, ctx: ExtensionCommandContext) => {
      const manifestPath = normalizePathArg(args.trim());
      if (!manifestPath) {
        throw new Error("Usage: /note-ingest <job.json>");
      }
      // resultPath is only known once the manifest parses; it gates whether a
      // failure result can be persisted in the catch below.
      let resultPath = "";
      try {
        const raw = await readFile(manifestPath, "utf8");
        const manifest = JSON.parse(raw) as IngestManifest;
        resultPath = manifest.result_path;
        const result = await ingestManifest(manifestPath, ctx);
        postStatus(pi, `Ingested ${result.note_id} (${result.write_mode})`);
      } catch (error) {
        const message = error instanceof Error ? error.message : String(error);
        if (resultPath) {
          // NOTE(review): re-reading/re-parsing the manifest here can itself
          // throw and mask the original error (it parsed once above, but the
          // file may have changed); consider reusing the parsed manifest.
          const manifest = JSON.parse(await readFile(manifestPath, "utf8")) as IngestManifest;
          await writeIngestResult(resultPath, {
            success: false,
            job_id: manifest.job_id,
            note_id: manifest.note_id,
            archive_path: manifest.archive_path,
            source_hash: manifest.source_hash,
            session_dir: manifest.session_dir,
            error: message,
          });
        }
        // Re-throw so the command surfaces the failure after recording it.
        throw error;
      }
    },
  });
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,260 +0,0 @@
import type { ExtensionAPI, ExtensionContext } from "@mariozechner/pi-coding-agent";
import {
createAgentSession,
DefaultResourceLoader,
getAgentDir,
SessionManager,
SettingsManager,
} from "@mariozechner/pi-coding-agent";
// Per-session bookkeeping so a session is auto-named at most once.
interface SessionNameState {
  hasAutoNamed: boolean;
}
// Model pinned for title generation.
const TITLE_MODEL = {
  provider: "openai-codex",
  id: "gpt-5.4-mini",
} as const;
// Hard cap applied after post-processing the model's title.
const MAX_TITLE_LENGTH = 50;
// Generation attempts before falling back to a truncated user message.
const MAX_RETRIES = 2;
// Length budget for the fallback (truncated user text) title.
const FALLBACK_LENGTH = 50;
// Session-log entry type recording how the title was produced.
const TITLE_ENTRY_TYPE = "vendored-session-title";
const TITLE_SYSTEM_PROMPT = `You are generating a succinct title for a coding session based on the provided conversation.
Requirements:
- Maximum 50 characters
- Sentence case (capitalize only first word and proper nouns)
- Capture the main intent or task
- Reuse the user's exact words and technical terms
- Match the user's language
- No quotes, colons, or markdown formatting
- No generic titles like "Coding session" or "Help with code"
- No explanations or commentary
Output ONLY the title text. Nothing else.`;
// True when a turn-end event carries a message whose stopReason is "stop"
// (case-insensitive), i.e. the assistant finished normally.
function isTurnCompleted(event: unknown): boolean {
  if (typeof event !== "object" || event === null) return false;
  const message = (event as { message?: unknown }).message;
  if (typeof message !== "object" || message === null) return false;
  const stopReason = (message as { stopReason?: unknown }).stopReason;
  if (typeof stopReason !== "string") return false;
  return stopReason.toLowerCase() === "stop";
}
// Truncates the user's message to a display-length title, breaking at a word
// boundary and appending an ellipsis when trimmed.
function buildFallbackTitle(userText: string): string {
  const text = userText.trim();
  if (text.length <= FALLBACK_LENGTH) {
    return text;
  }
  const head = text.slice(0, FALLBACK_LENGTH - 3);
  const breakAt = head.lastIndexOf(" ");
  const kept = breakAt > 0 ? head.slice(0, breakAt) : head;
  return `${kept}...`;
}
// Cleans raw model output into a displayable title: strips <thinking> blocks,
// wrapping quotes/backticks, markdown heading and emphasis markers, and
// "Title:"-style prefixes; keeps the first non-empty line; finally truncates
// to MAX_TITLE_LENGTH at a word boundary with an ellipsis.
// The replacement order matters: inline cleanup runs before line selection,
// truncation runs last.
function postProcessTitle(raw: string): string {
  let title = raw;
  title = title.replace(/<thinking[\s\S]*?<\/thinking>\s*/g, "");
  title = title.replace(/^["'`]+|["'`]+$/g, "");
  title = title.replace(/^#+\s*/, "");
  title = title.replace(/\*{1,2}(.*?)\*{1,2}/g, "$1");
  title = title.replace(/_{1,2}(.*?)_{1,2}/g, "$1");
  title = title.replace(/^(Title|Summary|Session)\s*:\s*/i, "");
  // First non-empty line, or the whole (possibly empty) string as-is.
  title =
    title
      .split("\n")
      .map((line) => line.trim())
      .find((line) => line.length > 0) ?? title;
  title = title.trim();
  if (title.length > MAX_TITLE_LENGTH) {
    const truncated = title.slice(0, MAX_TITLE_LENGTH - 3);
    const lastSpace = truncated.lastIndexOf(" ");
    title = `${lastSpace > 0 ? truncated.slice(0, lastSpace) : truncated}...`;
  }
  return title;
}
// Text of the most recent user message, or null when there is none.
// String content is returned as-is; array content is flattened to its text
// parts joined with spaces. Note: once the newest user message is found, a
// content value that is neither string nor array yields null (no further
// search backwards).
function getLatestUserText(ctx: ExtensionContext): string | null {
  const entries = ctx.sessionManager.getEntries();
  // Walk backwards so the first hit is the newest message.
  for (let i = entries.length - 1; i >= 0; i -= 1) {
    const entry = entries[i];
    if (!entry || entry.type !== "message") continue;
    if (entry.message.role !== "user") continue;
    const { content } = entry.message as { content: unknown };
    if (typeof content === "string") return content;
    if (!Array.isArray(content)) return null;
    return content
      .filter(
        (part): part is { type: string; text?: string } =>
          typeof part === "object" && part !== null && "type" in part,
      )
      .filter((part) => part.type === "text" && typeof part.text === "string")
      .map((part) => part.text ?? "")
      .join(" ");
  }
  return null;
}
// Text of the most recent assistant message, or null when there is none.
// Mirrors getLatestUserText, except text parts are joined with newlines.
function getLatestAssistantText(ctx: ExtensionContext): string | null {
  const entries = ctx.sessionManager.getEntries();
  // Walk backwards so the first hit is the newest message.
  for (let i = entries.length - 1; i >= 0; i -= 1) {
    const entry = entries[i];
    if (!entry || entry.type !== "message") continue;
    if (entry.message.role !== "assistant") continue;
    const { content } = entry.message as { content: unknown };
    if (typeof content === "string") return content;
    if (!Array.isArray(content)) return null;
    return content
      .filter(
        (part): part is { type: string; text?: string } =>
          typeof part === "object" && part !== null && "type" in part,
      )
      .filter((part) => part.type === "text" && typeof part.text === "string")
      .map((part) => part.text ?? "")
      .join("\n");
  }
  return null;
}
// Resolves the pinned title-generation model, distinguishing "known but no
// API key configured" from "not known at all" in the thrown error.
function resolveModel(ctx: ExtensionContext) {
  const matchesPinned = (candidate: { provider: string; id: string }) =>
    candidate.provider === TITLE_MODEL.provider && candidate.id === TITLE_MODEL.id;
  const model = ctx.modelRegistry.getAvailable().find(matchesPinned);
  if (model) {
    return model;
  }
  if (ctx.modelRegistry.getAll().some(matchesPinned)) {
    throw new Error(
      `Model ${TITLE_MODEL.provider}/${TITLE_MODEL.id} exists but has no configured API key.`,
    );
  }
  throw new Error(`Model ${TITLE_MODEL.provider}/${TITLE_MODEL.id} is not available.`);
}
// Generates a session title via a one-shot, in-memory agent session against
// the pinned TITLE_MODEL. Streamed text deltas are accumulated; the session
// is always unsubscribed and disposed. The raw output is post-processed
// before being returned.
async function generateTitle(userText: string, assistantText: string, ctx: ExtensionContext): Promise<string> {
  const agentDir = getAgentDir();
  const settingsManager = SettingsManager.create(ctx.cwd, agentDir);
  // Minimal loader: no extensions/templates/themes/skills; the system prompt
  // is exactly TITLE_SYSTEM_PROMPT.
  const resourceLoader = new DefaultResourceLoader({
    cwd: ctx.cwd,
    agentDir,
    settingsManager,
    noExtensions: true,
    noPromptTemplates: true,
    noThemes: true,
    noSkills: true,
    systemPromptOverride: () => TITLE_SYSTEM_PROMPT,
    appendSystemPromptOverride: () => [],
    agentsFilesOverride: () => ({ agentsFiles: [] }),
  });
  await resourceLoader.reload();
  const { session } = await createAgentSession({
    model: resolveModel(ctx),
    thinkingLevel: "off",
    sessionManager: SessionManager.inMemory(),
    modelRegistry: ctx.modelRegistry,
    resourceLoader,
  });
  let accumulated = "";
  const unsubscribe = session.subscribe((event) => {
    if (event.type === "message_update" && event.assistantMessageEvent.type === "text_delta") {
      accumulated += event.assistantMessageEvent.delta;
    }
  });
  // Wrap the exchange in tags so the model sees clear boundaries; assistant
  // text is optional.
  const description = assistantText
    ? `<user>${userText}</user>\n<assistant>${assistantText}</assistant>`
    : `<user>${userText}</user>`;
  const userMessage = `<conversation>\n${description}\n</conversation>\n\nGenerate a title:`;
  try {
    await session.prompt(userMessage);
  } finally {
    unsubscribe();
    session.dispose();
  }
  return postProcessTitle(accumulated);
}
// Generates and applies a session title from the latest user/assistant
// exchange. Bails out silently when either side of the exchange is empty.
// Retries generation up to MAX_RETRIES times; on total failure falls back to
// a truncated form of the user text. In both cases an audit entry is appended
// recording inputs, model, and outcome.
async function generateAndSetTitle(pi: ExtensionAPI, ctx: ExtensionContext): Promise<void> {
  const userText = getLatestUserText(ctx);
  if (!userText?.trim()) return;
  const assistantText = getLatestAssistantText(ctx) ?? "";
  if (!assistantText.trim()) return;
  let lastError: Error | null = null;
  for (let attempt = 1; attempt <= MAX_RETRIES; attempt += 1) {
    try {
      const title = await generateTitle(userText, assistantText, ctx);
      // An empty post-processed title counts as a failed attempt.
      if (!title) continue;
      pi.setSessionName(title);
      pi.appendEntry(TITLE_ENTRY_TYPE, {
        title,
        rawUserText: userText,
        rawAssistantText: assistantText,
        attempt,
        model: `${TITLE_MODEL.provider}/${TITLE_MODEL.id}`,
      });
      ctx.ui.notify(`Session: ${title}`, "info");
      return;
    } catch (error) {
      // Remember the most recent failure for the fallback audit entry.
      lastError = error instanceof Error ? error : new Error(String(error));
    }
  }
  const fallback = buildFallbackTitle(userText);
  pi.setSessionName(fallback);
  pi.appendEntry(TITLE_ENTRY_TYPE, {
    title: fallback,
    fallback: true,
    error: lastError?.message ?? "Unknown error",
    rawUserText: userText,
    rawAssistantText: assistantText,
    model: `${TITLE_MODEL.provider}/${TITLE_MODEL.id}`,
  });
  ctx.ui.notify(`Title generation failed, using fallback: ${fallback}`, "warning");
}
/**
 * Extension entry point: after the first completed turn of an unnamed
 * session, generates and applies a session title (at most once per session).
 */
export default function setupSessionNameHook(pi: ExtensionAPI) {
  const state: SessionNameState = { hasAutoNamed: false };
  const resetState = async () => {
    state.hasAutoNamed = false;
  };
  pi.on("session_start", resetState);
  pi.on("session_switch", resetState);
  pi.on("turn_end", async (event, ctx) => {
    if (state.hasAutoNamed) return;
    if (pi.getSessionName()) {
      // A name already exists (e.g. set manually): never overwrite it.
      state.hasAutoNamed = true;
      return;
    }
    if (!isTurnCompleted(event)) return;
    await generateAndSetTitle(pi, ctx);
    state.hasAutoNamed = true;
  });
}

View File

@@ -1,21 +0,0 @@
{
"mcpServers": {
"opensrc": {
"command": "npx",
"args": ["-y", "opensrc-mcp"],
"lifecycle": "eager"
},
"context7": {
"url": "https://mcp.context7.com/mcp",
"lifecycle": "eager"
},
"grep_app": {
"url": "https://mcp.grep.app",
"lifecycle": "eager"
},
"sentry": {
"url": "https://mcp.sentry.dev/mcp",
"auth": "oauth"
}
}
}

View File

@@ -1,143 +0,0 @@
---
name: jujutsu
description: Manages version control with Jujutsu (jj), including rebasing, conflict resolution, and Git interop. Use when tracking changes, navigating history, squashing/splitting commits, or pushing to Git remotes.
---
# Jujutsu
Git-compatible VCS focused on concurrent development and ease of use.
> ⚠️ **Not Git!** Jujutsu syntax differs from Git:
>
> - Parent: `@-` not `@~1` or `@^`
> - Grandparent: `@--` not `@~2`
> - Child: `@+` not `@~-1`
> - Use `jj log` not `jj changes`
## Key Commands
| Command | Description |
| -------------------------- | -------------------------------------------- |
| `jj st` | Show working copy status |
| `jj log` | Show change log |
| `jj diff` | Show changes in working copy |
| `jj new` | Create new change |
| `jj desc` | Edit change description |
| `jj squash` | Move changes to parent |
| `jj split` | Split current change |
| `jj rebase -s src -d dest` | Rebase changes |
| `jj absorb` | Move changes into stack of mutable revisions |
| `jj bisect` | Find bad revision by bisection |
| `jj fix` | Update files with formatting fixes |
| `jj sign` | Cryptographically sign a revision |
| `jj metaedit` | Modify metadata without changing content |
## Basic Workflow
```bash
jj new # Create new change
jj desc -m "feat: add feature" # Set description
jj log # View history
jj edit change-id # Switch to change
jj new --before @ # Time travel (create before current)
jj edit @- # Go to parent
```
## Time Travel
```bash
jj edit change-id # Switch to specific change
jj next --edit # Next child change
jj edit @- # Parent change
jj new --before @ -m msg # Insert before current
```
## Merging & Rebasing
```bash
jj new x yz -m msg # Merge changes
jj rebase -s src -d dest # Rebase source onto dest
jj abandon # Delete current change
```
## Conflicts
```bash
jj resolve # Interactive conflict resolution
# Edit files, then continue
```
## Revset Syntax
**Parent/child operators:**
| Syntax | Meaning | Example |
| ------ | ---------------- | -------------------- |
| `@-` | Parent of @ | `jj diff -r @-` |
| `@--` | Grandparent | `jj log -r @--` |
| `x-` | Parent of x | `jj diff -r abc123-` |
| `@+` | Child of @ | `jj log -r @+` |
| `x::y` | x to y inclusive | `jj log -r main::@` |
| `x..y` | x to y exclusive | `jj log -r main..@` |
| `x\|y` | Union (or) | `jj log -r 'a \| b'` |
**⚠️ Common mistakes:**
- ❌ `@~1` → ✅ `@-` (parent)
- ❌ `@^` → ✅ `@-` (parent)
- ❌ `@~-1` → ✅ `@+` (child)
- ❌ `jj changes` → ✅ `jj log` or `jj diff`
- ❌ `a,b,c` → ✅ `a | b | c` (union uses pipe, not comma)
**Functions:**
```bash
jj log -r 'heads(all())' # All heads
jj log -r 'remote_bookmarks()..' # Not on remote
jj log -r 'author(name)' # By author
jj log -r 'description(regex)' # By description
jj log -r 'mine()' # My commits
jj log -r 'committer_date(after:"7 days ago")' # Recent commits
jj log -r 'mine() & committer_date(after:"yesterday")' # My recent
```
## Templates
```bash
jj log -T 'commit_id ++ "\n" ++ description'
```
## Git Interop
```bash
jj bookmark create main -r @ # Create bookmark
jj git push --bookmark main # Push bookmark
jj git fetch # Fetch from remote
jj bookmark track main@origin # Track remote
```
## Advanced Commands
```bash
jj absorb # Auto-move changes to relevant commits in stack
jj bisect start # Start bisection
jj bisect good # Mark current as good
jj bisect bad # Mark current as bad
jj fix # Run configured formatters on files
jj sign -r @ # Sign current revision
jj metaedit -r @ -m "new message" # Edit metadata only
```
## Tips
- No staging: changes are immediate
- Use conventional commits: `type(scope): desc`
- `jj undo` to revert operations
- `jj op log` to see operation history
- Bookmarks are like branches
- `jj absorb` is powerful for fixing up commits in a stack
## Related Skills
- **gh**: GitHub CLI for PRs and issues
- **review**: Code review before committing

View File

@@ -1,36 +0,0 @@
---
name: notability-normalize
description: Normalizes an exact Notability transcription into clean, searchable Markdown while preserving all original content and uncertainty markers. Use after a faithful transcription pass.
---
# Notability Normalize
You are doing a **Markdown normalization** pass on a previously transcribed Notability note.
## Rules
- Do **not** summarize.
- Do **not** remove uncertainty markers such as `[unclear: ...]`.
- Preserve all substantive content from the transcription.
- Clean up only formatting and Markdown structure.
- Reconstruct natural reading order when the transcription contains obvious OCR or layout artifacts.
- Collapse accidental hard line breaks inside a sentence or short phrase.
- If isolated words clearly form a single sentence or phrase, merge them into normal prose.
- Prefer readable Markdown headings, lists, and tables.
- Keep content in the same overall order as the transcription.
- Do not invent content.
- Do not output code fences.
- Output Markdown only.
## Output
- Produce a clean Markdown document.
- Include a top-level `#` heading if the note clearly has a title.
- Use standard Markdown lists and checkboxes.
- Represent tables as Markdown tables when practical.
- Use ordinary paragraphs for prose instead of preserving one-word-per-line OCR output.
- Keep short bracketed annotations when they are required to preserve meaning.
## Important
The source PDF remains the ground truth. When in doubt, preserve ambiguity instead of cleaning it away.

View File

@@ -1,38 +0,0 @@
---
name: notability-transcribe
description: Faithfully transcribes handwritten or mixed handwritten/typed Notability note pages into Markdown without summarizing. Use when converting note page images or PDFs into an exact textual transcription.
---
# Notability Transcribe
You are doing a **faithful transcription** pass for handwritten Notability notes.
## Rules
- Preserve the original order of content.
- Reconstruct the intended reading order from the page layout.
- Read the page in the order a human would: top-to-bottom and left-to-right, while respecting obvious grouping.
- Do **not** summarize, explain, clean up, or reorganize beyond what is necessary to transcribe faithfully.
- Preserve headings, bullets, numbered items, checkboxes, tables, separators, callouts, and obvious layout structure.
- Do **not** preserve accidental OCR-style hard line breaks when the note is clearly continuous prose or a single phrase.
- If words are staggered on the page but clearly belong to the same sentence, combine them into normal lines.
- If text is uncertain, keep the uncertainty inline as `[unclear: ...]`.
- If a word is partially legible, include the best reading and uncertainty marker.
- If there is a drawing or diagram that cannot be represented exactly, describe it minimally in brackets, for example `[diagram: arrow from A to B]`.
- Preserve language exactly as written.
- Do not invent missing words.
- Do not output code fences.
- Output Markdown only.
## Output shape
- Use headings when headings are clearly present.
- Use `- [ ]` or `- [x]` for checkboxes when visible.
- Use bullet lists for bullet lists.
- Use normal paragraphs or single-line phrases for continuous prose instead of one word per line.
- Keep side notes in the position that best preserves reading order.
- Insert blank lines between major sections.
## Safety
If a page is partly unreadable, still transcribe everything you can and mark uncertain content with `[unclear: ...]`.

View File

@@ -1,11 +1,28 @@
# Global AGENTS.md # AGENTS.md
## Version Control ## Version Control
- Use `jj` for VCS, not `git` - Use `jj` for version control, not `git`.
- `jj tug` is an alias for `jj bookmark move --from closest_bookmark(@-) --to @-` - `jj tug` is an alias for `jj bookmark move --from closest_bookmark(@-) --to @-`.
- Never run destructive Git commands that rewrite history.
- Make small, frequent commits.
- "Commit" means `jj commit`, not `jj desc`; `desc` stays on the same working copy.
## Scripting ## Scripting
- Always use Nushell (`nu`) for scripting - Use Nushell (`nu`) for scripting.
- Never use Python, Perl, Lua, awk, or any other scripting language - Do not use Python, Perl, Lua, awk, or any other scripting language. You are programmatically blocked from doing so.
## Workflow
- Always complete the requested work.
- If there is any ambiguity about what to do next, do NOT make a decision yourself. Stop your work and ask.
- Do not end with “If you want me to…” or “I can…”; take the next necessary step and finish the job without waiting for additional confirmation.
- Do not future-proof things. Stick to the original plan.
- Do not add fallbacks or backward compatibility unless explicitly required by the user. By default, replace the previous implementation with the new one entirely.
## Validation
- Do not ignore failing tests or checks, even if they appear unrelated to your changes.
- After completing and validating your work, the final step is to run the project's full validation and test commands and ensure they all pass.

View File

@@ -1,10 +0,0 @@
{inputs, ...}: final: prev: {
pi-agent-stuff =
prev.buildNpmPackage {
pname = "pi-agent-stuff";
version = "1.5.0";
src = inputs.pi-agent-stuff;
npmDepsHash = "sha256-pyXMNdlie8vAkhz2f3GUGT3CCYuwt+xkWnsijBajXIo=";
dontNpmBuild = true;
};
}

View File

@@ -1,33 +0,0 @@
{inputs, ...}: final: prev: {
pi-harness =
prev.stdenvNoCC.mkDerivation {
pname = "pi-harness";
version = "0.0.0";
src = inputs.pi-harness;
pnpmDeps =
prev.fetchPnpmDeps {
pname = "pi-harness";
version = "0.0.0";
src = inputs.pi-harness;
pnpm = prev.pnpm_10;
fetcherVersion = 3;
hash = "sha256-lNcZRCmmwq9t05UjVWcuGq+ZzRHuHNmqKQIVPh6DoxQ=";
};
nativeBuildInputs = [
prev.pnpmConfigHook
prev.pnpm_10
prev.nodejs
];
dontBuild = true;
installPhase = ''
runHook preInstall
mkdir -p $out/lib/node_modules/@aliou/pi-harness
cp -r . $out/lib/node_modules/@aliou/pi-harness
runHook postInstall
'';
};
}

View File

@@ -1,10 +0,0 @@
{inputs, ...}: final: prev: {
pi-mcp-adapter =
prev.buildNpmPackage {
pname = "pi-mcp-adapter";
version = "2.2.0";
src = inputs.pi-mcp-adapter;
npmDepsHash = "sha256-myJ9h/zC/KDddt8NOVvJjjqbnkdEN4ZR+okCR5nu7hM=";
dontNpmBuild = true;
};
}

View File

@@ -123,68 +123,5 @@ in {
}; };
"opencode/AGENTS.md".source = ./_opencode/AGENTS.md; "opencode/AGENTS.md".source = ./_opencode/AGENTS.md;
}; };
home.file = {
"AGENTS.md".source = ./_ai-tools/AGENTS.md;
".pi/agent/extensions/pi-elixir" = {
source = inputs.pi-elixir;
recursive = true;
};
".pi/agent/extensions/pi-mcp-adapter" = {
source = "${pkgs.pi-mcp-adapter}/lib/node_modules/pi-mcp-adapter";
recursive = true;
};
".pi/agent/extensions/no-git.ts".source = ./_ai-tools/extensions/no-git.ts;
".pi/agent/extensions/no-scripting.ts".source = ./_ai-tools/extensions/no-scripting.ts;
".pi/agent/extensions/note-ingest.ts".source = ./_ai-tools/extensions/note-ingest.ts;
".pi/agent/extensions/review.ts".source = ./_ai-tools/extensions/review.ts;
".pi/agent/extensions/session-name.ts".source = ./_ai-tools/extensions/session-name.ts;
".pi/agent/notability" = {
source = ./_notability;
recursive = true;
};
".pi/agent/skills/elixir-dev" = {
source = "${inputs.pi-elixir}/skills/elixir-dev";
recursive = true;
};
".pi/agent/skills/jujutsu/SKILL.md".source = ./_ai-tools/skills/jujutsu/SKILL.md;
".pi/agent/skills/notability-transcribe/SKILL.md".source = ./_ai-tools/skills/notability-transcribe/SKILL.md;
".pi/agent/skills/notability-normalize/SKILL.md".source = ./_ai-tools/skills/notability-normalize/SKILL.md;
".pi/agent/themes" = {
source = "${inputs.pi-rose-pine}/themes";
recursive = true;
};
".pi/agent/settings.json".text =
builtins.toJSON {
theme = "rose-pine-dawn";
quietStartup = true;
hideThinkingBlock = true;
defaultProvider = "openai-codex";
defaultModel = "gpt-5.4";
defaultThinkingLevel = "high";
packages = [
{
source = "${pkgs.pi-agent-stuff}/lib/node_modules/mitsupi";
extensions = [
"pi-extensions/answer.ts"
"pi-extensions/context.ts"
"pi-extensions/multi-edit.ts"
"pi-extensions/todos.ts"
];
skills = [];
prompts = [];
themes = [];
}
{
source = "${pkgs.pi-harness}/lib/node_modules/@aliou/pi-harness";
extensions = ["extensions/breadcrumbs/index.ts"];
skills = [];
prompts = [];
themes = [];
}
];
};
".pi/agent/mcp.json".source = ./_ai-tools/mcp.json;
};
}; };
} }

View File

@@ -54,26 +54,6 @@
inputs.nixpkgs.follows = "nixpkgs"; inputs.nixpkgs.follows = "nixpkgs";
}; };
llm-agents.url = "github:numtide/llm-agents.nix"; llm-agents.url = "github:numtide/llm-agents.nix";
pi-agent-stuff = {
url = "github:mitsuhiko/agent-stuff";
flake = false;
};
pi-elixir = {
url = "github:dannote/pi-elixir";
flake = false;
};
pi-rose-pine = {
url = "github:zenobi-us/pi-rose-pine";
flake = false;
};
pi-harness = {
url = "github:aliou/pi-harness";
flake = false;
};
pi-mcp-adapter = {
url = "github:nicobailon/pi-mcp-adapter";
flake = false;
};
qmd.url = "github:tobi/qmd"; qmd.url = "github:tobi/qmd";
# Overlay inputs # Overlay inputs
himalaya.url = "github:pimalaya/himalaya"; himalaya.url = "github:pimalaya/himalaya";

View File

@@ -26,7 +26,6 @@ in {
]; ];
commonPath = with pkgs; commonPath = with pkgs;
[ [
inputs'.llm-agents.packages.pi
coreutils coreutils
inotify-tools inotify-tools
nushell nushell

View File

@@ -20,12 +20,6 @@
(import ./_overlays/jj-ryu.nix {inherit inputs;}) (import ./_overlays/jj-ryu.nix {inherit inputs;})
# cog-cli # cog-cli
(import ./_overlays/cog-cli.nix {inherit inputs;}) (import ./_overlays/cog-cli.nix {inherit inputs;})
# pi-agent-stuff (mitsuhiko)
(import ./_overlays/pi-agent-stuff.nix {inherit inputs;})
# pi-harness (aliou)
(import ./_overlays/pi-harness.nix {inherit inputs;})
# pi-mcp-adapter
(import ./_overlays/pi-mcp-adapter.nix {inherit inputs;})
# qmd # qmd
(import ./_overlays/qmd.nix {inherit inputs;}) (import ./_overlays/qmd.nix {inherit inputs;})
# jj-starship (passes through upstream overlay) # jj-starship (passes through upstream overlay)