Remove pi agent infrastructure
This commit is contained in:
@@ -1,27 +0,0 @@
|
||||
# AGENTS.md
|
||||
|
||||
## Version Control
|
||||
|
||||
- Use `jj` for version control, not `git`.
|
||||
- `jj tug` is an alias for `jj bookmark move --from closest_bookmark(@-) --to @-`.
|
||||
- Never attempt historically destructive Git commands.
|
||||
- Make small, frequent commits.
|
||||
|
||||
## Scripting
|
||||
|
||||
- Use Nushell (`nu`) for scripting.
|
||||
- Do not use Python, Perl, Lua, awk, or any other scripting language. You are programmatically blocked from doing so.
|
||||
|
||||
## Workflow
|
||||
|
||||
- Always complete the requested work.
|
||||
- If there is any ambiguity about what to do next, do NOT make a decision yourself. Stop your work and ask.
|
||||
- Do not end with “If you want me to…” or “I can…”; take the next necessary step and finish the job without waiting for additional confirmation.
|
||||
- Do not future-proof things. Stick to the original plan.
|
||||
- Do not add fallbacks or backward compatibility unless explicitly required by the user. By default, replace the previous implementation with the new one entirely.
|
||||
|
||||
## Validation
|
||||
|
||||
- Do not ignore failing tests or checks, even if they appear unrelated to your changes.
|
||||
- After completing and validating your work, the final step is to run the project's full validation and test commands and ensure they all pass.
|
||||
|
||||
@@ -1,190 +0,0 @@
|
||||
/**
|
||||
* No Git Extension
|
||||
*
|
||||
* Blocks direct git invocations and tells the LLM to use jj (Jujutsu) instead.
|
||||
* Mentions of the word "git" in search patterns, strings, comments, etc. are allowed.
|
||||
*/
|
||||
|
||||
import type { ExtensionAPI } from "@mariozechner/pi-coding-agent";
|
||||
import { isToolCallEventType } from "@mariozechner/pi-coding-agent";
|
||||
|
||||
// Lexer token produced by tokenizeShell: either a command word or a shell
// control operator (&&, ||, ;, |, parentheses, newline).
type ShellToken =
  | { type: "word"; value: string }
  | { type: "operator"; value: string };
|
||||
|
||||
// Wrapper commands that may precede the real command (e.g. `sudo git …`);
// skipped when locating the command word.
const COMMAND_PREFIXES = new Set(["env", "command", "builtin", "time", "sudo", "nohup", "nice"]);
// Shell control-flow keywords that can legally appear before a command word.
const SHELL_KEYWORDS = new Set(["if", "then", "elif", "else", "do", "while", "until", "case", "in"]);
// Interpreters whose inline `-c`/`--command` script is recursively scanned.
const SHELL_INTERPRETERS = new Set(["bash", "sh", "zsh", "fish", "nu"]);
|
||||
|
||||
function isAssignmentWord(value: string): boolean {
|
||||
return /^[A-Za-z_][A-Za-z0-9_]*=.*/.test(value);
|
||||
}
|
||||
|
||||
/**
 * Minimal shell lexer: splits a command line into word and operator tokens.
 *
 * Handles single/double quotes and backslash escapes; recognizes the
 * operators `&&`, `||`, `;`, `|`, `(`, `)` and newline. It does NOT expand
 * variables or handle here-docs — it only needs to be good enough to locate
 * command words for the git check.
 */
function tokenizeShell(command: string): ShellToken[] {
  const tokens: ShellToken[] = [];
  let current = "";
  // Currently open quote character, or null when outside quotes.
  let quote: "'" | '"' | null = null;

  // Flushes the accumulated characters as a word token, if any.
  const pushWord = () => {
    if (!current) return;
    tokens.push({ type: "word", value: current });
    current = "";
  };

  for (let i = 0; i < command.length; i++) {
    const char = command[i];

    if (quote) {
      if (quote === "'") {
        // Inside single quotes everything is literal until the closing quote.
        if (char === "'") {
          quote = null;
        } else {
          current += char;
        }
        continue;
      }

      // Inside double quotes: closing quote ends the region…
      if (char === '"') {
        quote = null;
        continue;
      }

      // …and backslash escapes the next character.
      if (char === "\\") {
        if (i + 1 < command.length) {
          current += command[i + 1];
          i += 1;
        }
        continue;
      }

      current += char;
      continue;
    }

    // Unquoted context from here on.
    if (char === "'" || char === '"') {
      quote = char;
      continue;
    }

    if (char === "\\") {
      // Backslash outside quotes escapes the next character.
      if (i + 1 < command.length) {
        current += command[i + 1];
        i += 1;
      }
      continue;
    }

    if (/\s/.test(char)) {
      pushWord();
      // Newlines separate commands, so surface them as operators.
      if (char === "\n") {
        tokens.push({ type: "operator", value: "\n" });
      }
      continue;
    }

    // Two-character operators take priority over single-character ones.
    const twoCharOperator = command.slice(i, i + 2);
    if (twoCharOperator === "&&" || twoCharOperator === "||") {
      pushWord();
      tokens.push({ type: "operator", value: twoCharOperator });
      i += 1;
      continue;
    }

    if (char === ";" || char === "|" || char === "(" || char === ")") {
      pushWord();
      tokens.push({ type: "operator", value: char });
      continue;
    }

    current += char;
  }

  pushWord();
  return tokens;
}
|
||||
|
||||
function findCommandWord(words: string[]): { word?: string; index: number } {
|
||||
for (let i = 0; i < words.length; i++) {
|
||||
const word = words[i];
|
||||
if (SHELL_KEYWORDS.has(word)) {
|
||||
continue;
|
||||
}
|
||||
if (isAssignmentWord(word)) {
|
||||
continue;
|
||||
}
|
||||
if (COMMAND_PREFIXES.has(word)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
return { word, index: i };
|
||||
}
|
||||
|
||||
return { index: words.length };
|
||||
}
|
||||
|
||||
function getInlineShellCommand(words: string[], commandIndex: number): string | null {
|
||||
for (let i = commandIndex + 1; i < words.length; i++) {
|
||||
const word = words[i];
|
||||
if (/^(?:-[A-Za-z]*c[A-Za-z]*|--command)$/.test(word)) {
|
||||
return words[i + 1] ?? null;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
function segmentContainsBlockedGit(words: string[]): boolean {
|
||||
const { word, index } = findCommandWord(words);
|
||||
if (!word) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (word === "git") {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (word === "jj") {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (SHELL_INTERPRETERS.has(word)) {
|
||||
const inlineCommand = getInlineShellCommand(words, index);
|
||||
return inlineCommand ? containsBlockedGitInvocation(inlineCommand) : false;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
function containsBlockedGitInvocation(command: string): boolean {
|
||||
const tokens = tokenizeShell(command);
|
||||
let words: string[] = [];
|
||||
|
||||
for (const token of tokens) {
|
||||
if (token.type === "operator") {
|
||||
if (segmentContainsBlockedGit(words)) {
|
||||
return true;
|
||||
}
|
||||
words = [];
|
||||
continue;
|
||||
}
|
||||
|
||||
words.push(token.value);
|
||||
}
|
||||
|
||||
return segmentContainsBlockedGit(words);
|
||||
}
|
||||
|
||||
export default function (pi: ExtensionAPI) {
|
||||
pi.on("tool_call", async (event, _ctx) => {
|
||||
if (!isToolCallEventType("bash", event)) return;
|
||||
|
||||
const command = event.input.command.trim();
|
||||
|
||||
if (containsBlockedGitInvocation(command)) {
|
||||
return {
|
||||
block: true,
|
||||
reason: "git is not used in this project. Use jj (Jujutsu) instead.",
|
||||
};
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -1,28 +0,0 @@
|
||||
/**
|
||||
* No Scripting Extension
|
||||
*
|
||||
* Blocks python, perl, ruby, php, lua, node -e, and inline bash/sh scripts.
|
||||
* Tells the LLM to use `nu -c` instead.
|
||||
*/
|
||||
|
||||
import type { ExtensionAPI } from "@mariozechner/pi-coding-agent";
|
||||
import { isToolCallEventType } from "@mariozechner/pi-coding-agent";
|
||||
|
||||
// Matches a blocked interpreter (python/python2/python3, perl, ruby, php,
// lua, `node -e`, `bash -c`, `sh -c`) at the start of the command or right
// after a shell operator (; & |, && / ||), `$(` substitution, or a backtick.
// The trailing \s means the interpreter must be followed by an argument.
const SCRIPTING_PATTERN =
  /(?:^|[;&|]\s*|&&\s*|\|\|\s*|\$\(\s*|`\s*)(?:python[23]?|perl|ruby|php|lua|node\s+-e|bash\s+-c|sh\s+-c)\s/;
|
||||
|
||||
export default function (pi: ExtensionAPI) {
|
||||
pi.on("tool_call", async (event, _ctx) => {
|
||||
if (!isToolCallEventType("bash", event)) return;
|
||||
|
||||
const command = event.input.command.trim();
|
||||
|
||||
if (SCRIPTING_PATTERN.test(command)) {
|
||||
return {
|
||||
block: true,
|
||||
reason:
|
||||
"Do not use python, perl, ruby, php, lua, node -e, or inline bash/sh for scripting. Use `nu -c` instead.",
|
||||
};
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -1,687 +0,0 @@
|
||||
import { readFile, writeFile, mkdir, readdir } from "node:fs/promises";
|
||||
import * as fs from "node:fs";
|
||||
import * as os from "node:os";
|
||||
import * as path from "node:path";
|
||||
import * as crypto from "node:crypto";
|
||||
import { Box, Text } from "@mariozechner/pi-tui";
|
||||
import type { ExtensionAPI, ExtensionContext, ExtensionCommandContext, Model } from "@mariozechner/pi-coding-agent";
|
||||
import {
|
||||
createAgentSession,
|
||||
DefaultResourceLoader,
|
||||
getAgentDir,
|
||||
SessionManager,
|
||||
SettingsManager,
|
||||
} from "@mariozechner/pi-coding-agent";
|
||||
|
||||
/**
 * Job manifest produced by the external Notability sync pipeline, one JSON
 * file per ingest job. Field meanings below are inferred from how this file
 * consumes them — the producer is not visible in this chunk.
 */
interface IngestManifest {
  version: number;
  job_id: string;
  note_id: string;
  operation: string;
  // ISO timestamp; its date prefix becomes the note's `created` field.
  requested_at: string;
  title: string;
  source_relpath: string;
  source_path: string;
  // Renderable input file (PDF or PNG) that gets transcribed.
  input_path: string;
  // Archived copy of the original source, referenced from frontmatter.
  archive_path: string;
  // Preferred destination for the generated Markdown note.
  output_path: string;
  transcript_path: string;
  result_path: string;
  session_dir: string;
  source_hash: string;
  // Hash of the previously generated output; a file matching it has no
  // manual edits and is safe to overwrite.
  last_generated_output_hash?: string | null;
  force_overwrite_generated?: boolean;
  source_transport?: string;
}

/** Result payload persisted to `result_path` after an ingest attempt. */
interface IngestResult {
  success: boolean;
  job_id: string;
  note_id: string;
  archive_path: string;
  source_hash: string;
  session_dir: string;
  output_path?: string;
  output_hash?: string;
  conflict_path?: string;
  write_mode?: "create" | "overwrite" | "force-overwrite" | "conflict";
  updated_main_output?: boolean;
  transcript_path?: string;
  error?: string;
}

/** Parsed YAML frontmatter: flat key→value map plus the remaining body. */
interface FrontmatterInfo {
  values: Record<string, string>;
  body: string;
}

/** A rendered note page: PNG path on disk plus a base64 image attachment. */
interface RenderedPage {
  path: string;
  image: {
    type: "image";
    source: {
      type: "base64";
      mediaType: string;
      data: string;
    };
  };
}

// Skill names resolved under <agentDir>/skills/<name>/SKILL.md.
// NOTE(review): TRANSCRIBE_SKILL appears unused in this file — transcription
// uses an inline prompt; confirm before removing.
const TRANSCRIBE_SKILL = "notability-transcribe";
const NORMALIZE_SKILL = "notability-normalize";
// Custom message type used to render status output in the chat.
const STATUS_TYPE = "notability-status";
const DEFAULT_TRANSCRIBE_THINKING = "low" as const;
const DEFAULT_NORMALIZE_THINKING = "off" as const;
// [provider, id] tried first when an image-capable model is required.
const PREFERRED_VISION_MODEL: [string, string] = ["openai-codex", "gpt-5.4"];
|
||||
|
||||
function getNotesRoot(): string {
|
||||
return process.env.NOTABILITY_NOTES_DIR ?? path.join(os.homedir(), "Notes");
|
||||
}
|
||||
|
||||
function getDataRoot(): string {
|
||||
return process.env.NOTABILITY_DATA_ROOT ?? path.join(os.homedir(), ".local", "share", "notability-ingest");
|
||||
}
|
||||
|
||||
function getRenderRoot(): string {
|
||||
return process.env.NOTABILITY_RENDER_ROOT ?? path.join(getDataRoot(), "rendered-pages");
|
||||
}
|
||||
|
||||
function getNotabilityScriptDir(): string {
|
||||
return path.join(getAgentDir(), "notability");
|
||||
}
|
||||
|
||||
function getSkillPath(skillName: string): string {
|
||||
return path.join(getAgentDir(), "skills", skillName, "SKILL.md");
|
||||
}
|
||||
|
||||
function stripFrontmatterBlock(text: string): string {
|
||||
const trimmed = text.trim();
|
||||
if (!trimmed.startsWith("---\n")) return trimmed;
|
||||
const end = trimmed.indexOf("\n---\n", 4);
|
||||
if (end === -1) return trimmed;
|
||||
return trimmed.slice(end + 5).trim();
|
||||
}
|
||||
|
||||
function stripCodeFence(text: string): string {
|
||||
const trimmed = text.trim();
|
||||
const match = trimmed.match(/^```(?:markdown|md)?\n([\s\S]*?)\n```$/i);
|
||||
return match ? match[1].trim() : trimmed;
|
||||
}
|
||||
|
||||
function parseFrontmatter(text: string): FrontmatterInfo {
|
||||
const trimmed = stripCodeFence(text);
|
||||
if (!trimmed.startsWith("---\n")) {
|
||||
return { values: {}, body: trimmed };
|
||||
}
|
||||
|
||||
const end = trimmed.indexOf("\n---\n", 4);
|
||||
if (end === -1) {
|
||||
return { values: {}, body: trimmed };
|
||||
}
|
||||
|
||||
const block = trimmed.slice(4, end);
|
||||
const body = trimmed.slice(end + 5).trim();
|
||||
const values: Record<string, string> = {};
|
||||
for (const line of block.split("\n")) {
|
||||
const idx = line.indexOf(":");
|
||||
if (idx === -1) continue;
|
||||
const key = line.slice(0, idx).trim();
|
||||
const value = line.slice(idx + 1).trim();
|
||||
values[key] = value;
|
||||
}
|
||||
return { values, body };
|
||||
}
|
||||
|
||||
function quoteYaml(value: string): string {
|
||||
return JSON.stringify(value);
|
||||
}
|
||||
|
||||
function sha256(content: string | Buffer): string {
|
||||
return crypto.createHash("sha256").update(content).digest("hex");
|
||||
}
|
||||
|
||||
async function sha256File(filePath: string): Promise<string> {
|
||||
const buffer = await readFile(filePath);
|
||||
return sha256(buffer);
|
||||
}
|
||||
|
||||
function extractTitle(normalized: string, fallbackTitle: string): string {
|
||||
const parsed = parseFrontmatter(normalized);
|
||||
const frontmatterTitle = parsed.values.title?.replace(/^['"]|['"]$/g, "").trim();
|
||||
if (frontmatterTitle) return frontmatterTitle;
|
||||
const heading = parsed.body
|
||||
.split("\n")
|
||||
.map((line) => line.trim())
|
||||
.find((line) => line.startsWith("# "));
|
||||
if (heading) return heading.replace(/^#\s+/, "").trim();
|
||||
return fallbackTitle;
|
||||
}
|
||||
|
||||
function sourceFormat(filePath: string): string {
|
||||
const extension = path.extname(filePath).toLowerCase();
|
||||
if (extension === ".pdf") return "pdf";
|
||||
if (extension === ".png") return "png";
|
||||
return extension.replace(/^\./, "") || "unknown";
|
||||
}
|
||||
|
||||
/**
 * Wraps normalized note Markdown in managed frontmatter (title, provenance,
 * hashes, tags). When the normalized body is empty, a bare `# <title>`
 * heading is used so the note is never blank.
 */
function buildMarkdown(manifest: IngestManifest, normalized: string): string {
  const parsed = parseFrontmatter(normalized);
  const title = extractTitle(normalized, manifest.title);
  // Second-precision UTC timestamp; only its date part is written below.
  const now = new Date().toISOString().replace(/\.\d{3}Z$/, "Z");
  const created = manifest.requested_at.slice(0, 10);
  const body = parsed.body.trim();
  const outputBody = body.length > 0 ? body : `# ${title}\n`;

  return [
    "---",
    `title: ${quoteYaml(title)}`,
    `created: ${quoteYaml(created)}`,
    `updated: ${quoteYaml(now.slice(0, 10))}`,
    `source: ${quoteYaml("notability")}`,
    `source_transport: ${quoteYaml(manifest.source_transport ?? "webdav")}`,
    `source_relpath: ${quoteYaml(manifest.source_relpath)}`,
    `note_id: ${quoteYaml(manifest.note_id)}`,
    `managed_by: ${quoteYaml("notability-ingest")}`,
    `source_file: ${quoteYaml(manifest.archive_path)}`,
    `source_file_hash: ${quoteYaml(`sha256:${manifest.source_hash}`)}`,
    `source_format: ${quoteYaml(sourceFormat(manifest.archive_path))}`,
    `status: ${quoteYaml("active")}`,
    "tags:",
    "  - handwritten",
    "  - notability",
    "---",
    "",
    outputBody,
    "",
  ].join("\n");
}
|
||||
|
||||
function conflictPathFor(outputPath: string): string {
|
||||
const parsed = path.parse(outputPath);
|
||||
const stamp = new Date().toISOString().replace(/[:]/g, "-").replace(/\.\d{3}Z$/, "Z");
|
||||
return path.join(parsed.dir, `${parsed.name}.conflict-${stamp}${parsed.ext}`);
|
||||
}
|
||||
|
||||
async function ensureParent(filePath: string): Promise<void> {
|
||||
await mkdir(path.dirname(filePath), { recursive: true });
|
||||
}
|
||||
|
||||
async function loadSkillText(skillName: string): Promise<string> {
|
||||
const raw = await readFile(getSkillPath(skillName), "utf8");
|
||||
return stripFrontmatterBlock(raw).trim();
|
||||
}
|
||||
|
||||
function normalizePathArg(arg: string): string {
|
||||
return arg.startsWith("@") ? arg.slice(1) : arg;
|
||||
}
|
||||
|
||||
function resolveModel(ctx: ExtensionContext, requireImage = false): Model {
|
||||
const available = ctx.modelRegistry.getAvailable();
|
||||
const matching = requireImage ? available.filter((model) => model.input.includes("image")) : available;
|
||||
|
||||
if (matching.length === 0) {
|
||||
throw new Error(
|
||||
requireImage
|
||||
? "No image-capable model configured for pi note ingestion"
|
||||
: "No available model configured for pi note ingestion",
|
||||
);
|
||||
}
|
||||
|
||||
if (ctx.model && (!requireImage || ctx.model.input.includes("image"))) {
|
||||
if (!requireImage) return ctx.model;
|
||||
}
|
||||
|
||||
if (requireImage) {
|
||||
const [provider, id] = PREFERRED_VISION_MODEL;
|
||||
const preferred = matching.find((model) => model.provider === provider && model.id === id);
|
||||
if (preferred) return preferred;
|
||||
|
||||
const subscriptionModel = matching.find(
|
||||
(model) => model.provider !== "opencode" && model.provider !== "opencode-go",
|
||||
);
|
||||
if (subscriptionModel) return subscriptionModel;
|
||||
}
|
||||
|
||||
if (ctx.model && (!requireImage || ctx.model.input.includes("image"))) {
|
||||
return ctx.model;
|
||||
}
|
||||
|
||||
return matching[0];
|
||||
}
|
||||
|
||||
/**
 * Runs a single prompt against a model and returns the trimmed, unfenced
 * text output.
 *
 * Two execution paths:
 * - with images: shells out to the `pi` CLI (wrapped in `timeout 45s`),
 *   passing pages and the prompt as `@file` references.
 *   NOTE(review): `systemPrompt` is ignored on this path — confirm intended.
 * - without images: spins up an in-memory agent session with all resources
 *   disabled and `systemPrompt` installed as the system-prompt override.
 */
async function runSkillPrompt(
  ctx: ExtensionContext,
  systemPrompt: string,
  prompt: string,
  images: RenderedPage[] = [],
  thinkingLevel: "off" | "low" = "off",
): Promise<string> {
  if (images.length > 0) {
    const model = resolveModel(ctx, true);
    const { execFile } = await import("node:child_process");
    // Prompt is passed via a temp file so it survives shell quoting.
    const promptPath = path.join(os.tmpdir(), `pi-note-ingest-${crypto.randomUUID()}.md`);
    await writeFile(promptPath, `${prompt}\n`);
    const args = [
      "45s",
      "pi",
      "--model",
      `${model.provider}/${model.id}`,
      "--thinking",
      thinkingLevel,
      "--no-tools",
      "--no-session",
      "-p",
      ...images.map((page) => `@${page.path}`),
      `@${promptPath}`,
    ];

    try {
      const output = await new Promise<string>((resolve, reject) => {
        execFile("timeout", args, { cwd: ctx.cwd, env: process.env, maxBuffer: 10 * 1024 * 1024 }, (error, stdout, stderr) => {
          // Non-empty stdout wins even when the process errored (e.g. the
          // timeout fired after usable output was produced).
          if ((stdout ?? "").trim().length > 0) {
            resolve(stdout);
            return;
          }
          if (error) {
            reject(new Error(stderr || stdout || error.message));
            return;
          }
          resolve(stdout);
        });
      });

      return stripCodeFence(output).trim();
    } finally {
      try {
        fs.unlinkSync(promptPath);
      } catch {
        // Ignore temp file cleanup failures.
      }
    }
  }

  // Text-only path: isolated in-memory session with no extensions, skills,
  // templates, or agents files — only the provided system prompt.
  const agentDir = getAgentDir();
  const settingsManager = SettingsManager.create(ctx.cwd, agentDir);
  const resourceLoader = new DefaultResourceLoader({
    cwd: ctx.cwd,
    agentDir,
    settingsManager,
    noExtensions: true,
    noPromptTemplates: true,
    noThemes: true,
    noSkills: true,
    systemPromptOverride: () => systemPrompt,
    appendSystemPromptOverride: () => [],
    agentsFilesOverride: () => ({ agentsFiles: [] }),
  });
  await resourceLoader.reload();

  const { session } = await createAgentSession({
    model: resolveModel(ctx, images.length > 0),
    thinkingLevel,
    sessionManager: SessionManager.inMemory(),
    modelRegistry: ctx.modelRegistry,
    resourceLoader,
    tools: [],
  });

  // Accumulate streamed text deltas.
  let output = "";
  const unsubscribe = session.subscribe((event) => {
    if (event.type === "message_update" && event.assistantMessageEvent.type === "text_delta") {
      output += event.assistantMessageEvent.delta;
    }
  });

  try {
    await session.prompt(prompt, {
      images: images.map((page) => page.image),
    });
  } finally {
    unsubscribe();
  }

  // Fall back to the last assistant message if no deltas were streamed.
  if (!output.trim()) {
    const assistantMessages = session.messages.filter((message) => message.role === "assistant");
    const lastAssistant = assistantMessages.at(-1);
    if (lastAssistant && Array.isArray(lastAssistant.content)) {
      output = lastAssistant.content
        .filter((part) => part.type === "text")
        .map((part) => part.text)
        .join("");
    }
  }

  // NOTE(review): dispose() is not in a finally — a thrown prompt error
  // skips it; confirm whether the session must be disposed on failure.
  session.dispose();
  return stripCodeFence(output).trim();
}
|
||||
|
||||
async function renderPdfPages(pdfPath: string, jobId: string): Promise<RenderedPage[]> {
|
||||
const renderDir = path.join(getRenderRoot(), jobId);
|
||||
await mkdir(renderDir, { recursive: true });
|
||||
const prefix = path.join(renderDir, "page");
|
||||
const args = ["-png", "-r", "200", pdfPath, prefix];
|
||||
const { execFile } = await import("node:child_process");
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
execFile("pdftoppm", args, (error) => {
|
||||
if (error) reject(error);
|
||||
else resolve();
|
||||
});
|
||||
});
|
||||
|
||||
const entries = await readdir(renderDir);
|
||||
const pngs = entries
|
||||
.filter((entry) => entry.endsWith(".png"))
|
||||
.sort((left, right) => left.localeCompare(right, undefined, { numeric: true }));
|
||||
if (pngs.length === 0) {
|
||||
throw new Error(`No rendered pages produced for ${pdfPath}`);
|
||||
}
|
||||
|
||||
const pages: RenderedPage[] = [];
|
||||
for (const entry of pngs) {
|
||||
const pagePath = path.join(renderDir, entry);
|
||||
const buffer = await readFile(pagePath);
|
||||
pages.push({
|
||||
path: pagePath,
|
||||
image: {
|
||||
type: "image",
|
||||
source: {
|
||||
type: "base64",
|
||||
mediaType: "image/png",
|
||||
data: buffer.toString("base64"),
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
return pages;
|
||||
}
|
||||
|
||||
async function loadImagePage(imagePath: string): Promise<RenderedPage> {
|
||||
const extension = path.extname(imagePath).toLowerCase();
|
||||
const mediaType = extension === ".png" ? "image/png" : undefined;
|
||||
if (!mediaType) {
|
||||
throw new Error(`Unsupported image input format for ${imagePath}`);
|
||||
}
|
||||
|
||||
const buffer = await readFile(imagePath);
|
||||
return {
|
||||
path: imagePath,
|
||||
image: {
|
||||
type: "image",
|
||||
source: {
|
||||
type: "base64",
|
||||
mediaType,
|
||||
data: buffer.toString("base64"),
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
async function renderInputPages(inputPath: string, jobId: string): Promise<RenderedPage[]> {
|
||||
const extension = path.extname(inputPath).toLowerCase();
|
||||
if (extension === ".pdf") {
|
||||
return await renderPdfPages(inputPath, jobId);
|
||||
}
|
||||
if (extension === ".png") {
|
||||
return [await loadImagePage(inputPath)];
|
||||
}
|
||||
throw new Error(`Unsupported Notability input format: ${inputPath}`);
|
||||
}
|
||||
|
||||
/**
 * Scans the notes root for Markdown files whose frontmatter marks them as
 * managed by notability-ingest with the given note id. Dot-prefixed entries
 * are skipped and unreadable files are ignored.
 *
 * @returns sorted absolute paths of matching managed notes.
 */
async function findManagedOutputs(noteId: string): Promise<string[]> {
  const matches: string[] = [];
  // Iterative depth-first walk (explicit stack instead of recursion).
  const stack = [getNotesRoot()];

  while (stack.length > 0) {
    const currentDir = stack.pop();
    if (!currentDir || !fs.existsSync(currentDir)) continue;

    const entries = await readdir(currentDir, { withFileTypes: true });
    for (const entry of entries) {
      if (entry.name.startsWith(".")) continue;
      const fullPath = path.join(currentDir, entry.name);
      if (entry.isDirectory()) {
        stack.push(fullPath);
        continue;
      }
      if (!entry.isFile() || !entry.name.endsWith(".md")) continue;

      try {
        const parsed = parseFrontmatter(await readFile(fullPath, "utf8"));
        // Frontmatter values may carry surrounding quotes; strip to compare.
        const managedBy = parsed.values.managed_by?.replace(/^['"]|['"]$/g, "");
        const frontmatterNoteId = parsed.values.note_id?.replace(/^['"]|['"]$/g, "");
        if (managedBy === "notability-ingest" && frontmatterNoteId === noteId) {
          matches.push(fullPath);
        }
      } catch {
        // Ignore unreadable or malformed files while scanning the notebook.
      }
    }
  }

  return matches.sort();
}
|
||||
|
||||
async function resolveManagedOutputPath(noteId: string, configuredOutputPath: string): Promise<string> {
|
||||
if (fs.existsSync(configuredOutputPath)) {
|
||||
const parsed = parseFrontmatter(await readFile(configuredOutputPath, "utf8"));
|
||||
const managedBy = parsed.values.managed_by?.replace(/^['"]|['"]$/g, "");
|
||||
const frontmatterNoteId = parsed.values.note_id?.replace(/^['"]|['"]$/g, "");
|
||||
if (managedBy === "notability-ingest" && frontmatterNoteId === noteId) {
|
||||
return configuredOutputPath;
|
||||
}
|
||||
}
|
||||
|
||||
const discovered = await findManagedOutputs(noteId);
|
||||
if (discovered.length === 0) return configuredOutputPath;
|
||||
if (discovered.length === 1) return discovered[0];
|
||||
|
||||
throw new Error(
|
||||
`Multiple managed note files found for ${noteId}: ${discovered.join(", ")}`,
|
||||
);
|
||||
}
|
||||
|
||||
/**
 * Decides where generated Markdown should land relative to any existing file:
 * - no existing file → create at the resolved output path;
 * - existing content matches the last generated hash (no manual edits since
 *   the previous ingest) → overwrite in place;
 * - force flag set and the file is managed for this same note → overwrite;
 * - otherwise → divert to a timestamped `.conflict-…` sibling.
 *
 * NOTE(review): the `markdown` parameter is unused — the decision only
 * inspects the manifest and the existing file on disk.
 */
async function determineWriteTarget(manifest: IngestManifest, markdown: string): Promise<{
  outputPath: string;
  writePath: string;
  writeMode: "create" | "overwrite" | "force-overwrite" | "conflict";
  updatedMainOutput: boolean;
}> {
  const outputPath = await resolveManagedOutputPath(manifest.note_id, manifest.output_path);
  if (!fs.existsSync(outputPath)) {
    return { outputPath, writePath: outputPath, writeMode: "create", updatedMainOutput: true };
  }

  const existing = await readFile(outputPath, "utf8");
  const existingHash = sha256(existing);
  const parsed = parseFrontmatter(existing);
  // Frontmatter values may be quoted; strip surrounding quotes to compare.
  const isManaged = parsed.values.managed_by?.replace(/^['"]|['"]$/g, "") === "notability-ingest";
  const sameNoteId = parsed.values.note_id?.replace(/^['"]|['"]$/g, "") === manifest.note_id;

  if (manifest.last_generated_output_hash && existingHash === manifest.last_generated_output_hash) {
    return { outputPath, writePath: outputPath, writeMode: "overwrite", updatedMainOutput: true };
  }

  if (manifest.force_overwrite_generated && isManaged && sameNoteId) {
    return { outputPath, writePath: outputPath, writeMode: "force-overwrite", updatedMainOutput: true };
  }

  return {
    outputPath,
    writePath: conflictPathFor(outputPath),
    writeMode: "conflict",
    updatedMainOutput: false,
  };
}
|
||||
|
||||
async function writeIngestResult(resultPath: string, payload: IngestResult): Promise<void> {
|
||||
await ensureParent(resultPath);
|
||||
await writeFile(resultPath, JSON.stringify(payload, null, 2));
|
||||
}
|
||||
|
||||
/**
 * Runs one ingest job end to end: renders the input (PDF/PNG) to page
 * images, transcribes them with a vision model, normalizes the transcript,
 * wraps it in managed frontmatter, writes it to the resolved target path,
 * and persists an IngestResult.
 *
 * @throws on render failure or when either LLM step returns empty output —
 *         no result file is written here on failure; the caller's error
 *         path records it.
 */
async function ingestManifest(manifestPath: string, ctx: ExtensionContext): Promise<IngestResult> {
  const manifest = JSON.parse(await readFile(manifestPath, "utf8")) as IngestManifest;
  await ensureParent(manifest.transcript_path);
  await ensureParent(manifest.result_path);
  await mkdir(manifest.session_dir, { recursive: true });

  const normalizeSkill = await loadSkillText(NORMALIZE_SKILL);
  const pages = await renderInputPages(manifest.input_path, manifest.job_id);
  const pageSummary = pages.map((page, index) => `- page ${index + 1}: ${page.path}`).join("\n");
  // Step 1: transcribe the rendered pages (image path, low thinking).
  const transcriptPrompt = [
    "Transcribe this note into clean Markdown.",
    "Read it like a human and preserve the intended reading order and visible structure.",
    "Keep headings, lists, and paragraphs when they are visible.",
    "Do not summarize. Do not add commentary. Return Markdown only.",
    "Rendered pages:",
    pageSummary,
  ].join("\n\n");
  let transcript = await runSkillPrompt(
    ctx,
    "",
    transcriptPrompt,
    pages,
    DEFAULT_TRANSCRIBE_THINKING,
  );
  if (!transcript.trim()) {
    throw new Error("Transcription skill returned empty output");
  }
  await writeFile(manifest.transcript_path, `${transcript.trim()}\n`);

  // Step 2: normalize the raw transcription (text-only, thinking off).
  const normalizePrompt = [
    `Note ID: ${manifest.note_id}`,
    `Source path: ${manifest.source_relpath}`,
    `Preferred output path: ${manifest.output_path}`,
    "Normalize the following transcription into clean Markdown.",
    "Restore natural prose formatting and intended reading order when the transcription contains OCR or layout artifacts.",
    "If words are split across separate lines but clearly belong to the same phrase or sentence, merge them.",
    "Return only Markdown. No code fences.",
    "",
    "<transcription>",
    transcript.trim(),
    "</transcription>",
  ].join("\n");
  const normalized = await runSkillPrompt(
    ctx,
    normalizeSkill,
    normalizePrompt,
    [],
    DEFAULT_NORMALIZE_THINKING,
  );
  if (!normalized.trim()) {
    throw new Error("Normalization skill returned empty output");
  }

  // Step 3: wrap in frontmatter and write to the resolved target.
  const markdown = buildMarkdown(manifest, normalized);
  const target = await determineWriteTarget(manifest, markdown);
  await ensureParent(target.writePath);
  await writeFile(target.writePath, markdown);

  const result: IngestResult = {
    success: true,
    job_id: manifest.job_id,
    note_id: manifest.note_id,
    archive_path: manifest.archive_path,
    source_hash: manifest.source_hash,
    session_dir: manifest.session_dir,
    output_path: target.outputPath,
    // Only hash the output when the main note was actually updated.
    output_hash: target.updatedMainOutput ? await sha256File(target.writePath) : undefined,
    conflict_path: target.writeMode === "conflict" ? target.writePath : undefined,
    write_mode: target.writeMode,
    updated_main_output: target.updatedMainOutput,
    transcript_path: manifest.transcript_path,
  };
  await writeIngestResult(manifest.result_path, result);
  return result;
}
|
||||
|
||||
async function runScript(scriptName: string, args: string[]): Promise<string> {
|
||||
const { execFile } = await import("node:child_process");
|
||||
const scriptPath = path.join(getNotabilityScriptDir(), scriptName);
|
||||
return await new Promise<string>((resolve, reject) => {
|
||||
execFile("nu", [scriptPath, ...args], (error, stdout, stderr) => {
|
||||
if (error) {
|
||||
reject(new Error(stderr || stdout || error.message));
|
||||
return;
|
||||
}
|
||||
resolve(stdout.trim());
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function splitArgs(input: string): string[] {
|
||||
return input
|
||||
.trim()
|
||||
.split(/\s+/)
|
||||
.filter((part) => part.length > 0);
|
||||
}
|
||||
|
||||
function postStatus(pi: ExtensionAPI, content: string): void {
|
||||
pi.sendMessage({
|
||||
customType: STATUS_TYPE,
|
||||
content,
|
||||
display: true,
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * Notability ingest extension: renders status messages in a themed box and
 * registers the /note-status, /note-reingest, and /note-ingest commands.
 */
export default function noteIngestExtension(pi: ExtensionAPI) {
  // Render status messages inside a themed box rather than plain text.
  pi.registerMessageRenderer(STATUS_TYPE, (message, _options, theme) => {
    const box = new Box(1, 1, (text) => theme.bg("customMessageBg", text));
    box.addChild(new Text(message.content, 0, 0));
    return box;
  });

  // /note-status — runs status.nu and posts its output.
  pi.registerCommand("note-status", {
    description: "Show Notability ingest status",
    handler: async (args, _ctx) => {
      const output = await runScript("status.nu", splitArgs(args));
      postStatus(pi, output.length > 0 ? output : "No status output");
    },
  });

  // /note-reingest — enqueues a reingest job via reingest.nu.
  pi.registerCommand("note-reingest", {
    description: "Enqueue a note for reingestion",
    handler: async (args, _ctx) => {
      const trimmed = args.trim();
      if (!trimmed) {
        postStatus(pi, "Usage: /note-reingest <note-id> [--latest-source|--latest-archive] [--force-overwrite-generated]");
        return;
      }
      const output = await runScript("reingest.nu", splitArgs(trimmed));
      postStatus(pi, output.length > 0 ? output : "Reingest enqueued");
    },
  });

  // /note-ingest — runs a full ingest from a job manifest JSON.
  pi.registerCommand("note-ingest", {
    description: "Ingest a queued Notability job manifest",
    handler: async (args, ctx: ExtensionCommandContext) => {
      const manifestPath = normalizePathArg(args.trim());
      if (!manifestPath) {
        throw new Error("Usage: /note-ingest <job.json>");
      }

      // Captured before ingesting so a later failure can still be recorded.
      let resultPath = "";
      try {
        const raw = await readFile(manifestPath, "utf8");
        const manifest = JSON.parse(raw) as IngestManifest;
        resultPath = manifest.result_path;
        const result = await ingestManifest(manifestPath, ctx);
        postStatus(pi, `Ingested ${result.note_id} (${result.write_mode})`);
      } catch (error) {
        const message = error instanceof Error ? error.message : String(error);
        if (resultPath) {
          // NOTE(review): this second read/parse can itself throw and mask
          // the original error — consider reusing the manifest parsed above.
          const manifest = JSON.parse(await readFile(manifestPath, "utf8")) as IngestManifest;
          await writeIngestResult(resultPath, {
            success: false,
            job_id: manifest.job_id,
            note_id: manifest.note_id,
            archive_path: manifest.archive_path,
            source_hash: manifest.source_hash,
            session_dir: manifest.session_dir,
            error: message,
          });
        }
        throw error;
      }
    },
  });
}
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,260 +0,0 @@
|
||||
import type { ExtensionAPI, ExtensionContext } from "@mariozechner/pi-coding-agent";
|
||||
import {
|
||||
createAgentSession,
|
||||
DefaultResourceLoader,
|
||||
getAgentDir,
|
||||
SessionManager,
|
||||
SettingsManager,
|
||||
} from "@mariozechner/pi-coding-agent";
|
||||
|
||||
interface SessionNameState {
|
||||
hasAutoNamed: boolean;
|
||||
}
|
||||
|
||||
const TITLE_MODEL = {
|
||||
provider: "openai-codex",
|
||||
id: "gpt-5.4-mini",
|
||||
} as const;
|
||||
|
||||
const MAX_TITLE_LENGTH = 50;
|
||||
const MAX_RETRIES = 2;
|
||||
const FALLBACK_LENGTH = 50;
|
||||
const TITLE_ENTRY_TYPE = "vendored-session-title";
|
||||
|
||||
const TITLE_SYSTEM_PROMPT = `You are generating a succinct title for a coding session based on the provided conversation.
|
||||
|
||||
Requirements:
|
||||
- Maximum 50 characters
|
||||
- Sentence case (capitalize only first word and proper nouns)
|
||||
- Capture the main intent or task
|
||||
- Reuse the user's exact words and technical terms
|
||||
- Match the user's language
|
||||
- No quotes, colons, or markdown formatting
|
||||
- No generic titles like "Coding session" or "Help with code"
|
||||
- No explanations or commentary
|
||||
|
||||
Output ONLY the title text. Nothing else.`;
|
||||
|
||||
function isTurnCompleted(event: unknown): boolean {
|
||||
if (!event || typeof event !== "object") return false;
|
||||
const message = (event as { message?: unknown }).message;
|
||||
if (!message || typeof message !== "object") return false;
|
||||
const stopReason = (message as { stopReason?: unknown }).stopReason;
|
||||
return typeof stopReason === "string" && stopReason.toLowerCase() === "stop";
|
||||
}
|
||||
|
||||
function buildFallbackTitle(userText: string): string {
|
||||
const text = userText.trim();
|
||||
if (text.length <= FALLBACK_LENGTH) return text;
|
||||
const truncated = text.slice(0, FALLBACK_LENGTH - 3);
|
||||
const lastSpace = truncated.lastIndexOf(" ");
|
||||
return `${lastSpace > 0 ? truncated.slice(0, lastSpace) : truncated}...`;
|
||||
}
|
||||
|
||||
function postProcessTitle(raw: string): string {
|
||||
let title = raw;
|
||||
|
||||
title = title.replace(/<thinking[\s\S]*?<\/thinking>\s*/g, "");
|
||||
title = title.replace(/^["'`]+|["'`]+$/g, "");
|
||||
title = title.replace(/^#+\s*/, "");
|
||||
title = title.replace(/\*{1,2}(.*?)\*{1,2}/g, "$1");
|
||||
title = title.replace(/_{1,2}(.*?)_{1,2}/g, "$1");
|
||||
title = title.replace(/^(Title|Summary|Session)\s*:\s*/i, "");
|
||||
title =
|
||||
title
|
||||
.split("\n")
|
||||
.map((line) => line.trim())
|
||||
.find((line) => line.length > 0) ?? title;
|
||||
title = title.trim();
|
||||
|
||||
if (title.length > MAX_TITLE_LENGTH) {
|
||||
const truncated = title.slice(0, MAX_TITLE_LENGTH - 3);
|
||||
const lastSpace = truncated.lastIndexOf(" ");
|
||||
title = `${lastSpace > 0 ? truncated.slice(0, lastSpace) : truncated}...`;
|
||||
}
|
||||
|
||||
return title;
|
||||
}
|
||||
|
||||
function getLatestUserText(ctx: ExtensionContext): string | null {
|
||||
const entries = ctx.sessionManager.getEntries();
|
||||
for (let i = entries.length - 1; i >= 0; i -= 1) {
|
||||
const entry = entries[i];
|
||||
if (!entry || entry.type !== "message") continue;
|
||||
if (entry.message.role !== "user") continue;
|
||||
|
||||
const { content } = entry.message as { content: unknown };
|
||||
if (typeof content === "string") return content;
|
||||
if (!Array.isArray(content)) return null;
|
||||
|
||||
return content
|
||||
.filter(
|
||||
(part): part is { type: string; text?: string } =>
|
||||
typeof part === "object" && part !== null && "type" in part,
|
||||
)
|
||||
.filter((part) => part.type === "text" && typeof part.text === "string")
|
||||
.map((part) => part.text ?? "")
|
||||
.join(" ");
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
function getLatestAssistantText(ctx: ExtensionContext): string | null {
|
||||
const entries = ctx.sessionManager.getEntries();
|
||||
for (let i = entries.length - 1; i >= 0; i -= 1) {
|
||||
const entry = entries[i];
|
||||
if (!entry || entry.type !== "message") continue;
|
||||
if (entry.message.role !== "assistant") continue;
|
||||
|
||||
const { content } = entry.message as { content: unknown };
|
||||
if (typeof content === "string") return content;
|
||||
if (!Array.isArray(content)) return null;
|
||||
|
||||
return content
|
||||
.filter(
|
||||
(part): part is { type: string; text?: string } =>
|
||||
typeof part === "object" && part !== null && "type" in part,
|
||||
)
|
||||
.filter((part) => part.type === "text" && typeof part.text === "string")
|
||||
.map((part) => part.text ?? "")
|
||||
.join("\n");
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
function resolveModel(ctx: ExtensionContext) {
|
||||
const available = ctx.modelRegistry.getAvailable();
|
||||
const model = available.find(
|
||||
(candidate) => candidate.provider === TITLE_MODEL.provider && candidate.id === TITLE_MODEL.id,
|
||||
);
|
||||
if (model) return model;
|
||||
|
||||
const existsWithoutKey = ctx.modelRegistry
|
||||
.getAll()
|
||||
.some((candidate) => candidate.provider === TITLE_MODEL.provider && candidate.id === TITLE_MODEL.id);
|
||||
if (existsWithoutKey) {
|
||||
throw new Error(
|
||||
`Model ${TITLE_MODEL.provider}/${TITLE_MODEL.id} exists but has no configured API key.`,
|
||||
);
|
||||
}
|
||||
|
||||
throw new Error(`Model ${TITLE_MODEL.provider}/${TITLE_MODEL.id} is not available.`);
|
||||
}
|
||||
|
||||
async function generateTitle(userText: string, assistantText: string, ctx: ExtensionContext): Promise<string> {
|
||||
const agentDir = getAgentDir();
|
||||
const settingsManager = SettingsManager.create(ctx.cwd, agentDir);
|
||||
const resourceLoader = new DefaultResourceLoader({
|
||||
cwd: ctx.cwd,
|
||||
agentDir,
|
||||
settingsManager,
|
||||
noExtensions: true,
|
||||
noPromptTemplates: true,
|
||||
noThemes: true,
|
||||
noSkills: true,
|
||||
systemPromptOverride: () => TITLE_SYSTEM_PROMPT,
|
||||
appendSystemPromptOverride: () => [],
|
||||
agentsFilesOverride: () => ({ agentsFiles: [] }),
|
||||
});
|
||||
await resourceLoader.reload();
|
||||
|
||||
const { session } = await createAgentSession({
|
||||
model: resolveModel(ctx),
|
||||
thinkingLevel: "off",
|
||||
sessionManager: SessionManager.inMemory(),
|
||||
modelRegistry: ctx.modelRegistry,
|
||||
resourceLoader,
|
||||
});
|
||||
|
||||
let accumulated = "";
|
||||
const unsubscribe = session.subscribe((event) => {
|
||||
if (event.type === "message_update" && event.assistantMessageEvent.type === "text_delta") {
|
||||
accumulated += event.assistantMessageEvent.delta;
|
||||
}
|
||||
});
|
||||
|
||||
const description = assistantText
|
||||
? `<user>${userText}</user>\n<assistant>${assistantText}</assistant>`
|
||||
: `<user>${userText}</user>`;
|
||||
const userMessage = `<conversation>\n${description}\n</conversation>\n\nGenerate a title:`;
|
||||
|
||||
try {
|
||||
await session.prompt(userMessage);
|
||||
} finally {
|
||||
unsubscribe();
|
||||
session.dispose();
|
||||
}
|
||||
|
||||
return postProcessTitle(accumulated);
|
||||
}
|
||||
|
||||
async function generateAndSetTitle(pi: ExtensionAPI, ctx: ExtensionContext): Promise<void> {
|
||||
const userText = getLatestUserText(ctx);
|
||||
if (!userText?.trim()) return;
|
||||
|
||||
const assistantText = getLatestAssistantText(ctx) ?? "";
|
||||
if (!assistantText.trim()) return;
|
||||
|
||||
let lastError: Error | null = null;
|
||||
for (let attempt = 1; attempt <= MAX_RETRIES; attempt += 1) {
|
||||
try {
|
||||
const title = await generateTitle(userText, assistantText, ctx);
|
||||
if (!title) continue;
|
||||
|
||||
pi.setSessionName(title);
|
||||
pi.appendEntry(TITLE_ENTRY_TYPE, {
|
||||
title,
|
||||
rawUserText: userText,
|
||||
rawAssistantText: assistantText,
|
||||
attempt,
|
||||
model: `${TITLE_MODEL.provider}/${TITLE_MODEL.id}`,
|
||||
});
|
||||
ctx.ui.notify(`Session: ${title}`, "info");
|
||||
return;
|
||||
} catch (error) {
|
||||
lastError = error instanceof Error ? error : new Error(String(error));
|
||||
}
|
||||
}
|
||||
|
||||
const fallback = buildFallbackTitle(userText);
|
||||
pi.setSessionName(fallback);
|
||||
pi.appendEntry(TITLE_ENTRY_TYPE, {
|
||||
title: fallback,
|
||||
fallback: true,
|
||||
error: lastError?.message ?? "Unknown error",
|
||||
rawUserText: userText,
|
||||
rawAssistantText: assistantText,
|
||||
model: `${TITLE_MODEL.provider}/${TITLE_MODEL.id}`,
|
||||
});
|
||||
ctx.ui.notify(`Title generation failed, using fallback: ${fallback}`, "warning");
|
||||
}
|
||||
|
||||
export default function setupSessionNameHook(pi: ExtensionAPI) {
|
||||
const state: SessionNameState = {
|
||||
hasAutoNamed: false,
|
||||
};
|
||||
|
||||
pi.on("session_start", async () => {
|
||||
state.hasAutoNamed = false;
|
||||
});
|
||||
|
||||
pi.on("session_switch", async () => {
|
||||
state.hasAutoNamed = false;
|
||||
});
|
||||
|
||||
pi.on("turn_end", async (event, ctx) => {
|
||||
if (state.hasAutoNamed) return;
|
||||
|
||||
if (pi.getSessionName()) {
|
||||
state.hasAutoNamed = true;
|
||||
return;
|
||||
}
|
||||
|
||||
if (!isTurnCompleted(event)) return;
|
||||
|
||||
await generateAndSetTitle(pi, ctx);
|
||||
state.hasAutoNamed = true;
|
||||
});
|
||||
}
|
||||
@@ -1,21 +0,0 @@
|
||||
{
|
||||
"mcpServers": {
|
||||
"opensrc": {
|
||||
"command": "npx",
|
||||
"args": ["-y", "opensrc-mcp"],
|
||||
"lifecycle": "eager"
|
||||
},
|
||||
"context7": {
|
||||
"url": "https://mcp.context7.com/mcp",
|
||||
"lifecycle": "eager"
|
||||
},
|
||||
"grep_app": {
|
||||
"url": "https://mcp.grep.app",
|
||||
"lifecycle": "eager"
|
||||
},
|
||||
"sentry": {
|
||||
"url": "https://mcp.sentry.dev/mcp",
|
||||
"auth": "oauth"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,143 +0,0 @@
|
||||
---
|
||||
name: jujutsu
|
||||
description: Manages version control with Jujutsu (jj), including rebasing, conflict resolution, and Git interop. Use when tracking changes, navigating history, squashing/splitting commits, or pushing to Git remotes.
|
||||
---
|
||||
|
||||
# Jujutsu
|
||||
|
||||
Git-compatible VCS focused on concurrent development and ease of use.
|
||||
|
||||
> ⚠️ **Not Git!** Jujutsu syntax differs from Git:
|
||||
>
|
||||
> - Parent: `@-` not `@~1` or `@^`
|
||||
> - Grandparent: `@--` not `@~2`
|
||||
> - Child: `@+` not `@~-1`
|
||||
> - Use `jj log` not `jj changes`
|
||||
|
||||
## Key Commands
|
||||
|
||||
| Command | Description |
|
||||
| -------------------------- | -------------------------------------------- |
|
||||
| `jj st` | Show working copy status |
|
||||
| `jj log` | Show change log |
|
||||
| `jj diff` | Show changes in working copy |
|
||||
| `jj new` | Create new change |
|
||||
| `jj desc` | Edit change description |
|
||||
| `jj squash` | Move changes to parent |
|
||||
| `jj split` | Split current change |
|
||||
| `jj rebase -s src -d dest` | Rebase changes |
|
||||
| `jj absorb` | Move changes into stack of mutable revisions |
|
||||
| `jj bisect` | Find bad revision by bisection |
|
||||
| `jj fix` | Update files with formatting fixes |
|
||||
| `jj sign` | Cryptographically sign a revision |
|
||||
| `jj metaedit` | Modify metadata without changing content |
|
||||
|
||||
## Basic Workflow
|
||||
|
||||
```bash
|
||||
jj new # Create new change
|
||||
jj desc -m "feat: add feature" # Set description
|
||||
jj log # View history
|
||||
jj edit change-id # Switch to change
|
||||
jj new --before @ # Time travel (create before current)
|
||||
jj edit @- # Go to parent
|
||||
```
|
||||
|
||||
## Time Travel
|
||||
|
||||
```bash
|
||||
jj edit change-id # Switch to specific change
|
||||
jj next --edit # Next child change
|
||||
jj edit @- # Parent change
|
||||
jj new --before @ -m msg # Insert before current
|
||||
```
|
||||
|
||||
## Merging & Rebasing
|
||||
|
||||
```bash
|
||||
jj new x yz -m msg # Merge changes
|
||||
jj rebase -s src -d dest # Rebase source onto dest
|
||||
jj abandon # Delete current change
|
||||
```
|
||||
|
||||
## Conflicts
|
||||
|
||||
```bash
|
||||
jj resolve # Interactive conflict resolution
|
||||
# Edit files, then continue
|
||||
```
|
||||
|
||||
## Revset Syntax
|
||||
|
||||
**Parent/child operators:**
|
||||
|
||||
| Syntax | Meaning | Example |
|
||||
| ------ | ---------------- | -------------------- |
|
||||
| `@-` | Parent of @ | `jj diff -r @-` |
|
||||
| `@--` | Grandparent | `jj log -r @--` |
|
||||
| `x-` | Parent of x | `jj diff -r abc123-` |
|
||||
| `@+` | Child of @ | `jj log -r @+` |
|
||||
| `x::y` | x to y inclusive | `jj log -r main::@` |
|
||||
| `x..y` | x to y exclusive | `jj log -r main..@` |
|
||||
| `x\|y` | Union (or) | `jj log -r 'a \| b'` |
|
||||
|
||||
**⚠️ Common mistakes:**
|
||||
|
||||
- ❌ `@~1` → ✅ `@-` (parent)
|
||||
- ❌ `@^` → ✅ `@-` (parent)
|
||||
- ❌ `@~-1` → ✅ `@+` (child)
|
||||
- ❌ `jj changes` → ✅ `jj log` or `jj diff`
|
||||
- ❌ `a,b,c` → ✅ `a | b | c` (union uses pipe, not comma)
|
||||
|
||||
**Functions:**
|
||||
|
||||
```bash
|
||||
jj log -r 'heads(all())' # All heads
|
||||
jj log -r 'remote_bookmarks()..' # Not on remote
|
||||
jj log -r 'author(name)' # By author
|
||||
jj log -r 'description(regex)' # By description
|
||||
jj log -r 'mine()' # My commits
|
||||
jj log -r 'committer_date(after:"7 days ago")' # Recent commits
|
||||
jj log -r 'mine() & committer_date(after:"yesterday")' # My recent
|
||||
```
|
||||
|
||||
## Templates
|
||||
|
||||
```bash
|
||||
jj log -T 'commit_id ++ "\n" ++ description'
|
||||
```
|
||||
|
||||
## Git Interop
|
||||
|
||||
```bash
|
||||
jj bookmark create main -r @ # Create bookmark
|
||||
jj git push --bookmark main # Push bookmark
|
||||
jj git fetch # Fetch from remote
|
||||
jj bookmark track main@origin # Track remote
|
||||
```
|
||||
|
||||
## Advanced Commands
|
||||
|
||||
```bash
|
||||
jj absorb # Auto-move changes to relevant commits in stack
|
||||
jj bisect start # Start bisection
|
||||
jj bisect good # Mark current as good
|
||||
jj bisect bad # Mark current as bad
|
||||
jj fix # Run configured formatters on files
|
||||
jj sign -r @ # Sign current revision
|
||||
jj metaedit -r @ -m "new message" # Edit metadata only
|
||||
```
|
||||
|
||||
## Tips
|
||||
|
||||
- No staging: changes are immediate
|
||||
- Use conventional commits: `type(scope): desc`
|
||||
- `jj undo` to revert operations
|
||||
- `jj op log` to see operation history
|
||||
- Bookmarks are like branches
|
||||
- `jj absorb` is powerful for fixing up commits in a stack
|
||||
|
||||
## Related Skills
|
||||
|
||||
- **gh**: GitHub CLI for PRs and issues
|
||||
- **review**: Code review before committing
|
||||
@@ -1,36 +0,0 @@
|
||||
---
|
||||
name: notability-normalize
|
||||
description: Normalizes an exact Notability transcription into clean, searchable Markdown while preserving all original content and uncertainty markers. Use after a faithful transcription pass.
|
||||
---
|
||||
|
||||
# Notability Normalize
|
||||
|
||||
You are doing a **Markdown normalization** pass on a previously transcribed Notability note.
|
||||
|
||||
## Rules
|
||||
|
||||
- Do **not** summarize.
|
||||
- Do **not** remove uncertainty markers such as `[unclear: ...]`.
|
||||
- Preserve all substantive content from the transcription.
|
||||
- Clean up only formatting and Markdown structure.
|
||||
- Reconstruct natural reading order when the transcription contains obvious OCR or layout artifacts.
|
||||
- Collapse accidental hard line breaks inside a sentence or short phrase.
|
||||
- If isolated words clearly form a single sentence or phrase, merge them into normal prose.
|
||||
- Prefer readable Markdown headings, lists, and tables.
|
||||
- Keep content in the same overall order as the transcription.
|
||||
- Do not invent content.
|
||||
- Do not output code fences.
|
||||
- Output Markdown only.
|
||||
|
||||
## Output
|
||||
|
||||
- Produce a clean Markdown document.
|
||||
- Include a top-level `#` heading if the note clearly has a title.
|
||||
- Use standard Markdown lists and checkboxes.
|
||||
- Represent tables as Markdown tables when practical.
|
||||
- Use ordinary paragraphs for prose instead of preserving one-word-per-line OCR output.
|
||||
- Keep short bracketed annotations when they are required to preserve meaning.
|
||||
|
||||
## Important
|
||||
|
||||
The source PDF remains the ground truth. When in doubt, preserve ambiguity instead of cleaning it away.
|
||||
@@ -1,38 +0,0 @@
|
||||
---
|
||||
name: notability-transcribe
|
||||
description: Faithfully transcribes handwritten or mixed handwritten/typed Notability note pages into Markdown without summarizing. Use when converting note page images or PDFs into an exact textual transcription.
|
||||
---
|
||||
|
||||
# Notability Transcribe
|
||||
|
||||
You are doing a **faithful transcription** pass for handwritten Notability notes.
|
||||
|
||||
## Rules
|
||||
|
||||
- Preserve the original order of content.
|
||||
- Reconstruct the intended reading order from the page layout.
|
||||
- Read the page in the order a human would: top-to-bottom and left-to-right, while respecting obvious grouping.
|
||||
- Do **not** summarize, explain, clean up, or reorganize beyond what is necessary to transcribe faithfully.
|
||||
- Preserve headings, bullets, numbered items, checkboxes, tables, separators, callouts, and obvious layout structure.
|
||||
- Do **not** preserve accidental OCR-style hard line breaks when the note is clearly continuous prose or a single phrase.
|
||||
- If words are staggered on the page but clearly belong to the same sentence, combine them into normal lines.
|
||||
- If text is uncertain, keep the uncertainty inline as `[unclear: ...]`.
|
||||
- If a word is partially legible, include the best reading and uncertainty marker.
|
||||
- If there is a drawing or diagram that cannot be represented exactly, describe it minimally in brackets, for example `[diagram: arrow from A to B]`.
|
||||
- Preserve language exactly as written.
|
||||
- Do not invent missing words.
|
||||
- Do not output code fences.
|
||||
- Output Markdown only.
|
||||
|
||||
## Output shape
|
||||
|
||||
- Use headings when headings are clearly present.
|
||||
- Use `- [ ]` or `- [x]` for checkboxes when visible.
|
||||
- Use bullet lists for bullet lists.
|
||||
- Use normal paragraphs or single-line phrases for continuous prose instead of one word per line.
|
||||
- Keep side notes in the position that best preserves reading order.
|
||||
- Insert blank lines between major sections.
|
||||
|
||||
## Safety
|
||||
|
||||
If a page is partly unreadable, still transcribe everything you can and mark uncertain content with `[unclear: ...]`.
|
||||
@@ -1,11 +1,28 @@
|
||||
# Global AGENTS.md
|
||||
# AGENTS.md
|
||||
|
||||
## Version Control
|
||||
|
||||
- Use `jj` for VCS, not `git`
|
||||
- `jj tug` is an alias for `jj bookmark move --from closest_bookmark(@-) --to @-`
|
||||
- Use `jj` for version control, not `git`.
|
||||
- `jj tug` is an alias for `jj bookmark move --from closest_bookmark(@-) --to @-`.
|
||||
- Never attempt historically destructive Git commands.
|
||||
- Make small, frequent commits.
|
||||
- "Commit" means `jj commit`, not `jj desc`; `desc` stays on the same working copy.
|
||||
|
||||
## Scripting
|
||||
|
||||
- Always use Nushell (`nu`) for scripting
|
||||
- Never use Python, Perl, Lua, awk, or any other scripting language
|
||||
- Use Nushell (`nu`) for scripting.
|
||||
- Do not use Python, Perl, Lua, awk, or any other scripting language. You are programatically blocked from doing so.
|
||||
|
||||
## Workflow
|
||||
|
||||
- Always complete the requested work.
|
||||
- If there is any ambiguity about what to do next, do NOT make a decision yourself. Stop your work and ask.
|
||||
- Do not end with “If you want me to…” or “I can…”; take the next necessary step and finish the job without waiting for additional confirmation.
|
||||
- Do not future-proof things. Stick to the original plan.
|
||||
- Do not add fallbacks or backward compatibility unless explicitly required by the user. By default, replace the previous implementation with the new one entirely.
|
||||
|
||||
## Validation
|
||||
|
||||
- Do not ignore failing tests or checks, even if they appear unrelated to your changes.
|
||||
- After completing and validating your work, the final step is to run the project's full validation and test commands and ensure they all pass.
|
||||
|
||||
|
||||
@@ -1,10 +0,0 @@
|
||||
{inputs, ...}: final: prev: {
|
||||
pi-agent-stuff =
|
||||
prev.buildNpmPackage {
|
||||
pname = "pi-agent-stuff";
|
||||
version = "1.5.0";
|
||||
src = inputs.pi-agent-stuff;
|
||||
npmDepsHash = "sha256-pyXMNdlie8vAkhz2f3GUGT3CCYuwt+xkWnsijBajXIo=";
|
||||
dontNpmBuild = true;
|
||||
};
|
||||
}
|
||||
@@ -1,33 +0,0 @@
|
||||
{inputs, ...}: final: prev: {
|
||||
pi-harness =
|
||||
prev.stdenvNoCC.mkDerivation {
|
||||
pname = "pi-harness";
|
||||
version = "0.0.0";
|
||||
src = inputs.pi-harness;
|
||||
|
||||
pnpmDeps =
|
||||
prev.fetchPnpmDeps {
|
||||
pname = "pi-harness";
|
||||
version = "0.0.0";
|
||||
src = inputs.pi-harness;
|
||||
pnpm = prev.pnpm_10;
|
||||
fetcherVersion = 3;
|
||||
hash = "sha256-lNcZRCmmwq9t05UjVWcuGq+ZzRHuHNmqKQIVPh6DoxQ=";
|
||||
};
|
||||
|
||||
nativeBuildInputs = [
|
||||
prev.pnpmConfigHook
|
||||
prev.pnpm_10
|
||||
prev.nodejs
|
||||
];
|
||||
|
||||
dontBuild = true;
|
||||
|
||||
installPhase = ''
|
||||
runHook preInstall
|
||||
mkdir -p $out/lib/node_modules/@aliou/pi-harness
|
||||
cp -r . $out/lib/node_modules/@aliou/pi-harness
|
||||
runHook postInstall
|
||||
'';
|
||||
};
|
||||
}
|
||||
@@ -1,10 +0,0 @@
|
||||
{inputs, ...}: final: prev: {
|
||||
pi-mcp-adapter =
|
||||
prev.buildNpmPackage {
|
||||
pname = "pi-mcp-adapter";
|
||||
version = "2.2.0";
|
||||
src = inputs.pi-mcp-adapter;
|
||||
npmDepsHash = "sha256-myJ9h/zC/KDddt8NOVvJjjqbnkdEN4ZR+okCR5nu7hM=";
|
||||
dontNpmBuild = true;
|
||||
};
|
||||
}
|
||||
@@ -123,68 +123,5 @@ in {
|
||||
};
|
||||
"opencode/AGENTS.md".source = ./_opencode/AGENTS.md;
|
||||
};
|
||||
|
||||
home.file = {
|
||||
"AGENTS.md".source = ./_ai-tools/AGENTS.md;
|
||||
".pi/agent/extensions/pi-elixir" = {
|
||||
source = inputs.pi-elixir;
|
||||
recursive = true;
|
||||
};
|
||||
".pi/agent/extensions/pi-mcp-adapter" = {
|
||||
source = "${pkgs.pi-mcp-adapter}/lib/node_modules/pi-mcp-adapter";
|
||||
recursive = true;
|
||||
};
|
||||
".pi/agent/extensions/no-git.ts".source = ./_ai-tools/extensions/no-git.ts;
|
||||
".pi/agent/extensions/no-scripting.ts".source = ./_ai-tools/extensions/no-scripting.ts;
|
||||
".pi/agent/extensions/note-ingest.ts".source = ./_ai-tools/extensions/note-ingest.ts;
|
||||
".pi/agent/extensions/review.ts".source = ./_ai-tools/extensions/review.ts;
|
||||
".pi/agent/extensions/session-name.ts".source = ./_ai-tools/extensions/session-name.ts;
|
||||
".pi/agent/notability" = {
|
||||
source = ./_notability;
|
||||
recursive = true;
|
||||
};
|
||||
".pi/agent/skills/elixir-dev" = {
|
||||
source = "${inputs.pi-elixir}/skills/elixir-dev";
|
||||
recursive = true;
|
||||
};
|
||||
".pi/agent/skills/jujutsu/SKILL.md".source = ./_ai-tools/skills/jujutsu/SKILL.md;
|
||||
".pi/agent/skills/notability-transcribe/SKILL.md".source = ./_ai-tools/skills/notability-transcribe/SKILL.md;
|
||||
".pi/agent/skills/notability-normalize/SKILL.md".source = ./_ai-tools/skills/notability-normalize/SKILL.md;
|
||||
".pi/agent/themes" = {
|
||||
source = "${inputs.pi-rose-pine}/themes";
|
||||
recursive = true;
|
||||
};
|
||||
".pi/agent/settings.json".text =
|
||||
builtins.toJSON {
|
||||
theme = "rose-pine-dawn";
|
||||
quietStartup = true;
|
||||
hideThinkingBlock = true;
|
||||
defaultProvider = "openai-codex";
|
||||
defaultModel = "gpt-5.4";
|
||||
defaultThinkingLevel = "high";
|
||||
packages = [
|
||||
{
|
||||
source = "${pkgs.pi-agent-stuff}/lib/node_modules/mitsupi";
|
||||
extensions = [
|
||||
"pi-extensions/answer.ts"
|
||||
"pi-extensions/context.ts"
|
||||
"pi-extensions/multi-edit.ts"
|
||||
"pi-extensions/todos.ts"
|
||||
];
|
||||
skills = [];
|
||||
prompts = [];
|
||||
themes = [];
|
||||
}
|
||||
{
|
||||
source = "${pkgs.pi-harness}/lib/node_modules/@aliou/pi-harness";
|
||||
extensions = ["extensions/breadcrumbs/index.ts"];
|
||||
skills = [];
|
||||
prompts = [];
|
||||
themes = [];
|
||||
}
|
||||
];
|
||||
};
|
||||
".pi/agent/mcp.json".source = ./_ai-tools/mcp.json;
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
@@ -54,26 +54,6 @@
|
||||
inputs.nixpkgs.follows = "nixpkgs";
|
||||
};
|
||||
llm-agents.url = "github:numtide/llm-agents.nix";
|
||||
pi-agent-stuff = {
|
||||
url = "github:mitsuhiko/agent-stuff";
|
||||
flake = false;
|
||||
};
|
||||
pi-elixir = {
|
||||
url = "github:dannote/pi-elixir";
|
||||
flake = false;
|
||||
};
|
||||
pi-rose-pine = {
|
||||
url = "github:zenobi-us/pi-rose-pine";
|
||||
flake = false;
|
||||
};
|
||||
pi-harness = {
|
||||
url = "github:aliou/pi-harness";
|
||||
flake = false;
|
||||
};
|
||||
pi-mcp-adapter = {
|
||||
url = "github:nicobailon/pi-mcp-adapter";
|
||||
flake = false;
|
||||
};
|
||||
qmd.url = "github:tobi/qmd";
|
||||
# Overlay inputs
|
||||
himalaya.url = "github:pimalaya/himalaya";
|
||||
|
||||
@@ -26,7 +26,6 @@ in {
|
||||
];
|
||||
commonPath = with pkgs;
|
||||
[
|
||||
inputs'.llm-agents.packages.pi
|
||||
coreutils
|
||||
inotify-tools
|
||||
nushell
|
||||
|
||||
@@ -20,12 +20,6 @@
|
||||
(import ./_overlays/jj-ryu.nix {inherit inputs;})
|
||||
# cog-cli
|
||||
(import ./_overlays/cog-cli.nix {inherit inputs;})
|
||||
# pi-agent-stuff (mitsuhiko)
|
||||
(import ./_overlays/pi-agent-stuff.nix {inherit inputs;})
|
||||
# pi-harness (aliou)
|
||||
(import ./_overlays/pi-harness.nix {inherit inputs;})
|
||||
# pi-mcp-adapter
|
||||
(import ./_overlays/pi-mcp-adapter.nix {inherit inputs;})
|
||||
# qmd
|
||||
(import ./_overlays/qmd.nix {inherit inputs;})
|
||||
# jj-starship (passes through upstream overlay)
|
||||
|
||||
Reference in New Issue
Block a user