This commit is contained in:
2026-04-01 11:16:47 +00:00
parent 8b09aa9705
commit bbfcc366c2
26 changed files with 689 additions and 7614 deletions

73
flake.lock generated
View File

@@ -414,24 +414,6 @@
"type": "github"
}
},
"flake-utils_3": {
"inputs": {
"systems": "systems_6"
},
"locked": {
"lastModified": 1731533236,
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"himalaya": {
"inputs": {
"fenix": "fenix_2",
@@ -799,22 +781,6 @@
}
},
"nixpkgs_7": {
"locked": {
"lastModified": 1769188852,
"narHash": "sha256-aBAGyMum27K7cP5OR7BMioJOF3icquJMZDDgk6ZEg1A=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "a1bab9e494f5f4939442a57a58d0449a109593fe",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixpkgs-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs_8": {
"locked": {
"lastModified": 1765934234,
"narHash": "sha256-pJjWUzNnjbIAMIc5gRFUuKCDQ9S1cuh3b2hKgA7Mc4A=",
@@ -866,25 +832,6 @@
"type": "github"
}
},
"qmd": {
"inputs": {
"flake-utils": "flake-utils_2",
"nixpkgs": "nixpkgs_7"
},
"locked": {
"lastModified": 1774742449,
"narHash": "sha256-x6+O8KX2LVqL49MLZsvyENITC5pY+IiTrI59OSwxurU=",
"owner": "tobi",
"repo": "qmd",
"rev": "1fb2e2819e4024045203b4ea550ec793683baf2b",
"type": "github"
},
"original": {
"owner": "tobi",
"repo": "qmd",
"type": "github"
}
},
"root": {
"inputs": {
"code-review-nvim": "code-review-nvim",
@@ -914,7 +861,6 @@
"nixpkgs"
],
"nixvim": "nixvim",
"qmd": "qmd",
"sops-nix": "sops-nix",
"zjstatus": "zjstatus"
}
@@ -1086,21 +1032,6 @@
"type": "github"
}
},
"systems_6": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
},
"treefmt-nix": {
"inputs": {
"nixpkgs": [
@@ -1143,8 +1074,8 @@
"zjstatus": {
"inputs": {
"crane": "crane",
"flake-utils": "flake-utils_3",
"nixpkgs": "nixpkgs_8",
"flake-utils": "flake-utils_2",
"nixpkgs": "nixpkgs_7",
"rust-overlay": "rust-overlay"
},
"locked": {

View File

@@ -68,7 +68,6 @@
nixpkgs.url = "github:nixos/nixpkgs/master";
nixpkgs-lib.follows = "nixpkgs";
nixvim.url = "github:nix-community/nixvim";
qmd.url = "github:tobi/qmd";
sops-nix = {
url = "github:Mic92/sops-nix";
inputs.nixpkgs.follows = "nixpkgs";

View File

@@ -1,141 +0,0 @@
#!/usr/bin/env nu
use ./lib.nu *
def active-job-exists [note_id: string, source_hash: string] {
  # True when this note + source hash already has an unfinished job
  # (anything not 'done' and not 'failed', i.e. queued or running).
  let pending = (sql-json $"
    select job_id
    from jobs
    where note_id = (sql-quote $note_id)
    and source_hash = (sql-quote $source_hash)
    and status != 'done'
    and status != 'failed'
    limit 1;
  ")
  ($pending | length) > 0
}
export def archive-and-version [note_id: string, source_path: path, source_relpath: string, source_size: any, source_mtime: string, source_hash: string] {
  # Copy the current source file into the per-note archive and insert a
  # matching row into the versions table. Returns a record with the new
  # version_id, seen_at timestamp, and archive_path.
  let source_size_int = ($source_size | into int)
  let archive_path = (archive-path-for $note_id $source_hash $source_relpath)
  cp $source_path $archive_path
  let version_id = (new-version-id)
  let seen_at = (now-iso)
  # Pre-quote every textual value once; sql-quote maps null to NULL and
  # doubles embedded single quotes.
  let version_id_q = (sql-quote $version_id)
  let note_id_q = (sql-quote $note_id)
  let seen_at_q = (sql-quote $seen_at)
  let archive_path_q = (sql-quote $archive_path)
  let source_hash_q = (sql-quote $source_hash)
  let source_mtime_q = (sql-quote $source_mtime)
  let source_relpath_q = (sql-quote $source_relpath)
  # The INSERT is assembled from a list join so literal parentheses do
  # not need escaping inside an interpolated string.
  let sql = ([
    "insert into versions (version_id, note_id, seen_at, archive_path, source_hash, source_size, source_mtime, source_relpath, ingest_result, session_path) values ("
    $version_id_q
    ", "
    $note_id_q
    ", "
    $seen_at_q
    ", "
    $archive_path_q
    ", "
    $source_hash_q
    ", "
    ($source_size_int | into string)
    ", "
    $source_mtime_q
    ", "
    $source_relpath_q
    ", 'pending', null);"
  ] | str join '')
  sql-run $sql | ignore
  # ingest_result starts as 'pending'; the worker fills it in later.
  {
    version_id: $version_id
    seen_at: $seen_at
    archive_path: $archive_path
  }
}
export def enqueue-job [
  note: record,
  operation: string,
  input_path: string,
  archive_path: string,
  source_hash: string,
  title: string,
  force_overwrite_generated: bool = false,
  source_transport: string = 'webdav',
] {
  # Create a job for the worker: write a JSON manifest into the 'queued'
  # spool directory and insert a matching jobs row. Returns the new job's
  # ids/paths, or null when an equivalent job is already queued/running.
  if (active-job-exists $note.note_id $source_hash) {
    return null
  }
  let job_id = (new-job-id)
  let requested_at = (now-iso)
  let manifest_path = (manifest-path-for $job_id 'queued')
  let result_path = (result-path-for $job_id)
  let transcript_path = (transcript-path-for $note.note_id $job_id)
  let session_dir = ([(sessions-root) $note.note_id $job_id] | path join)
  mkdir $session_dir
  # The manifest is the worker's complete view of the job; the db row
  # below only tracks queue state.
  let manifest = {
    version: 1
    job_id: $job_id
    note_id: $note.note_id
    operation: $operation
    requested_at: $requested_at
    title: $title
    source_relpath: $note.source_relpath
    source_path: $note.source_path
    input_path: $input_path
    archive_path: $archive_path
    output_path: $note.output_path
    transcript_path: $transcript_path
    result_path: $result_path
    session_dir: $session_dir
    source_hash: $source_hash
    last_generated_output_hash: ($note.last_generated_output_hash? | default null)
    force_overwrite_generated: $force_overwrite_generated
    source_transport: $source_transport
  }
  ($manifest | to json --indent 2) | save -f $manifest_path
  let job_id_q = (sql-quote $job_id)
  let note_id_q = (sql-quote $note.note_id)
  let operation_q = (sql-quote $operation)
  let requested_at_q = (sql-quote $requested_at)
  let source_hash_q = (sql-quote $source_hash)
  let manifest_path_q = (sql-quote $manifest_path)
  let result_path_q = (sql-quote $result_path)
  # List join keeps literal parentheses out of an interpolated string.
  let sql = ([
    "insert into jobs (job_id, note_id, operation, status, requested_at, source_hash, job_manifest_path, result_path) values ("
    $job_id_q
    ", "
    $note_id_q
    ", "
    $operation_q
    ", 'queued', "
    $requested_at_q
    ", "
    $source_hash_q
    ", "
    $manifest_path_q
    ", "
    $result_path_q
    ");"
  ] | str join '')
  sql-run $sql | ignore
  {
    job_id: $job_id
    requested_at: $requested_at
    manifest_path: $manifest_path
    result_path: $result_path
    transcript_path: $transcript_path
    session_dir: $session_dir
  }
}

View File

@@ -1,433 +0,0 @@
export def home-dir [] {
  # The current user's home directory, as resolved by nushell.
  $nu.home-dir
}
export def data-root [] {
  # Durable data directory; NOTABILITY_DATA_ROOT overrides the default.
  $env.NOTABILITY_DATA_ROOT? | default ([$nu.home-dir ".local" "share" "notability-ingest"] | path join)
}
export def state-root [] {
  # Mutable state directory; NOTABILITY_STATE_ROOT overrides the default.
  $env.NOTABILITY_STATE_ROOT? | default ([$nu.home-dir ".local" "state" "notability-ingest"] | path join)
}
export def notes-root [] {
  # Destination notebook directory; NOTABILITY_NOTES_DIR overrides it.
  $env.NOTABILITY_NOTES_DIR? | default ([$nu.home-dir "Notes"] | path join)
}
export def webdav-root [] {
  # Root of the WebDAV upload tree; NOTABILITY_WEBDAV_ROOT overrides it.
  $env.NOTABILITY_WEBDAV_ROOT? | default ([(data-root) "webdav-root"] | path join)
}
export def archive-root [] {
  # Immutable archive of source versions; NOTABILITY_ARCHIVE_ROOT overrides.
  $env.NOTABILITY_ARCHIVE_ROOT? | default ([(data-root) "archive"] | path join)
}
export def render-root [] {
  # Rendered page output directory; NOTABILITY_RENDER_ROOT overrides it.
  $env.NOTABILITY_RENDER_ROOT? | default ([(data-root) "rendered-pages"] | path join)
}
export def transcript-root [] {
  # Transcript storage; NOTABILITY_TRANSCRIPT_ROOT overrides the default.
  $env.NOTABILITY_TRANSCRIPT_ROOT? | default ([(state-root) "transcripts"] | path join)
}
export def jobs-root [] {
  # Job spool root; NOTABILITY_JOBS_ROOT overrides the default.
  $env.NOTABILITY_JOBS_ROOT? | default ([(state-root) "jobs"] | path join)
}
export def queued-root [] {
  # Spool directory for jobs waiting to run.
  (jobs-root) | path join "queued"
}
export def running-root [] {
  # Spool directory for jobs currently being processed.
  (jobs-root) | path join "running"
}
export def failed-root [] {
  # Spool directory for jobs that ended in failure.
  (jobs-root) | path join "failed"
}
export def done-root [] {
  # Spool directory for successfully completed jobs.
  (jobs-root) | path join "done"
}
export def results-root [] {
  # Directory holding per-job result JSON files.
  (jobs-root) | path join "results"
}
export def sessions-root [] {
  # Per-job session scratch space; NOTABILITY_SESSIONS_ROOT overrides it.
  $env.NOTABILITY_SESSIONS_ROOT? | default ([(state-root) "sessions"] | path join)
}
export def qmd-dirty-file [] {
  # Marker file signalling that indexed content has changed.
  (state-root) | path join "qmd-dirty"
}
export def db-path [] {
  # Location of the sqlite database; NOTABILITY_DB_PATH overrides it.
  $env.NOTABILITY_DB_PATH? | default ([(state-root) "db.sqlite"] | path join)
}
export def now-iso [] {
  # Current time as an ISO-8601-style timestamp.
  # NOTE(review): the 'Z' suffix is a literal in the format string —
  # `date now` is not converted to UTC first, so on a non-UTC host this
  # stamps local time with a 'Z'; confirm intent.
  date now | format date "%Y-%m-%dT%H:%M:%SZ"
}
export def sql-quote [value?: any] {
  # Render a value as a SQL literal: NULL for null, otherwise a
  # single-quoted string with embedded single quotes doubled.
  if $value == null {
    return "NULL"
  }
  let escaped = ($value | into string | str replace -a "'" "''")
  $"'($escaped)'"
}
export def sql-run [sql: string] {
  # Execute SQL against the ingest database via the sqlite3 CLI.
  # A 5s busy timeout guards against writer contention; a non-zero exit
  # becomes a nushell error carrying sqlite's stderr. Returns raw stdout.
  let database = (db-path)
  let result = (^sqlite3 -cmd '.timeout 5000' $database $sql | complete)
  if $result.exit_code != 0 {
    error make {
      msg: $"sqlite3 failed: ($result.stderr | str trim)"
    }
  }
  $result.stdout
}
export def sql-json [sql: string] {
  # Run a query via the sqlite3 CLI in JSON mode and parse the rows.
  # Returns an empty list when the query produced no output at all
  # (sqlite emits nothing, not '[]', for zero rows).
  let database = (db-path)
  let result = (^sqlite3 -cmd '.timeout 5000' -json $database $sql | complete)
  if $result.exit_code != 0 {
    error make {
      msg: $"sqlite3 failed: ($result.stderr | str trim)"
    }
  }
  let text = ($result.stdout | str trim)
  if $text == "" {
    []
  } else {
    $text | from json
  }
}
export def ensure-layout [] {
  # Idempotent bootstrap: create every directory the pipeline uses and
  # the sqlite schema (tables + indexes) if they do not exist yet.
  mkdir (data-root)
  mkdir (state-root)
  mkdir (notes-root)
  mkdir (webdav-root)
  mkdir (archive-root)
  mkdir (render-root)
  mkdir (transcript-root)
  mkdir (jobs-root)
  mkdir (queued-root)
  mkdir (running-root)
  mkdir (failed-root)
  mkdir (done-root)
  mkdir (results-root)
  mkdir (sessions-root)
  # Schema: notes (current per-note state), versions (immutable archive
  # history), jobs (worker queue), events (append-only audit log).
  sql-run '
create table if not exists notes (
note_id text primary key,
source_relpath text not null unique,
title text not null,
output_path text not null,
status text not null,
first_seen_at text not null,
last_seen_at text not null,
last_processed_at text,
missing_since text,
deleted_at text,
current_source_hash text,
current_source_size integer,
current_source_mtime text,
current_archive_path text,
latest_version_id text,
last_generated_source_hash text,
last_generated_output_hash text,
conflict_path text,
last_error text
);
create table if not exists versions (
version_id text primary key,
note_id text not null,
seen_at text not null,
archive_path text not null unique,
source_hash text not null,
source_size integer not null,
source_mtime text not null,
source_relpath text not null,
ingest_result text,
session_path text,
foreign key (note_id) references notes (note_id)
);
create table if not exists jobs (
job_id text primary key,
note_id text not null,
operation text not null,
status text not null,
requested_at text not null,
started_at text,
finished_at text,
source_hash text,
job_manifest_path text not null,
result_path text not null,
error_summary text,
foreign key (note_id) references notes (note_id)
);
create table if not exists events (
id integer primary key autoincrement,
note_id text not null,
ts text not null,
kind text not null,
details text,
foreign key (note_id) references notes (note_id)
);
create index if not exists idx_jobs_status_requested_at on jobs(status, requested_at);
create index if not exists idx_versions_note_id_seen_at on versions(note_id, seen_at);
create index if not exists idx_events_note_id_ts on events(note_id, ts);
'
  | ignore
}
export def log-event [note_id: string, kind: string, details?: any] {
  # Append a row to the events audit log; details (if present) are
  # serialized to a JSON string, otherwise stored as NULL.
  let payload = if $details == null { null } else { $details | to json }
  let note_id_q = (sql-quote $note_id)
  let now_q = (sql-quote (now-iso))
  let kind_q = (sql-quote $kind)
  let payload_q = (sql-quote $payload)
  # List join avoids escaping literal parentheses in an interpolated string.
  let sql = ([
    "insert into events (note_id, ts, kind, details) values ("
    $note_id_q
    ", "
    $now_q
    ", "
    $kind_q
    ", "
    $payload_q
    ");"
  ] | str join '')
  sql-run $sql | ignore
}
export def slugify [value: string] {
  # Turn an arbitrary title into a filesystem/URL-safe slug: lowercase,
  # collapse every run of non-alphanumerics to a single '-', strip
  # leading/trailing dashes, and fall back to 'note' when nothing
  # survives.
  # Fix: the class substitution previously used `str replace -r` without
  # `-a`, so only the FIRST run of separators was replaced
  # (e.g. "a b c" became "a-b c"). The anchored edge trims can only
  # match once, so they stay single-replace.
  let slug = (
    $value
    | str downcase
    | str replace -ra '[^a-z0-9]+' '-'
    | str replace -r '^-+' ''
    | str replace -r '-+$' ''
  )
  if $slug == '' {
    'note'
  } else {
    $slug
  }
}
export def sha256 [file: path] {
  # Content hash via the external sha256sum tool. Its output is
  # "HASH  PATH\n"; keep only the leading hash field.
  ^sha256sum $file | str trim | split row ' ' | first
}
export def parse-output-frontmatter [file: path] {
  # Best-effort parse of a frontmatter block ("---\n ... \n---\n") at the
  # top of a generated note. Returns a flat key -> string record built
  # from "key: value" lines; a missing file or malformed block yields {}.
  if not ($file | path exists) {
    {}
  } else {
    let content = (open --raw $file)
    if not ($content | str starts-with "---\n") {
      {}
    } else {
      # Skip the opening "---\n" and locate the closing delimiter.
      let rest = ($content | str substring 4..)
      let end = ($rest | str index-of "\n---\n")
      # NOTE(review): current nushell `str index-of` returns -1, not
      # null, when the needle is absent — confirm this null check can
      # ever fire on the targeted nushell version.
      if $end == null {
        {}
      } else {
        # NOTE(review): an empty block ($end == 0) would make this slice
        # 0..(-1), which nushell reads as an end-relative index — confirm
        # empty frontmatter cannot occur here.
        let block = ($rest | str substring 0..($end - 1))
        $block
        | lines
        | where ($it | str contains ':')
        | reduce --fold {} {|line, acc|
          let idx = ($line | str index-of ':')
          if $idx == null {
            $acc
          } else {
            # Split on the first ':' only; values may contain colons.
            let key = ($line | str substring 0..($idx - 1) | str trim)
            let value = ($line | str substring ($idx + 1).. | str trim)
            $acc | upsert $key $value
          }
        }
      }
    }
  }
}
export def zk-generated-note-path [title: string] {
  # Ask zk (in dry-run mode) what path a new note with this title would
  # get, without creating anything. Blank titles use a placeholder.
  let root = (notes-root)
  let effective_title = if ($title | str trim) == '' {
    'Imported note'
  } else {
    $title
  }
  let result = (
    ^zk --notebook-dir $root --working-dir $root new $root --no-input --title $effective_title --print-path --dry-run
    | complete
  )
  if $result.exit_code != 0 {
    error make {
      msg: $"zk failed to generate a note path: ($result.stderr | str trim)"
    }
  }
  # The dry-run path is read from stderr, not stdout.
  let path_text = ($result.stderr | str trim)
  if $path_text == '' {
    error make {
      msg: 'zk did not return a generated note path'
    }
  }
  # Take only the last line in case zk printed warnings first.
  $path_text
  | lines
  | last
  | str trim
}
export def new-note-id [] {
  # Fresh note identifier: 'ntl_' + a dash-less UUID.
  $"ntl_(random uuid | str replace -a '-' '')"
}
export def new-job-id [] {
  # Fresh job identifier: 'job_' + a dash-less UUID.
  $"job_(random uuid | str replace -a '-' '')"
}
export def new-version-id [] {
  # Fresh version identifier: 'ver_' + a dash-less UUID.
  $"ver_(random uuid | str replace -a '-' '')"
}
export def archive-path-for [note_id: string, source_hash: string, source_relpath: string] {
  # Archive destination for one version:
  #   <archive-root>/<note_id>/<timestamp>-<hash prefix>.<ext>
  # Creates the per-note directory on demand.
  let stamp = (date now | format date "%Y-%m-%dT%H-%M-%SZ")
  # 0..11 is an inclusive range: the first 12 characters of the hash.
  let short = ($source_hash | str substring 0..11)
  let directory = [(archive-root) $note_id] | path join
  let parsed = ($source_relpath | path parse)
  # Fall back to a 'bin' extension when the source name has none.
  let extension = if (($parsed.extension? | default '') | str trim) == '' {
    'bin'
  } else {
    ($parsed.extension | str downcase)
  }
  mkdir $directory
  [$directory $"($stamp)-($short).($extension)"] | path join
}
export def transcript-path-for [note_id: string, job_id: string] {
  # Per-note transcript file for a job; creates the directory on demand.
  let note_dir = ((transcript-root) | path join $note_id)
  mkdir $note_dir
  $note_dir | path join $"($job_id).md"
}
export def result-path-for [job_id: string] {
  # Result JSON file for a job.
  (results-root) | path join $"($job_id).json"
}
export def manifest-path-for [job_id: string, status: string] {
  # Map a job status to its spool directory; unknown statuses spool as
  # queued (same as the original match's catch-all arm).
  let root = if $status == 'running' {
    running-root
  } else if $status == 'failed' {
    failed-root
  } else if $status == 'done' {
    done-root
  } else {
    queued-root
  }
  $root | path join $"($job_id).json"
}
export def note-output-path [title: string] {
  # Destination markdown path for a note, delegated to zk's dry-run.
  zk-generated-note-path $title
}
export def is-supported-source-path [path: string] {
  # Only PDF and PNG exports are ingestible; compare case-insensitively.
  let lowered = ($path | str downcase)
  ['.pdf' '.png'] | any {|ext| $lowered | str ends-with $ext }
}
export def is-ignored-path [relpath: string] {
  # Skip hidden files/dirs, temp/partial uploads, and Syncthing-style
  # conflict copies. Case-insensitive, like the extension checks.
  let p = ($relpath | str downcase)
  if ($p | str starts-with '.') or ($p | str contains '/.') {
    return true
  }
  if ($p | str contains '/~') or ($p | str ends-with '.tmp') or ($p | str ends-with '.part') {
    return true
  }
  $p | str contains '.sync-conflict'
}
export def scan-source-files [] {
  # Enumerate ingestible files under the WebDAV root. Returns records of
  # {source_path, source_relpath, source_size, source_mtime, title};
  # the title is the filename stem.
  let root = (webdav-root)
  if not ($root | path exists) {
    []
  } else {
    # Both case variants are globbed; sort | uniq collapses duplicates on
    # case-insensitive filesystems.
    let files = ([
      (glob $"($root)/**/*.pdf")
      (glob $"($root)/**/*.PDF")
      (glob $"($root)/**/*.png")
      (glob $"($root)/**/*.PNG")
    ] | flatten)
    $files
    | sort
    | uniq
    | each {|file|
      let relpath = ($file | path relative-to $root)
      if ((is-ignored-path $relpath) or not (is-supported-source-path $file)) {
        null
      } else {
        let stat = (ls -l $file | first)
        {
          source_path: $file
          source_relpath: $relpath
          source_size: $stat.size
          source_mtime: ($stat.modified | format date "%Y-%m-%dT%H:%M:%SZ")
          title: (($relpath | path parse).stem)
        }
      }
    }
    # NOTE(review): current nushell `each` already drops null results, so
    # this trailing filter looks redundant — harmless belt-and-braces.
    | where $it != null
  }
}

View File

@@ -1,387 +0,0 @@
#!/usr/bin/env nu
use ./lib.nu *
use ./jobs.nu [archive-and-version, enqueue-job]
# A file's mtime must be at least this old before we trust the upload.
const settle_window = 45sec
# How long a note may stay 'source_missing' before becoming 'source_deleted'.
const delete_grace = 15min
def settle-remaining [source_mtime: string] {
  # Time still needed before a freshly-uploaded file counts as settled;
  # 0sec once its mtime is at least settle_window old.
  let age = ((date now) - ($source_mtime | into datetime))
  if $age < $settle_window {
    $settle_window - $age
  } else {
    0sec
  }
}
def is-settled [source_mtime: string] {
  # A file is settled once its mtime is at least settle_window old.
  let age = ((date now) - ($source_mtime | into datetime))
  not ($age < $settle_window)
}
def log-job-enqueued [note_id: string, job_id: string, operation: string, source_hash: string, archive_path: string] {
  # Record a 'job-enqueued' audit event with the job's key context.
  let details = {
    job_id: $job_id
    operation: $operation
    source_hash: $source_hash
    archive_path: $archive_path
  }
  log-event $note_id 'job-enqueued' $details
}
def find-rename-candidate [source_hash: string] {
  # Look for a non-active note (e.g. source_missing / source_deleted)
  # whose last known content hash matches: identical bytes under a new
  # path are treated as a rename rather than a brand-new note.
  # Returns a list of zero or one rows.
  sql-json $"
select *
from notes
where current_source_hash = (sql-quote $source_hash)
and status != 'active'
and status != 'failed'
and status != 'conflict'
order by last_seen_at desc
limit 1;
"
}
def touch-note [note_id: string, source_size: any, source_mtime: string, status: string = 'active'] {
  # Refresh last_seen/size/mtime/status for a note without re-hashing or
  # re-archiving anything.
  let source_size_int = ($source_size | into int)
  let now_q = (sql-quote (now-iso))
  let source_mtime_q = (sql-quote $source_mtime)
  let status_q = (sql-quote $status)
  let note_id_q = (sql-quote $note_id)
  sql-run $"
update notes
set last_seen_at = ($now_q),
current_source_size = ($source_size_int),
current_source_mtime = ($source_mtime_q),
status = ($status_q)
where note_id = ($note_id_q);
"
  | ignore
}
def process-existing [note: record, source: record] {
  # Reconcile one already-known note against the file currently on disk.
  # Paths, cheapest first:
  #   1. file not settled yet      -> just refresh the row
  #   2. size/mtime/status unchanged -> refresh row, skip hashing
  #   3. hash unchanged            -> refresh row; re-enqueue only if the
  #      note previously failed or its output was never generated
  #   4. hash changed              -> archive a new version and enqueue
  let title = $source.title
  let note_id = ($note | get note_id)
  let note_status = ($note | get status)
  let source_size_int = ($source.source_size | into int)
  if not (is-settled $source.source_mtime) {
    touch-note $note_id $source_size_int $source.source_mtime $note_status
    return
  }
  # Heuristic: only pay for a sha256 when something observable changed,
  # the note is not cleanly active, or its output is not up to date.
  let previous_size = ($note.current_source_size? | default (-1))
  let previous_mtime = ($note.current_source_mtime? | default '')
  let size_changed = ($previous_size != $source_size_int)
  let mtime_changed = ($previous_mtime != $source.source_mtime)
  let needs_ingest = (($note.last_generated_source_hash? | default '') != ($note.current_source_hash? | default ''))
  let hash_needed = ($note.current_source_hash? | default null) == null or $size_changed or $mtime_changed or ($note_status != 'active') or $needs_ingest
  if not $hash_needed {
    # Path 2: nothing changed — refresh bookkeeping and clear any
    # missing/deleted markers.
    let now_q = (sql-quote (now-iso))
    let title_q = (sql-quote $title)
    let note_id_q = (sql-quote $note_id)
    sql-run $"
update notes
set last_seen_at = ($now_q),
status = 'active',
title = ($title_q),
missing_since = null,
deleted_at = null
where note_id = ($note_id_q);
"
    | ignore
    return
  }
  let source_hash = (sha256 $source.source_path)
  if ($source_hash == ($note.current_source_hash? | default '')) {
    # Path 3: content identical. A failed note stays 'failed' so the
    # retry below is attributable; everything else returns to 'active'.
    let now_q = (sql-quote (now-iso))
    let title_q = (sql-quote $title)
    let source_mtime_q = (sql-quote $source.source_mtime)
    let note_id_q = (sql-quote $note_id)
    let next_status = if $note_status == 'failed' { 'failed' } else { 'active' }
    sql-run $"
update notes
set last_seen_at = ($now_q),
title = ($title_q),
status = (sql-quote $next_status),
missing_since = null,
deleted_at = null,
current_source_size = ($source_size_int),
current_source_mtime = ($source_mtime_q)
where note_id = ($note_id_q);
"
    | ignore
    # Re-enqueue only for a failed note or when the generated output is
    # stale/missing for this hash.
    let should_enqueue = ($note_status == 'failed' or (($note.last_generated_source_hash? | default '') != $source_hash))
    if not $should_enqueue {
      return
    }
    # Reuse the existing archive copy when we have one; otherwise create
    # a version record first.
    let archive_path = if (($note.current_archive_path? | default '') | str trim) == '' {
      let version = (archive-and-version $note_id $source.source_path $source.source_relpath $source_size_int $source.source_mtime $source_hash)
      let archive_path_q = (sql-quote $version.archive_path)
      let version_id_q = (sql-quote $version.version_id)
      sql-run $"
update notes
set current_archive_path = ($archive_path_q),
latest_version_id = ($version_id_q)
where note_id = ($note_id_q);
"
      | ignore
      $version.archive_path
    } else {
      $note.current_archive_path
    }
    let runtime_note = ($note | upsert source_path $source.source_path | upsert source_relpath $source.source_relpath | upsert output_path $note.output_path | upsert last_generated_output_hash ($note.last_generated_output_hash? | default null))
    let retry_job = (enqueue-job $runtime_note 'upsert' $archive_path $archive_path $source_hash $title)
    if $retry_job != null {
      log-job-enqueued $note_id $retry_job.job_id 'upsert' $source_hash $archive_path
      let reason = if $note_status == 'failed' {
        'retry-failed-note'
      } else {
        'missing-generated-output'
      }
      log-event $note_id 'job-reenqueued' {
        job_id: $retry_job.job_id
        reason: $reason
        source_hash: $source_hash
        archive_path: $archive_path
      }
    }
    return
  }
  # Path 4: content changed — archive the new bytes, point the notes row
  # at the fresh version, and enqueue an upsert job.
  let version = (archive-and-version $note_id $source.source_path $source.source_relpath $source_size_int $source.source_mtime $source_hash)
  let now_q = (sql-quote (now-iso))
  let title_q = (sql-quote $title)
  let source_hash_q = (sql-quote $source_hash)
  let source_mtime_q = (sql-quote $source.source_mtime)
  let archive_path_q = (sql-quote $version.archive_path)
  let version_id_q = (sql-quote $version.version_id)
  let note_id_q = (sql-quote $note_id)
  sql-run $"
update notes
set last_seen_at = ($now_q),
title = ($title_q),
status = 'active',
missing_since = null,
deleted_at = null,
current_source_hash = ($source_hash_q),
current_source_size = ($source_size_int),
current_source_mtime = ($source_mtime_q),
current_archive_path = ($archive_path_q),
latest_version_id = ($version_id_q),
last_error = null
where note_id = ($note_id_q);
"
  | ignore
  let runtime_note = ($note | upsert source_path $source.source_path | upsert source_relpath $source.source_relpath | upsert output_path $note.output_path | upsert last_generated_output_hash ($note.last_generated_output_hash? | default null))
  let job = (enqueue-job $runtime_note 'upsert' $version.archive_path $version.archive_path $source_hash $title)
  if $job != null {
    log-job-enqueued $note_id $job.job_id 'upsert' $source_hash $version.archive_path
  }
  log-event $note_id 'source-updated' {
    source_relpath: $source.source_relpath
    source_hash: $source_hash
    archive_path: $version.archive_path
  }
}
def process-new [source: record] {
  # Handle a file with no matching notes row. If an inactive note shares
  # its content hash, treat it as a rename and re-point that note;
  # otherwise register a brand-new note, archive a first version, and
  # enqueue the initial upsert job.
  if not (is-settled $source.source_mtime) {
    return
  }
  let source_hash = (sha256 $source.source_path)
  let source_size_int = ($source.source_size | into int)
  let rename_candidates = (find-rename-candidate $source_hash)
  if not ($rename_candidates | is-empty) {
    # Rename path: same bytes, new relpath — no re-ingest needed.
    let rename_candidate = ($rename_candidates | first)
    let source_relpath_q = (sql-quote $source.source_relpath)
    let title_q = (sql-quote $source.title)
    let now_q = (sql-quote (now-iso))
    let source_mtime_q = (sql-quote $source.source_mtime)
    let note_id_q = (sql-quote $rename_candidate.note_id)
    sql-run $"
update notes
set source_relpath = ($source_relpath_q),
title = ($title_q),
last_seen_at = ($now_q),
status = 'active',
missing_since = null,
deleted_at = null,
current_source_size = ($source_size_int),
current_source_mtime = ($source_mtime_q)
where note_id = ($note_id_q);
"
    | ignore
    log-event $rename_candidate.note_id 'source-renamed' {
      from: $rename_candidate.source_relpath
      to: $source.source_relpath
    }
    return
  }
  # New-note path: allocate ids, reserve an output path via zk, and
  # archive the first version before inserting the row.
  let note_id = (new-note-id)
  let first_seen_at = (now-iso)
  let output_path = (note-output-path $source.title)
  let version = (archive-and-version $note_id $source.source_path $source.source_relpath $source_size_int $source.source_mtime $source_hash)
  let note_id_q = (sql-quote $note_id)
  let source_relpath_q = (sql-quote $source.source_relpath)
  let title_q = (sql-quote $source.title)
  let output_path_q = (sql-quote $output_path)
  let first_seen_q = (sql-quote $first_seen_at)
  let source_hash_q = (sql-quote $source_hash)
  let source_mtime_q = (sql-quote $source.source_mtime)
  let archive_path_q = (sql-quote $version.archive_path)
  let version_id_q = (sql-quote $version.version_id)
  # List join avoids escaping literal parentheses in an interpolated string.
  let sql = ([
    "insert into notes (note_id, source_relpath, title, output_path, status, first_seen_at, last_seen_at, current_source_hash, current_source_size, current_source_mtime, current_archive_path, latest_version_id) values ("
    $note_id_q
    ", "
    $source_relpath_q
    ", "
    $title_q
    ", "
    $output_path_q
    ", 'active', "
    $first_seen_q
    ", "
    $first_seen_q
    ", "
    $source_hash_q
    ", "
    ($source_size_int | into string)
    ", "
    $source_mtime_q
    ", "
    $archive_path_q
    ", "
    $version_id_q
    ");"
  ] | str join '')
  sql-run $sql | ignore
  let note = {
    note_id: $note_id
    source_relpath: $source.source_relpath
    source_path: $source.source_path
    output_path: $output_path
    last_generated_output_hash: null
  }
  let job = (enqueue-job $note 'upsert' $version.archive_path $version.archive_path $source_hash $source.title)
  if $job != null {
    log-job-enqueued $note_id $job.job_id 'upsert' $source_hash $version.archive_path
  }
  log-event $note_id 'source-discovered' {
    source_relpath: $source.source_relpath
    source_hash: $source_hash
    archive_path: $version.archive_path
    output_path: $output_path
  }
}
def mark-missing [seen_relpaths: list<string>] {
  # Flag notes whose source vanished from the scan: active notes become
  # 'source_missing' with a timestamp, and notes missing longer than
  # delete_grace are promoted to 'source_deleted'. Both transitions are
  # logged to the events table.
  # Improvement: membership is tested with `in` instead of a per-note
  # `any {…}` closure — same result, simpler and avoids building a
  # closure invocation per note.
  let notes = (sql-json 'select note_id, source_relpath, status, missing_since from notes;')
  for note in $notes {
    # Still present on disk — nothing to do for this note.
    if $note.source_relpath in $seen_relpaths {
      continue
    }
    if $note.status == 'active' {
      # First time we notice the file is gone: start the grace clock.
      let missing_since = (now-iso)
      let missing_since_q = (sql-quote $missing_since)
      let note_id_q = (sql-quote $note.note_id)
      sql-run $"
update notes
set status = 'source_missing',
missing_since = ($missing_since_q)
where note_id = ($note_id_q);
"
      | ignore
      log-event $note.note_id 'source-missing' {
        source_relpath: $note.source_relpath
      }
      continue
    }
    if $note.status == 'source_missing' and ($note.missing_since? | default null) != null {
      # Grace period elapsed -> consider the source deliberately deleted.
      let missing_since = ($note.missing_since | into datetime)
      if ((date now) - $missing_since) >= $delete_grace {
        let deleted_at = (now-iso)
        let deleted_at_q = (sql-quote $deleted_at)
        let note_id_q = (sql-quote $note.note_id)
        sql-run $"
update notes
set status = 'source_deleted',
deleted_at = ($deleted_at_q)
where note_id = ($note_id_q);
"
        | ignore
        log-event $note.note_id 'source-deleted' {
          source_relpath: $note.source_relpath
        }
      }
    }
  }
}
export def reconcile-run [] {
  # One full reconcile pass: scan the WebDAV tree, wait out any files
  # still inside the settle window, dispatch each file to the new/existing
  # handler, then mark notes whose sources disappeared.
  ensure-layout
  mut sources = (scan-source-files)
  let unsettled = (
    $sources
    | each {|source|
      {
        source_path: $source.source_path
        remaining: (settle-remaining $source.source_mtime)
      }
    }
    | where remaining > 0sec
  )
  if not ($unsettled | is-empty) {
    # Sleep past the newest upload (plus slack) and rescan so sizes and
    # mtimes reflect the finished transfer.
    let max_remaining = ($unsettled | get remaining | math max)
    print $"Waiting ($max_remaining) for recent Notability uploads to settle"
    sleep ($max_remaining + 2sec)
    $sources = (scan-source-files)
  }
  for source in $sources {
    let existing_rows = (sql-json $"
select *
from notes
where source_relpath = (sql-quote $source.source_relpath)
limit 1;
")
    if (($existing_rows | length) == 0) {
      process-new $source
    } else {
      let existing = ($existing_rows | first)
      process-existing ($existing | upsert source_path $source.source_path) $source
    }
  }
  mark-missing ($sources | get source_relpath)
}
def main [] {
  # Script entry point: run a single reconcile pass.
  reconcile-run
}

View File

@@ -1,148 +0,0 @@
#!/usr/bin/env nu
use ./lib.nu *
use ./jobs.nu [archive-and-version, enqueue-job]
use ./worker.nu [worker-run]
def latest-version [note_id: string] {
  # Most recently archived version row for a note, or null when the note
  # has no versions.
  # Fix: `first` raises on an empty list in nushell, so the empty case is
  # guarded explicitly — otherwise the caller's friendly
  # "No archived version found" error could never fire.
  let rows = (sql-json $"
select *
from versions
where note_id = (sql-quote $note_id)
order by seen_at desc
limit 1;
")
  if ($rows | is-empty) {
    null
  } else {
    $rows | first
  }
}
def existing-active-job [note_id: string, source_hash: string] {
  # Newest unfinished job (not 'done'/'failed') for this note + hash, or
  # null when none exists.
  # Fix: `first` raises on an empty list in nushell; guard the empty case
  # so callers can rely on a null result instead of an error.
  let rows = (sql-json $"
select job_id
from jobs
where note_id = (sql-quote $note_id)
and source_hash = (sql-quote $source_hash)
and status != 'done'
and status != 'failed'
order by requested_at desc
limit 1;
")
  if ($rows | is-empty) {
    null
  } else {
    $rows | first
  }
}
def archive-current-source [note: record] {
  # Hash and archive the live source file, record a new version, and
  # point the notes row at it. Errors when the source path is gone.
  # Returns {input_path, archive_path, source_hash} for job enqueueing.
  if not ($note.source_path | path exists) {
    error make {
      msg: $"Current source path is missing: ($note.source_path)"
    }
  }
  let source_hash = (sha256 $note.source_path)
  # Improvement: stat the file once instead of running `ls -l` twice.
  let stat = (ls -l $note.source_path | first)
  let source_size = ($stat.size | into int)
  let source_mtime = ($stat.modified | format date "%Y-%m-%dT%H:%M:%SZ")
  let version = (archive-and-version $note.note_id $note.source_path $note.source_relpath $source_size $source_mtime $source_hash)
  sql-run $"
update notes
set current_source_hash = (sql-quote $source_hash),
current_source_size = ($source_size),
current_source_mtime = (sql-quote $source_mtime),
current_archive_path = (sql-quote $version.archive_path),
latest_version_id = (sql-quote $version.version_id),
last_seen_at = (sql-quote (now-iso)),
status = 'active',
missing_since = null,
deleted_at = null
where note_id = (sql-quote $note.note_id);
"
  | ignore
  {
    input_path: $version.archive_path
    archive_path: $version.archive_path
    source_hash: $source_hash
  }
}
def enqueue-reingest-job [note: record, source_hash: string, input_path: string, archive_path: string, force_overwrite_generated: bool] {
  # Enqueue a 'reingest' job for the note, then drain the worker queue.
  # enqueue-job returns null when an equivalent job is already pending —
  # in that case we just report the existing job id and stop.
  let job = (enqueue-job $note 'reingest' $input_path $archive_path $source_hash $note.title $force_overwrite_generated)
  if $job == null {
    let existing = (existing-active-job $note.note_id $source_hash)
    print $"Already queued: ($existing.job_id? | default 'unknown')"
    return
  }
  log-event $note.note_id 'reingest-enqueued' {
    job_id: $job.job_id
    source_hash: $source_hash
    archive_path: $archive_path
    force_overwrite_generated: $force_overwrite_generated
  }
  print $"Enqueued ($job.job_id) for ($note.note_id)"
  # Rewrap worker failures so the error message survives as a string.
  try {
    worker-run --drain
  } catch {|error|
    error make {
      msg: (($error.msg? | default ($error | to nuon)) | into string)
    }
  }
}
def main [note_id: string, --latest-source, --latest-archive, --force-overwrite-generated] {
  # Re-run ingestion for one note, using either the live source file
  # (--latest-source) or the newest archived copy (--latest-archive).
  # With no flag: live source when the note is active and present,
  # otherwise the archive.
  ensure-layout
  let note_rows = (sql-json $"
select *
from notes
where note_id = (sql-quote $note_id)
limit 1;
")
  # Fix: `first` raises on an empty list in nushell, so the previous
  # `| first` + null check made the 'Unknown note id' error unreachable.
  # Guard emptiness before taking the row.
  if ($note_rows | is-empty) {
    error make {
      msg: $"Unknown note id: ($note_id)"
    }
  }
  let note_row = ($note_rows | first)
  # Resolve the live path the WebDAV tree would hold for this note.
  let note = ($note_row | upsert source_path ([ (webdav-root) $note_row.source_relpath ] | path join))
  if $latest_source and $latest_archive {
    error make {
      msg: 'Choose only one of --latest-source or --latest-archive'
    }
  }
  let source_mode = if $latest_source {
    'source'
  } else if $latest_archive {
    'archive'
  } else if ($note.status == 'active' and ($note.source_path | path exists)) {
    'source'
  } else {
    'archive'
  }
  if $source_mode == 'source' {
    let archived = (archive-current-source $note)
    enqueue-reingest-job $note $archived.source_hash $archived.input_path $archived.archive_path $force_overwrite_generated
    return
  }
  let version = (latest-version $note.note_id)
  if $version == null {
    error make {
      msg: $"No archived version found for ($note.note_id)"
    }
  }
  enqueue-reingest-job $note $version.source_hash $version.archive_path $version.archive_path $force_overwrite_generated
}

View File

@@ -1,202 +0,0 @@
#!/usr/bin/env nu
use ./lib.nu *
def format-summary [] {
  # Plain-text overview: key paths, note counts per status, and
  # queued/running/failed job counts.
  # Fix: the previous `where … | first` raised whenever a status had no
  # rows (the common case for an empty db), crashing the summary; counts
  # now fall back to 0 via an explicit emptiness check.
  let counts = (sql-json '
select status, count(*) as count
from notes
group by status
order by status;
')
  let queue = (sql-json "
select status, count(*) as count
from jobs
where status in ('queued', 'running', 'failed')
group by status
order by status;
")
  let lines = [
    $"notes db: (db-path)"
    $"webdav root: (webdav-root)"
    $"notes root: (notes-root)"
    ''
    'notes:'
  ]
  # Shared lookup: count for a status, 0 when the status has no row.
  let count_for = {|rows, status|
    let matched = ($rows | where {|row| ($row | get 'status') == $status })
    if ($matched | is-empty) { 0 } else { ($matched | first).count }
  }
  let note_statuses = ('active,source_missing,source_deleted,conflict,failed' | split row ',')
  let note_lines = (
    $note_statuses
    | each {|status|
      let count = (do $count_for $counts $status)
      $" ($status): ($count)"
    }
  )
  let job_statuses = ('queued,running,failed' | split row ',')
  let job_lines = (
    $job_statuses
    | each {|status|
      let count = (do $count_for $queue $status)
      $" ($status): ($count)"
    }
  )
  ($lines ++ $note_lines ++ ['' 'jobs:'] ++ $job_lines ++ ['']) | str join "\n"
}
def format-note [note_id: string] {
  # Detailed plain-text report for one note: db fields, output-file
  # frontmatter, the last 5 jobs, and the last 10 events.
  # Fix: `first` raises on an empty list in nushell, so the previous
  # `| first` + null check made the 'Unknown note id' error unreachable;
  # emptiness is now guarded before taking the row.
  let note_rows = (sql-json $"
select *
from notes
where note_id = (sql-quote $note_id)
limit 1;
")
  if ($note_rows | is-empty) {
    error make {
      msg: $"Unknown note id: ($note_id)"
    }
  }
  let note = ($note_rows | first)
  let jobs = (sql-json $"
select job_id, operation, status, requested_at, started_at, finished_at, source_hash, error_summary
from jobs
where note_id = (sql-quote $note_id)
order by requested_at desc
limit 5;
")
  let events = (sql-json $"
select ts, kind, details
from events
where note_id = (sql-quote $note_id)
order by ts desc
limit 10;
")
  let output_exists = ($note.output_path | path exists)
  let frontmatter = (parse-output-frontmatter $note.output_path)
  let lines = [
    $"note_id: ($note.note_id)"
    $"title: ($note.title)"
    $"status: ($note.status)"
    $"source_relpath: ($note.source_relpath)"
    $"output_path: ($note.output_path)"
    $"output_exists: ($output_exists)"
    $"managed_by: ($frontmatter.managed_by? | default '')"
    $"frontmatter_note_id: ($frontmatter.note_id? | default '')"
    $"current_source_hash: ($note.current_source_hash? | default '')"
    $"last_generated_output_hash: ($note.last_generated_output_hash? | default '')"
    $"current_archive_path: ($note.current_archive_path? | default '')"
    $"last_processed_at: ($note.last_processed_at? | default '')"
    $"missing_since: ($note.missing_since? | default '')"
    $"deleted_at: ($note.deleted_at? | default '')"
    $"conflict_path: ($note.conflict_path? | default '')"
    $"last_error: ($note.last_error? | default '')"
    ''
    'recent jobs:'
  ]
  let job_lines = if ($jobs | is-empty) {
    [' (none)']
  } else {
    $jobs | each {|job|
      $" ($job.job_id) [($job.status)] ($job.operation) requested=($job.requested_at) error=($job.error_summary? | default '')"
    }
  }
  let event_lines = if ($events | is-empty) {
    [' (none)']
  } else {
    $events | each {|event|
      $" ($event.ts) ($event.kind) ($event.details? | default '')"
    }
  }
  ($lines ++ $job_lines ++ ['' 'recent events:'] ++ $event_lines ++ ['']) | str join "\n"
}
def format-filtered [status: string, label: string] {
  # List notes in the requested status, one line per note, appending a
  # status-specific detail (conflict path or last error) where relevant.
  let rows = (sql-json $"
select note_id, title, source_relpath, output_path, status, last_error, conflict_path
from notes
where status = (sql-quote $status)
order by last_seen_at desc;
")
  let body = if ($rows | is-empty) {
    [' (none)']
  } else {
    $rows | each {|entry|
      let suffix = match $status {
        'conflict' => $" conflict_path=($entry.conflict_path? | default '')"
        'failed' => $" last_error=($entry.last_error? | default '')"
        _ => ''
      }
      $" ($entry.note_id) ($entry.title) [($entry.status)] source=($entry.source_relpath) output=($entry.output_path)($suffix)"
    }
  }
  ([$label] ++ $body ++ ['']) | str join "\n"
}
def format-queue [] {
  # Render pending work: queued, running, and failed jobs in request order.
  let pending = (sql-json "
select job_id, note_id, operation, status, requested_at, started_at, error_summary
from jobs
where status in ('queued', 'running', 'failed')
order by requested_at asc;
")
  if ($pending | is-empty) {
    ['queue' ' (empty)' ''] | str join "\n"
  } else {
    let rows = ($pending | each {|entry|
      $" ($entry.job_id) note=($entry.note_id) [($entry.status)] ($entry.operation) requested=($entry.requested_at) error=($entry.error_summary? | default '')"
    })
    (['queue'] ++ $rows ++ ['']) | str join "\n"
  }
}
# CLI entry point for the status tool. Flags take precedence over the
# positional note id, checked in this order: --queue, --failed, --deleted,
# --conflicts; a bare note id prints that note's detail report; with no
# arguments the overall summary is printed.
def main [note_id?: string, --failed, --queue, --deleted, --conflicts] {
ensure-layout
if $queue {
print (format-queue)
return
}
if $failed {
print (format-filtered 'failed' 'failed notes')
return
}
if $deleted {
print (format-filtered 'source_deleted' 'deleted notes')
return
}
if $conflicts {
print (format-filtered 'conflict' 'conflict notes')
return
}
if $note_id != null {
print (format-note $note_id)
return
}
print (format-summary)
}

View File

@@ -1,58 +0,0 @@
#!/usr/bin/env nu
use ./lib.nu *
use ./reconcile.nu [reconcile-run]
use ./worker.nu [worker-run]
def error-message [error: any] {
  # Prefer the error record's msg field; fall back to a nuon dump of the
  # whole record when msg is absent or empty.
  let text = ($error.msg? | default '' | into string)
  if $text != '' {
    $text
  } else {
    $error | to nuon
  }
}
def run-worker [] {
  # Drain the job queue once, reporting (not propagating) worker failures.
  try {
    worker-run --drain
  } catch {|err|
    print $"worker failed: (error-message $err)"
  }
}
def run-sync [] {
  # Drain pending jobs, reconcile sources against the db, then drain any
  # jobs the reconcile pass enqueued. Skip the second drain on failure.
  run-worker
  let reconciled = (try {
    reconcile-run
    true
  } catch {|err|
    print $"reconcile failed: (error-message $err)"
    false
  })
  if $reconciled {
    run-worker
  }
}
def main [] {
  # Watch the WebDAV tree and re-run a full sync whenever a supported
  # source file changes on disk.
  ensure-layout
  let root = (webdav-root)
  print $"Watching ($root) for Notability WebDAV updates"
  run-sync
  ^inotifywait -m -r --format '%w%f' -e create -e close_write -e moved_to -e moved_from -e delete -e attrib $root
  | lines
  | each {|changed_path|
    # Ignore events for paths we do not ingest (guard instead of return).
    if (is-supported-source-path $changed_path) {
      print $"Filesystem event for ($changed_path)"
      run-sync
    }
  }
}

View File

@@ -1,36 +0,0 @@
#!/usr/bin/env nu
use ./lib.nu *
def main [] {
  # Serve the WebDAV root via rclone. Address, user, and base URL come
  # from NOTABILITY_WEBDAV_* env vars with defaults; the password file is
  # mandatory and read at startup.
  ensure-layout
  let root = (webdav-root)
  let addr = ($env.NOTABILITY_WEBDAV_ADDR? | default '127.0.0.1:9980')
  let user = ($env.NOTABILITY_WEBDAV_USER? | default 'notability')
  let baseurl = ($env.NOTABILITY_WEBDAV_BASEURL? | default '/')
  let password_file = ($env.NOTABILITY_WEBDAV_PASSWORD_FILE? | default null)
  if $password_file == null {
    error make {
      msg: 'NOTABILITY_WEBDAV_PASSWORD_FILE is required'
    }
  }
  let password = (open --raw $password_file | str trim)
  print $"Starting WebDAV on ($addr), serving ($root), base URL ($baseurl)"
  run-external rclone 'serve' 'webdav' $root '--addr' $addr '--baseurl' $baseurl '--user' $user '--pass' $password
}

View File

@@ -1,506 +0,0 @@
#!/usr/bin/env nu
use ./lib.nu *
# Minimum age of the qmd dirty-marker file before `qmd update` is run.
const qmd_debounce = 1min
# How long the worker sleeps between queue polls when idle.
const idle_sleep = 10sec
# Vision-capable model used for both transcription and normalization.
const vision_model = 'openai-codex/gpt-5.4'
# Wall-clock limits handed to `timeout` around each pi invocation.
const transcribe_timeout = '90s'
const normalize_timeout = '60s'
def next-queued-job [] {
  # Oldest queued job, or null when the queue is empty. `[] | first`
  # raises in current nushell, so guard explicitly: callers depend on
  # receiving null (not an error) when nothing is queued.
  let rows = (sql-json "
select job_id, note_id, operation, job_manifest_path, result_path, source_hash
from jobs
where status = 'queued'
order by requested_at asc
limit 1;
")
  if ($rows | is-empty) { null } else { $rows | first }
}
# Run `qmd update` in the notes root when the dirty marker exists and is
# old enough (debounced), clearing the marker only on success.
def maybe-update-qmd [] {
let dirty = (qmd-dirty-file)
# No marker means no jobs finished since the last update.
if not ($dirty | path exists) {
return
}
# Debounce: skip while the marker was touched less than qmd_debounce ago.
let modified = ((ls -l $dirty | first).modified)
if ((date now) - $modified) < $qmd_debounce {
return
}
print 'Running qmd update'
# Run inside a sub-scope so the cd does not leak into the caller.
let result = (do {
cd (notes-root)
run-external qmd 'update' | complete
})
if $result.exit_code != 0 {
print $"qmd update failed: ($result.stderr | str trim)"
return
}
# Only clear the marker after a successful update.
rm -f $dirty
}
def write-result [result_path: path, payload: record] {
  # Persist a job result record as pretty-printed JSON, creating any
  # missing parent directories first.
  let parent = ($result_path | path dirname)
  mkdir $parent
  $payload | to json --indent 2 | save -f $result_path
}
def error-message [error: any] {
  # Human-readable message for a caught error. Fall back to a nuon dump
  # when msg is empty or the uninformative generic external-command text.
  let text = ($error.msg? | default '' | into string)
  if $text == '' or $text == 'External command failed' {
    $error | to nuon
  } else {
    $text
  }
}
def unquote [value?: any] {
  # Strip one layer of matching single or double quotes; '' for null.
  if $value == null {
    return ''
  }
  $value
  | into string
  | str replace -r '^"(.*)"$' '$1'
  | str replace -r "^'(.*)'$" '$1'
}
def source-format [file: path] {
  # Lower-cased file extension; 'bin' when the extension field is absent.
  let parsed = ([$file] | path parse | first)
  $parsed.extension? | default 'bin' | str downcase
}
def conflict-path-for [output_path: path] {
  # Sibling path `name.conflict-<stamp>.ext` used when the target file is
  # not managed by us and must not be overwritten.
  let parsed = ([$output_path] | path parse | first)
  # The stamp carries a trailing Z (Zulu), so format the time in UTC —
  # the original formatted local time while labeling it as UTC.
  let stamp = (date now | date to-timezone UTC | format date '%Y-%m-%dT%H-%M-%SZ')
  [$parsed.parent $"($parsed.stem).conflict-($stamp).($parsed.extension)"] | path join
}
# Scan the notes root for markdown files whose frontmatter marks them as
# managed by notability-ingest for the given note id. Returns a sorted
# list of paths; empty when the root does not exist.
def find-managed-outputs [note_id: string] {
let root = (notes-root)
if not ($root | path exists) {
[]
} else {
(glob $"($root)/**/*.md")
# Skip anything inside dot-directories (e.g. editor/trash folders).
| where not ($it | str contains '/.')
| where {|file|
let parsed = (parse-output-frontmatter $file)
# Frontmatter values may be quoted; unquote before comparing.
(unquote ($parsed.managed_by? | default '')) == 'notability-ingest' and (unquote ($parsed.note_id? | default '')) == $note_id
}
| sort
}
}
# Resolve where this note's managed markdown actually lives: prefer the
# configured path when its frontmatter matches this note, otherwise scan
# the notes tree for a file claiming this note id (handles user renames).
def resolve-managed-output-path [note_id: string, configured_output_path: path] {
if ($configured_output_path | path exists) {
let parsed = (parse-output-frontmatter $configured_output_path)
let managed_by = (unquote ($parsed.managed_by? | default ''))
let frontmatter_note_id = (unquote ($parsed.note_id? | default ''))
if ($managed_by == 'notability-ingest' and $frontmatter_note_id == $note_id) {
return $configured_output_path
}
}
# Configured path missing or not ours; look for a moved/renamed output.
let discovered = (find-managed-outputs $note_id)
if ($discovered | is-empty) {
$configured_output_path
} else if (($discovered | length) == 1) {
$discovered | first
} else {
# More than one candidate is ambiguous; refuse to guess.
error make {
msg: $"Multiple managed note files found for ($note_id): (($discovered | str join ', '))"
}
}
}
# Decide where ingest output should be written and how: 'create' when the
# target does not exist, 'overwrite' when the existing file is managed by
# us for this note, or 'conflict' (write to a sibling conflict path) when
# the file exists but belongs to someone else.
def determine-write-target [manifest: record] {
let output_path = (resolve-managed-output-path $manifest.note_id $manifest.output_path)
if not ($output_path | path exists) {
return {
output_path: $output_path
write_path: $output_path
write_mode: 'create'
updated_main_output: true
}
}
let parsed = (parse-output-frontmatter $output_path)
let managed_by = (unquote ($parsed.managed_by? | default ''))
let frontmatter_note_id = (unquote ($parsed.note_id? | default ''))
if ($managed_by == 'notability-ingest' and $frontmatter_note_id == $manifest.note_id) {
return {
output_path: $output_path
write_path: $output_path
write_mode: 'overwrite'
updated_main_output: true
}
}
# Existing file is not ours: divert to a conflict path beside it.
{
output_path: $output_path
write_path: (conflict-path-for $output_path)
write_mode: 'conflict'
updated_main_output: false
}
}
# Assemble the final markdown document: YAML frontmatter describing the
# note's provenance, followed by the normalized transcription body.
def build-markdown [manifest: record, normalized: string] {
let body = ($normalized | str trim)
# Never emit an empty document; fall back to a title-only heading.
let output_body = if $body == '' {
$"# ($manifest.title)"
} else {
$body
}
# First ten characters of the ISO timestamp, i.e. the YYYY-MM-DD part.
let created = ($manifest.requested_at | str substring 0..9)
let updated = ((date now) | format date '%Y-%m-%d')
[
'---'
$"title: ($manifest.title | to json)"
$"created: ($created | to json)"
$"updated: ($updated | to json)"
'source: "notability"'
$"source_transport: (($manifest.source_transport? | default 'webdav') | to json)"
$"source_relpath: ($manifest.source_relpath | to json)"
$"note_id: ($manifest.note_id | to json)"
'managed_by: "notability-ingest"'
$"source_file: ($manifest.archive_path | to json)"
$"source_file_hash: ($'sha256:($manifest.source_hash)' | to json)"
$"source_format: ((source-format $manifest.archive_path) | to json)"
'status: "active"'
'tags:'
' - handwritten'
' - notability'
'---'
''
$output_body
''
] | str join "\n"
}
# Turn the archived input into a list of PNG page images: PNGs pass
# through unchanged, PDFs are rasterized one PNG per page via pdftoppm.
# Raises for any other format or when rendering yields no pages.
def render-pages [input_path: path, job_id: string] {
let extension = (([$input_path] | path parse | first).extension? | default '' | str downcase)
if $extension == 'png' {
[ $input_path ]
} else if $extension == 'pdf' {
# Render into a per-job directory so concurrent jobs cannot collide.
let render_dir = [(render-root) $job_id] | path join
mkdir $render_dir
let prefix = [$render_dir 'page'] | path join
# 200 DPI rasterization; pdftoppm appends page numbers to the prefix.
^pdftoppm -png -r 200 $input_path $prefix
let pages = ((glob $"($render_dir)/*.png") | sort)
if ($pages | is-empty) {
error make {
msg: $"No PNG pages rendered from ($input_path)"
}
}
$pages
} else {
error make {
msg: $"Unsupported Notability input format: ($input_path)"
}
}
}
# Invoke the `pi` CLI with the prompt written to a temp file and the
# inputs passed as @path attachment references, bounded by `timeout`.
# Returns trimmed stdout; raises when pi produced no output.
def call-pi [timeout_window: string, prompt: string, inputs: list<path>, thinking: string] {
let prompt_file = (^mktemp --suffix '.md' | str trim)
$prompt | save -f $prompt_file
# pi consumes files via @path references.
let input_refs = ($inputs | each {|input| $'@($input)' })
let prompt_ref = $'@($prompt_file)'
let result = (try {
^timeout $timeout_window pi --model $vision_model --thinking $thinking --no-tools --no-session -p ...$input_refs $prompt_ref | complete
} catch {|error|
# Clean up the temp prompt file before re-raising.
rm -f $prompt_file
error make {
msg: (error-message $error)
}
})
rm -f $prompt_file
let output = ($result.stdout | str trim)
# NOTE(review): a non-zero exit code with non-empty stdout is treated as
# success here — confirm that is intended (e.g. timeout kills mid-write).
if $output != '' {
$output
} else {
let stderr = ($result.stderr | str trim)
if $stderr == '' {
error make {
msg: $"pi returned no output (exit ($result.exit_code))"
}
} else {
error make {
msg: $"pi returned no output (exit ($result.exit_code)): ($stderr)"
}
}
}
}
# Execute a single ingest job end to end: render the source into page
# images, transcribe them with the vision model, normalize the text, and
# write the managed markdown file. Returns the result record that
# mark-success persists.
def ingest-job [manifest: record] {
mkdir $manifest.session_dir
let page_paths = (render-pages $manifest.input_path $manifest.job_id)
let transcribe_prompt = ([
'Transcribe this note into clean Markdown.'
''
'Read it like a human and reconstruct the intended reading order and structure.'
''
'Do not preserve handwritten layout literally.'
''
'Handwritten line breaks, word stacking, font size changes, and spacing are not semantic structure by default.'
''
'If adjacent handwritten lines clearly belong to one sentence or short phrase, merge them into normal prose with spaces instead of separate Markdown lines.'
''
'Only keep separate lines or blank lines when there is clear evidence of separate paragraphs, headings, list items, checkboxes, or other distinct blocks.'
''
'Keep headings, lists, and paragraphs when they are genuinely present.'
''
'Do not summarize. Do not add commentary. Return Markdown only.'
] | str join "\n")
print $"Transcribing ($manifest.job_id) with page count ($page_paths | length)"
# First pass: vision transcription over all rendered pages.
let transcript = (call-pi $transcribe_timeout $transcribe_prompt $page_paths 'low')
mkdir ($manifest.transcript_path | path dirname)
$"($transcript)\n" | save -f $manifest.transcript_path
let normalize_prompt = ([
'Rewrite the attached transcription into clean Markdown.'
''
'Preserve the same content and intended structure.'
''
'Collapse layout-only line breaks from handwriting.'
''
'If short adjacent lines are really one sentence or phrase, join them with spaces instead of keeping one line per handwritten row.'
''
'Use separate lines only for real headings, list items, checkboxes, or distinct paragraphs.'
''
'Do not summarize. Return Markdown only.'
] | str join "\n")
print $"Normalizing ($manifest.job_id)"
# Second pass: text-only cleanup of the saved transcript (no vision).
let normalized = (call-pi $normalize_timeout $normalize_prompt [ $manifest.transcript_path ] 'off')
let markdown = (build-markdown $manifest $normalized)
let target = (determine-write-target $manifest)
mkdir ($target.write_path | path dirname)
$markdown | save -f $target.write_path
# Result record consumed by write-result / mark-success.
{
success: true
job_id: $manifest.job_id
note_id: $manifest.note_id
archive_path: $manifest.archive_path
source_hash: $manifest.source_hash
session_dir: $manifest.session_dir
output_path: $target.output_path
output_hash: (if $target.updated_main_output { sha256 $target.write_path } else { null })
conflict_path: (if $target.write_mode == 'conflict' { $target.write_path } else { null })
write_mode: $target.write_mode
updated_main_output: $target.updated_main_output
transcript_path: $manifest.transcript_path
}
}
# Record a job failure: mark the job and note rows failed, optionally tie
# the failure back to the archived version row, move the manifest into
# the failed area, and log a job-failed event.
def mark-failure [job: record, running_path: string, error_summary: string, result?: any] {
let finished_at = (now-iso)
sql-run $"
update jobs
set status = 'failed',
finished_at = (sql-quote $finished_at),
error_summary = (sql-quote $error_summary),
job_manifest_path = (sql-quote (manifest-path-for $job.job_id 'failed'))
where job_id = (sql-quote $job.job_id);
update notes
set status = 'failed',
last_error = (sql-quote $error_summary)
where note_id = (sql-quote $job.note_id);
"
| ignore
# Only update the versions row when the result names an archive path.
if $result != null and ($result.archive_path? | default null) != null {
sql-run $"
update versions
set ingest_result = 'failed',
session_path = (sql-quote ($result.session_dir? | default ''))
where archive_path = (sql-quote $result.archive_path);
"
| ignore
}
let failed_path = (manifest-path-for $job.job_id 'failed')
if ($running_path | path exists) {
mv -f $running_path $failed_path
}
log-event $job.note_id 'job-failed' {
job_id: $job.job_id
error: $error_summary
}
}
# Record a successful job: update the job, note, and version rows, move
# the manifest to the done area, and touch the qmd dirty marker so the
# index gets refreshed.
def mark-success [job: record, running_path: string, result: record] {
let finished_at = (now-iso)
# A conflict write leaves the note flagged for manual resolution.
let note_status = if ($result.write_mode? | default 'write') == 'conflict' {
'conflict'
} else {
'active'
}
let output_path_q = (sql-quote ($result.output_path? | default null))
# When the main output was NOT rewritten, assign the column to itself in
# the UPDATE below so the previous hash values are left untouched.
let output_hash_update = if ($result.updated_main_output? | default false) {
sql-quote ($result.output_hash? | default null)
} else {
'last_generated_output_hash'
}
let source_hash_update = if ($result.updated_main_output? | default false) {
sql-quote ($result.source_hash? | default null)
} else {
'last_generated_source_hash'
}
sql-run $"
update jobs
set status = 'done',
finished_at = (sql-quote $finished_at),
error_summary = null,
job_manifest_path = (sql-quote (manifest-path-for $job.job_id 'done'))
where job_id = (sql-quote $job.job_id);
update notes
set status = (sql-quote $note_status),
output_path = ($output_path_q),
last_processed_at = (sql-quote $finished_at),
last_generated_output_hash = ($output_hash_update),
last_generated_source_hash = ($source_hash_update),
conflict_path = (sql-quote ($result.conflict_path? | default null)),
last_error = null
where note_id = (sql-quote $job.note_id);
update versions
set ingest_result = 'success',
session_path = (sql-quote ($result.session_dir? | default ''))
where archive_path = (sql-quote $result.archive_path);
"
| ignore
let done_path = (manifest-path-for $job.job_id 'done')
if ($running_path | path exists) {
mv -f $running_path $done_path
}
# Touching the marker schedules a debounced `qmd update`.
^touch (qmd-dirty-file)
log-event $job.note_id 'job-finished' {
job_id: $job.job_id
write_mode: ($result.write_mode? | default 'write')
output_path: ($result.output_path? | default '')
conflict_path: ($result.conflict_path? | default '')
}
}
# On startup, mark any jobs still flagged 'running' (left over from a
# crashed or killed worker) as failed so they surface for retry instead
# of hanging forever.
def recover-running-jobs [] {
let jobs = (sql-json "
select job_id, note_id, job_manifest_path, result_path
from jobs
where status = 'running'
order by started_at asc;
")
for job in $jobs {
let running_path = (manifest-path-for $job.job_id 'running')
# NOTE(review): assumes result_path is never null for running jobs —
# `null | path exists` would error here; confirm against the schema.
let result = if ($job.result_path | path exists) {
open $job.result_path
} else {
null
}
mark-failure $job $running_path 'worker interrupted before completion' $result
}
}
# Claim a queued job (manifest moved into the running area, row updated),
# run the ingest, and record either success or failure with a persisted
# result file.
def process-job [job: record] {
let running_path = (manifest-path-for $job.job_id 'running')
mv -f $job.job_manifest_path $running_path
sql-run $"
update jobs
set status = 'running',
started_at = (sql-quote (now-iso)),
job_manifest_path = (sql-quote $running_path)
where job_id = (sql-quote $job.job_id);
"
| ignore
print $"Processing ($job.job_id) for ($job.note_id)"
let manifest = (open $running_path)
try {
let result = (ingest-job $manifest)
write-result $job.result_path $result
mark-success $job $running_path $result
} catch {|error|
let message = (error-message $error)
# Persist a failure result so recovery and debugging can inspect it.
let result = {
success: false
job_id: $manifest.job_id
note_id: $manifest.note_id
archive_path: $manifest.archive_path
source_hash: $manifest.source_hash
session_dir: $manifest.session_dir
error: $message
}
write-result $job.result_path $result
mark-failure $job $running_path $message $result
}
}
def drain-queued-jobs [] {
  # Process queued jobs oldest-first until the queue is empty, giving the
  # qmd index a chance to refresh after every job and once at the end.
  loop {
    let pending = (next-queued-job)
    if $pending != null {
      process-job $pending
      maybe-update-qmd
    } else {
      maybe-update-qmd
      break
    }
  }
}
export def worker-run [--drain] {
  # Worker entry point. Interrupted jobs are failed first; then either
  # drain the queue once (--drain) or poll it forever.
  ensure-layout
  recover-running-jobs
  if $drain {
    drain-queued-jobs
  } else {
    loop {
      let job = (next-queued-job)
      if $job == null {
        maybe-update-qmd
        sleep $idle_sleep
      } else {
        process-job $job
        maybe-update-qmd
      }
    }
  }
}
# CLI entry: forward --drain to worker-run (drain once vs. poll forever).
def main [--drain] {
worker-run --drain=$drain
}

View File

@@ -1,13 +1,44 @@
import type { Plugin } from "@opencode-ai/plugin";
// Quick pre-filter: `git ` at command start or after a shell separator,
// `$(`, or backtick. (The per-segment check below makes the final call.)
const GIT_PATTERN = /(?:^|[;&|]\s*|&&\s*|\|\|\s*|\$\(\s*|`\s*)git\s/;
// Wrapper commands that may precede the real command word in a segment.
const COMMAND_PREFIXES = new Set([
"env",
"command",
"builtin",
"time",
"sudo",
"nohup",
"nice",
]);
// First word that is neither a known wrapper command nor a VAR=value
// environment assignment; undefined when no such word exists.
function findCommandWord(words: string[]): string | undefined {
  const isAssignment = (word: string) =>
    /^[A-Za-z_][A-Za-z0-9_]*=/.test(word);
  return words.find(
    (word) => !COMMAND_PREFIXES.has(word) && !isAssignment(word),
  );
}
// True when the effective command of one shell segment is `git`.
function segmentHasGit(words: string[]): boolean {
  return findCommandWord(words) === "git";
}
// Split the command on shell separators and substitutions, then check
// whether any resulting segment invokes git.
function containsBlockedGit(command: string): boolean {
  const segments = command.split(/\s*(?:&&|\|\||[;&|]|\$\(|`)\s*/);
  return segments.some((segment) =>
    segmentHasGit(segment.trim().split(/\s+/).filter(Boolean)),
  );
}
export const BlockGitPlugin: Plugin = async () => {
return {
"tool.execute.before": async (input, output) => {
if (input.tool === "bash") {
const command = output.args.command as string;
if (GIT_PATTERN.test(command)) {
if (containsBlockedGit(command)) {
throw new Error(
"This project uses jj, only use `jj` commands, not `git`.",
);

View File

@@ -1,7 +1,7 @@
import type { Plugin } from "@opencode-ai/plugin";
const SCRIPTING_PATTERN =
/(?:^|[;&|]\s*|&&\s*|\|\|\s*|\$\(\s*|`\s*)(?:python[23]?|perl|ruby|php|lua|bash\s+-c|sh\s+-c)\s/;
/(?:^|[;&|]\s*|&&\s*|\|\|\s*|\$\(\s*|`\s*)(?:python[23]?|perl|ruby|php|lua|node\s+-e|bash\s+-c|sh\s+-c)\s/;
export const BlockScriptingPlugin: Plugin = async () => {
return {

View File

@@ -10,7 +10,9 @@ export const DirenvPlugin: Plugin = async ({ $ }) => {
.json();
Object.assign(output.env, exported);
} catch {}
} catch (error) {
console.warn("[direnv] failed to export env:", error);
}
},
};
};

View File

@@ -3,6 +3,8 @@ import type {
TuiPluginModule,
TuiDialogSelectOption,
} from "@opencode-ai/plugin/tui"
import { promises as fs } from "node:fs"
import path from "node:path"
type BookmarkRef = { name: string; remote?: string }
type Change = { changeId: string; title: string }
@@ -17,9 +19,143 @@ type ReviewTarget =
baseBookmark: string
baseRemote?: string
title: string
}
}
| { type: "folder"; paths: string[] }
type ReviewSelectorValue = ReviewTarget["type"] | "toggleCustomInstructions"
// KV key under which per-user custom review instructions are persisted.
const CUSTOM_INSTRUCTIONS_KEY = "review.customInstructions"
// Prompt templates for each review target; {placeholders} are filled in
// before the prompt is dispatched.
const WORKING_COPY_PROMPT =
"Review the current working-copy changes (including new files) and provide prioritized findings."
const LOCAL_CHANGES_REVIEW_INSTRUCTIONS =
"Also include local working-copy changes (including new files) on top of this bookmark. Use `jj status`, `jj diff --summary`, and `jj diff` so local fixes are part of this review cycle."
// Used when the merge-base change id was resolved up front.
const BASE_BOOKMARK_PROMPT_WITH_MERGE_BASE =
"Review the code changes against the base bookmark '{baseBookmark}'. The merge-base change for this comparison is {mergeBaseChangeId}. Run `jj diff --from {mergeBaseChangeId} --to @` to inspect the changes relative to {baseBookmark}. Provide prioritized, actionable findings."
// Fallback when merge-base resolution failed; the model finds it itself.
const BASE_BOOKMARK_PROMPT_FALLBACK =
"Review the code changes against the base bookmark '{bookmark}'. Start by finding the merge-base revision between the working copy and {bookmark}, then run `jj diff --from <merge-base> --to @` to see what changes would land on the {bookmark} bookmark. Provide prioritized, actionable findings."
const CHANGE_PROMPT_WITH_TITLE =
'Review the code changes introduced by change {changeId} ("{title}"). Provide prioritized, actionable findings.'
const CHANGE_PROMPT =
"Review the code changes introduced by change {changeId}. Provide prioritized, actionable findings."
const PULL_REQUEST_PROMPT =
'Review pull request #{prNumber} ("{title}") against the base bookmark \'{baseBookmark}\'. The merge-base change for this comparison is {mergeBaseChangeId}. Run `jj diff --from {mergeBaseChangeId} --to @` to inspect the changes that would be merged. Provide prioritized, actionable findings.'
const PULL_REQUEST_PROMPT_FALLBACK =
'Review pull request #{prNumber} ("{title}") against the base bookmark \'{baseBookmark}\'. Start by finding the merge-base revision between the working copy and {baseBookmark}, then run `jj diff --from <merge-base> --to @` to see the changes that would be merged. Provide prioritized, actionable findings.'
// Snapshot (non-diff) review of explicit paths.
const FOLDER_REVIEW_PROMPT =
"Review the code in the following paths: {paths}. This is a snapshot review (not a diff). Read the files directly in these paths and provide prioritized, actionable findings."
const REVIEW_RUBRIC = `# Review Guidelines
You are acting as a code reviewer for a proposed code change made by another engineer.
Below are default guidelines for determining what to flag. These are not the final word — if you encounter more specific guidelines elsewhere (in a developer message, user message, file, or project review guidelines appended below), those override these general instructions.
## Determining what to flag
Flag issues that:
1. Meaningfully impact the accuracy, performance, security, or maintainability of the code.
2. Are discrete and actionable (not general issues or multiple combined issues).
3. Don't demand rigor inconsistent with the rest of the codebase.
4. Were introduced in the changes being reviewed (not pre-existing bugs).
5. The author would likely fix if aware of them.
6. Don't rely on unstated assumptions about the codebase or author's intent.
7. Have provable impact on other parts of the code — it is not enough to speculate that a change may disrupt another part, you must identify the parts that are provably affected.
8. Are clearly not intentional changes by the author.
9. Be particularly careful with untrusted user input and follow the specific guidelines to review.
10. Treat silent local error recovery (especially parsing/IO/network fallbacks) as high-signal review candidates unless there is explicit boundary-level justification.
## Untrusted User Input
1. Be careful with open redirects, they must always be checked to only go to trusted domains (?next_page=...)
2. Always flag SQL that is not parametrized
3. In systems with user supplied URL input, http fetches always need to be protected against access to local resources (intercept DNS resolver!)
4. Escape, don't sanitize if you have the option (eg: HTML escaping)
## Comment guidelines
1. Be clear about why the issue is a problem.
2. Communicate severity appropriately - don't exaggerate.
3. Be brief - at most 1 paragraph.
4. Keep code snippets under 3 lines, wrapped in inline code or code blocks.
5. Use \`\`\`suggestion blocks ONLY for concrete replacement code (minimal lines; no commentary inside the block). Preserve the exact leading whitespace of the replaced lines.
6. Explicitly state scenarios/environments where the issue arises.
7. Use a matter-of-fact tone - helpful AI assistant, not accusatory.
8. Write for quick comprehension without close reading.
9. Avoid excessive flattery or unhelpful phrases like "Great job...".
## Review priorities
1. Surface critical non-blocking human callouts (migrations, dependency churn, auth/permissions, compatibility, destructive operations) at the end.
2. Prefer simple, direct solutions over wrappers or abstractions without clear value.
3. Treat back pressure handling as critical to system stability.
4. Apply system-level thinking; flag changes that increase operational risk or on-call wakeups.
5. Ensure that errors are always checked against codes or stable identifiers, never error messages.
## Fail-fast error handling (strict)
When reviewing added or modified error handling, default to fail-fast behavior.
1. Evaluate every new or changed \`try/catch\`: identify what can fail and why local handling is correct at that exact layer.
2. Prefer propagation over local recovery. If the current scope cannot fully recover while preserving correctness, rethrow (optionally with context) instead of returning fallbacks.
3. Flag catch blocks that hide failure signals (e.g. returning \`null\`/\`[]\`/\`false\`, swallowing JSON parse failures, logging-and-continue, or “best effort” silent recovery).
4. JSON parsing/decoding should fail loudly by default. Quiet fallback parsing is only acceptable with an explicit compatibility requirement and clear tested behavior.
5. Boundary handlers (HTTP routes, CLI entrypoints, supervisors) may translate errors, but must not pretend success or silently degrade.
6. If a catch exists only to satisfy lint/style without real handling, treat it as a bug.
7. When uncertain, prefer crashing fast over silent degradation.
## Required human callouts (non-blocking, at the very end)
After findings/verdict, you MUST append this final section:
## Human Reviewer Callouts (Non-Blocking)
Include only applicable callouts (no yes/no lines):
- **This change adds a database migration:** <files/details>
- **This change introduces a new dependency:** <package(s)/details>
- **This change changes a dependency (or the lockfile):** <files/package(s)/details>
- **This change modifies auth/permission behavior:** <what changed and where>
- **This change introduces backwards-incompatible public schema/API/contract changes:** <what changed and where>
- **This change includes irreversible or destructive operations:** <operation and scope>
Rules for this section:
1. These are informational callouts for the human reviewer, not fix items.
2. Do not include them in Findings unless there is an independent defect.
3. These callouts alone must not change the verdict.
4. Only include callouts that apply to the reviewed change.
5. Keep each emitted callout bold exactly as written.
6. If none apply, write "- (none)".
## Priority levels
Tag each finding with a priority level in the title:
- [P0] - Drop everything to fix. Blocking release/operations. Only for universal issues that do not depend on assumptions about inputs.
- [P1] - Urgent. Should be addressed in the next cycle.
- [P2] - Normal. To be fixed eventually.
- [P3] - Low. Nice to have.
## Output format
Provide your findings in a clear, structured format:
1. List each finding with its priority tag, file location, and explanation.
2. Findings must reference locations that overlap with the actual diff — don't flag pre-existing code.
3. Keep line references as short as possible (avoid ranges over 5-10 lines; pick the most suitable subrange).
4. Provide an overall verdict: "correct" (no blocking issues) or "needs attention" (has blocking issues).
5. Ignore trivial style issues unless they obscure meaning or violate documented standards.
6. Do not generate a full PR fix — only flag issues and optionally provide short suggestion blocks.
7. End with the required "Human Reviewer Callouts (Non-Blocking)" section and all applicable bold callouts (no yes/no).
Output all findings the author would fix if they knew about them. If there are no qualifying findings, explicitly state the code looks good. Don't stop at the first finding - list every qualifying issue. Then append the required non-blocking callouts section.`
// Render a bookmark as `name` or `name@remote` for display.
function bookmarkLabel(b: BookmarkRef): string {
  if (b.remote) return `${b.name}@${b.remote}`
  return b.name
}
@@ -94,6 +230,14 @@ function sanitizeRemoteName(value: string): string {
const plugin: TuiPlugin = async (api) => {
const cwd = api.state.path.directory
let reviewCustomInstructions = normalizeCustomInstructions(
api.kv.get<string | undefined>(CUSTOM_INSTRUCTIONS_KEY, undefined),
)
function setReviewCustomInstructions(value?: string): void {
reviewCustomInstructions = normalizeCustomInstructions(value)
api.kv.set(CUSTOM_INSTRUCTIONS_KEY, reviewCustomInstructions)
}
// -- shell helpers -------------------------------------------------------
@@ -114,6 +258,10 @@ const plugin: TuiPlugin = async (api) => {
return { stdout, exitCode, stderr }
}
// Promise-based delay helper (resolves after `ms` milliseconds).
function sleep(ms: number): Promise<void> {
  return new Promise<void>((resolve) => {
    setTimeout(resolve, ms)
  })
}
async function jj(
...args: string[]
): Promise<{ stdout: string; ok: boolean }> {
@@ -206,7 +354,8 @@ const plugin: TuiPlugin = async (api) => {
bookmark: string,
remote?: string,
): Promise<string | null> {
const ref: BookmarkRef = { name: bookmark, remote }
const ref = await resolveBookmarkRef(bookmark, remote)
if (!ref) return null
const r = await jj(
"log",
"-r",
@@ -231,6 +380,8 @@ const plugin: TuiPlugin = async (api) => {
title: string
baseBookmark: string
baseRemote?: string
headBookmark: string
remote: string
savedChangeId: string
}
| { ok: false; error: string }
@@ -371,159 +522,467 @@ const plugin: TuiPlugin = async (api) => {
if (addedTempRemote) await jj("git", "remote", "remove", remoteName)
// Resolve base bookmark remote
const baseBms = await getBookmarks()
const baseRef = baseBms.find((b) => b.name === prInfo.baseRefName)
const baseRef = await resolveBookmarkRef(prInfo.baseRefName)
return {
ok: true,
title: prInfo.title,
baseBookmark: prInfo.baseRefName,
baseRemote: baseRef?.remote,
headBookmark: prInfo.headRefName,
remote: remoteName,
savedChangeId,
}
}
// Trim instructions; empty or whitespace-only input becomes undefined.
function normalizeCustomInstructions(
  value: string | undefined,
): string | undefined {
  if (value === undefined) return undefined
  const trimmed = value.trim()
  return trimmed.length > 0 ? trimmed : undefined
}
// Split stdout into trimmed, non-empty lines.
function parseNonEmptyLines(stdout: string): string[] {
  const lines: string[] = []
  for (const raw of stdout.trim().split("\n")) {
    const line = raw.trim()
    if (line) lines.push(line)
  }
  return lines
}
// Structural equality on bookmark name and remote.
function bookmarkRefsEqual(left: BookmarkRef, right: BookmarkRef): boolean {
  if (left.name !== right.name) return false
  return left.remote === right.remote
}
// Parse "name@remote" into its parts. A leading or trailing "@" is kept
// as part of the name (no remote); only the last "@" separates.
function parseBookmarkReference(value: string): BookmarkRef {
  const trimmed = value.trim()
  const at = trimmed.lastIndexOf("@")
  const hasRemote = at > 0 && at < trimmed.length - 1
  if (!hasRemote) return { name: trimmed }
  return { name: trimmed.slice(0, at), remote: trimmed.slice(at + 1) }
}
// Drop duplicate (name, remote) pairs, keeping first-occurrence order.
function dedupeBookmarkRefs(bookmarks: BookmarkRef[]): BookmarkRef[] {
  const byKey = new Map<string, BookmarkRef>()
  for (const bookmark of bookmarks) {
    const key = `${bookmark.name}@${bookmark.remote ?? ""}`
    if (!byKey.has(key)) byKey.set(key, bookmark)
  }
  return [...byKey.values()]
}
// List jj bookmarks as deduplicated refs, optionally including remote
// bookmarks and/or restricted to a revset. Empty list on jj failure.
async function getBookmarkRefs(options?: {
  revset?: string
  includeRemotes?: boolean
}): Promise<BookmarkRef[]> {
  const args = [
    "bookmark",
    "list",
    ...(options?.includeRemotes ? ["--all-remotes"] : []),
    ...(options?.revset ? ["-r", options.revset] : []),
    "-T",
    'name ++ "\\t" ++ remote ++ "\\n"',
  ]
  const result = await jj(...args)
  if (!result.ok) return []
  return dedupeBookmarkRefs(parseBookmarks(result.stdout))
}
async function getSingleRevisionId(revset: string): Promise<string | null> {
const r = await jj(
"log",
"-r",
revset,
"--no-graph",
"-T",
'commit_id ++ "\\n"',
)
if (!r.ok) return null
const revisions = parseNonEmptyLines(r.stdout)
return revisions.length === 1 ? revisions[0] : null
}
async function getSingleChangeId(revset: string): Promise<string | null> {
const r = await jj(
"log",
"-r",
revset,
"--no-graph",
"-T",
'change_id.shortest(8) ++ "\\n"',
)
if (!r.ok) return null
const revisions = parseNonEmptyLines(r.stdout)
return revisions.length === 1 ? revisions[0] : null
}
async function getJjRemotes(): Promise<Array<{ name: string; url: string }>> {
const r = await jj("git", "remote", "list")
if (!r.ok) return []
return parseNonEmptyLines(r.stdout)
.map((line) => {
const [name, ...urlParts] = line.split(/\s+/)
return { name, url: urlParts.join(" ") }
})
.filter((remote) => remote.name && remote.url)
}
async function getDefaultRemoteName(): Promise<string | null> {
const remotes = await getJjRemotes()
if (remotes.length === 0) return null
return remotes.find((remote) => remote.name === "origin")?.name ?? remotes[0].name
}
function preferBookmarkRef(
bookmarks: BookmarkRef[],
preferredRemote?: string | null,
): BookmarkRef | null {
if (bookmarks.length === 0) return null
return (
bookmarks.find((bookmark) => !bookmark.remote) ??
(preferredRemote
? bookmarks.find((bookmark) => bookmark.remote === preferredRemote)
: undefined) ??
bookmarks[0]
)
}
async function resolveBookmarkRef(
bookmark: string,
remote?: string,
): Promise<BookmarkRef | null> {
if (remote) return { name: bookmark, remote }
const localBookmark = (await getBookmarkRefs()).find(
(entry) => entry.name === bookmark,
)
if (localBookmark) return localBookmark
const matchingRemoteBookmarks = (
await getBookmarkRefs({ includeRemotes: true })
).filter((entry) => entry.remote && entry.name === bookmark)
if (matchingRemoteBookmarks.length === 0) return null
return preferBookmarkRef(
matchingRemoteBookmarks,
await getDefaultRemoteName(),
)
}
async function getReviewBookmarks(): Promise<BookmarkRef[]> {
const localBookmarks = await getBookmarkRefs()
const localNames = new Set(localBookmarks.map((bookmark) => bookmark.name))
const defaultRemoteName = await getDefaultRemoteName()
const remoteOnlyBookmarks = (
await getBookmarkRefs({ includeRemotes: true })
)
.filter((bookmark) => bookmark.remote && !localNames.has(bookmark.name))
.sort((left, right) => {
if (left.name !== right.name) return left.name.localeCompare(right.name)
if (left.remote === defaultRemoteName) return -1
if (right.remote === defaultRemoteName) return 1
return (left.remote ?? "").localeCompare(right.remote ?? "")
})
return dedupeBookmarkRefs([...localBookmarks, ...remoteOnlyBookmarks])
}
async function getReviewHeadRevset(): Promise<string> {
return (await hasWorkingCopyChanges()) ? "@" : "@-"
}
async function getCurrentReviewBookmarks(): Promise<BookmarkRef[]> {
return getBookmarkRefs({
revset: await getReviewHeadRevset(),
includeRemotes: true,
})
}
async function getDefaultBookmarkRef(): Promise<BookmarkRef | null> {
const defaultRemoteName = await getDefaultRemoteName()
const trunkBookmarks = await getBookmarkRefs({
revset: "trunk()",
includeRemotes: true,
})
const trunkBookmark = preferBookmarkRef(trunkBookmarks, defaultRemoteName)
if (trunkBookmark) return trunkBookmark
const bookmarks = await getReviewBookmarks()
const mainBookmark =
bookmarks.find((bookmark) => !bookmark.remote && bookmark.name === "main") ??
bookmarks.find((bookmark) => !bookmark.remote && bookmark.name === "master") ??
bookmarks.find(
(bookmark) =>
bookmark.remote === defaultRemoteName && bookmark.name === "main",
) ??
bookmarks.find(
(bookmark) =>
bookmark.remote === defaultRemoteName && bookmark.name === "master",
)
return mainBookmark ?? bookmarks[0] ?? null
}
async function loadProjectReviewGuidelines(): Promise<string | null> {
let currentDir = path.resolve(cwd)
while (true) {
const opencodeDir = path.join(currentDir, ".opencode")
const guidelinesPath = path.join(currentDir, "REVIEW_GUIDELINES.md")
const opencodeStats = await fs.stat(opencodeDir).catch(() => null)
if (opencodeStats?.isDirectory()) {
const guidelineStats = await fs.stat(guidelinesPath).catch(() => null)
if (!guidelineStats?.isFile()) return null
try {
const content = await fs.readFile(guidelinesPath, "utf8")
const trimmed = content.trim()
return trimmed ? trimmed : null
} catch {
return null
}
}
const parentDir = path.dirname(currentDir)
if (parentDir === currentDir) return null
currentDir = parentDir
}
}
// -- prompt building -----------------------------------------------------
async function buildPrompt(target: ReviewTarget): Promise<string> {
async function buildTargetReviewPrompt(
target: ReviewTarget,
options?: { includeLocalChanges?: boolean },
): Promise<string> {
const includeLocalChanges = options?.includeLocalChanges === true
switch (target.type) {
case "workingCopy":
return "Review the current working-copy changes (including new files). Use `jj status`, `jj diff --summary`, and `jj diff` to inspect."
return WORKING_COPY_PROMPT
case "baseBookmark": {
const label = bookmarkLabel({
name: target.bookmark,
remote: target.remote,
})
const mergeBase = await getMergeBase(
const bookmark = await resolveBookmarkRef(
target.bookmark,
target.remote,
)
if (mergeBase) {
return `Review code changes against the base bookmark '${label}'. The merge-base change is ${mergeBase}. Run \`jj diff --from ${mergeBase} --to @\` to inspect the changes. Also check for local working-copy changes with \`jj diff --summary\`.`
}
return `Review code changes against the base bookmark '${label}'. Find the merge-base between @ and ${label}, then run \`jj diff --from <merge-base> --to @\`. Also check for local working-copy changes.`
const bookmarkLabelValue = bookmarkLabel(
bookmark ?? { name: target.bookmark, remote: target.remote },
)
const mergeBase = await getMergeBase(target.bookmark, target.remote)
const basePrompt = mergeBase
? BASE_BOOKMARK_PROMPT_WITH_MERGE_BASE
.replace(/{baseBookmark}/g, bookmarkLabelValue)
.replace(/{mergeBaseChangeId}/g, mergeBase)
: BASE_BOOKMARK_PROMPT_FALLBACK.replace(
/{bookmark}/g,
bookmarkLabelValue,
)
return includeLocalChanges
? `${basePrompt} ${LOCAL_CHANGES_REVIEW_INSTRUCTIONS}`
: basePrompt
}
case "change":
return target.title
? `Review the code changes introduced by change ${target.changeId} ("${target.title}"). Use \`jj show ${target.changeId}\` to inspect.`
: `Review the code changes introduced by change ${target.changeId}. Use \`jj show ${target.changeId}\` to inspect.`
? CHANGE_PROMPT_WITH_TITLE.replace(
"{changeId}",
target.changeId,
).replace("{title}", target.title)
: CHANGE_PROMPT.replace("{changeId}", target.changeId)
case "pullRequest": {
const label = bookmarkLabel({
name: target.baseBookmark,
remote: target.baseRemote,
})
const bookmark = await resolveBookmarkRef(
target.baseBookmark,
target.baseRemote,
)
const baseBookmarkLabel = bookmarkLabel(
bookmark ?? {
name: target.baseBookmark,
remote: target.baseRemote,
},
)
const mergeBase = await getMergeBase(
target.baseBookmark,
target.baseRemote,
)
if (mergeBase) {
return `Review pull request #${target.prNumber} ("${target.title}") against '${label}'. Merge-base is ${mergeBase}. Run \`jj diff --from ${mergeBase} --to @\` to inspect.`
}
return `Review pull request #${target.prNumber} ("${target.title}") against '${label}'. Find the merge-base and run \`jj diff --from <merge-base> --to @\`.`
const basePrompt = mergeBase
? PULL_REQUEST_PROMPT.replace(/{prNumber}/g, String(target.prNumber))
.replace(/{title}/g, target.title)
.replace(/{baseBookmark}/g, baseBookmarkLabel)
.replace(/{mergeBaseChangeId}/g, mergeBase)
: PULL_REQUEST_PROMPT_FALLBACK.replace(
/{prNumber}/g,
String(target.prNumber),
)
.replace(/{title}/g, target.title)
.replace(/{baseBookmark}/g, baseBookmarkLabel)
return includeLocalChanges
? `${basePrompt} ${LOCAL_CHANGES_REVIEW_INSTRUCTIONS}`
: basePrompt
}
case "folder":
return `Review the code in the following paths: ${target.paths.join(", ")}. This is a snapshot review (not a diff). Read the files directly.`
return FOLDER_REVIEW_PROMPT.replace(
"{paths}",
target.paths.join(", "),
)
}
}
async function buildReviewPrompt(target: ReviewTarget): Promise<string> {
const task = await buildPrompt(target)
return [
"You are acting as a code reviewer. Do not make code changes. Provide actionable feedback on code changes.",
"",
"Diffs alone are not enough. Read the full file(s) being modified to understand context. Code that looks wrong in isolation may be correct given surrounding logic.",
"",
"What to look for:",
"",
"Bugs — primary focus:",
"- Logic errors, off-by-one mistakes, incorrect conditionals",
"- Missing guards, unreachable code paths, broken error handling",
"- Edge cases: null/empty inputs, race conditions",
"- Security: injection, auth bypass, data exposure",
"",
"Structure:",
"- Does the code fit the codebase's patterns and conventions?",
"- Does it use established abstractions?",
"- Is there excessive nesting that should be flattened?",
"",
"Performance:",
"- Only flag obvious issues like O(n^2) on unbounded data, N+1 queries, or blocking I/O on hot paths.",
"",
"Before you flag something:",
"- Be certain. Investigate first if unsure.",
"- Do not invent hypothetical problems.",
"- Do not be a zealot about style.",
"- Only review the requested changes, not unrelated pre-existing issues.",
"",
"Output:",
"- Be direct about bugs and why they are bugs",
"- Communicate severity honestly",
"- Include file paths and line numbers",
"- Suggest fixes when appropriate",
"- Use a matter-of-fact tone, no flattery",
"",
"Task:",
task,
].join("\n")
const prompt = await buildTargetReviewPrompt(target)
const projectGuidelines = await loadProjectReviewGuidelines()
const sharedInstructions = normalizeCustomInstructions(
reviewCustomInstructions,
)
let fullPrompt = `${REVIEW_RUBRIC}\n\n---\n\nPlease perform a code review with the following focus:\n\n${prompt}`
if (sharedInstructions) {
fullPrompt += `\n\nShared custom review instructions (applies to all reviews):\n\n${sharedInstructions}`
}
if (projectGuidelines) {
fullPrompt += `\n\nThis project has additional instructions for code reviews:\n\n${projectGuidelines}`
}
return fullPrompt
}
async function getSmartDefault(): Promise<
"workingCopy" | "baseBookmark" | "change"
> {
if (await hasWorkingCopyChanges()) return "workingCopy"
const defaultBookmark = await getDefaultBookmarkRef()
if (defaultBookmark) {
const reviewHeadRevision = await getSingleRevisionId(
await getReviewHeadRevset(),
)
const defaultBookmarkRevision = await getSingleRevisionId(
bookmarkRevset(defaultBookmark),
)
if (
reviewHeadRevision &&
defaultBookmarkRevision &&
reviewHeadRevision !== defaultBookmarkRevision
) {
return "baseBookmark"
}
}
return "change"
}
function getUserFacingHint(target: ReviewTarget): string {
switch (target.type) {
case "workingCopy":
return "working-copy changes"
case "baseBookmark":
return `changes against '${bookmarkLabel({ name: target.bookmark, remote: target.remote })}'`
case "change":
return target.title
? `change ${target.changeId}: ${target.title}`
: `change ${target.changeId}`
case "pullRequest": {
const shortTitle =
target.title.length > 30
? `${target.title.slice(0, 27)}...`
: target.title
return `PR #${target.prNumber}: ${shortTitle}`
}
case "folder": {
const joined = target.paths.join(", ")
return joined.length > 40
? `folders: ${joined.slice(0, 37)}...`
: `folders: ${joined}`
}
}
}
// -- review execution ----------------------------------------------------
async function startReview(target: ReviewTarget): Promise<void> {
const prompt = await buildReviewPrompt(target)
const hint = getUserFacingHint(target)
const cleared = await api.client.tui.clearPrompt()
const appended = await api.client.tui.appendPrompt({
text: prompt,
})
// `prompt.submit` is ignored unless the prompt input is focused.
// When this runs from a dialog, focus returns on the next tick.
await sleep(50)
const submitted = await api.client.tui.submitPrompt()
if (!cleared || !appended) {
if (!cleared || !appended || !submitted) {
api.ui.toast({
message: "Failed to draft review prompt",
message: "Failed to start review prompt automatically",
variant: "error",
})
return
}
api.ui.toast({
message: `Starting review: ${hint}`,
variant: "info",
})
}
// -- dialogs -------------------------------------------------------------
function showReviewSelector(): void {
const options: TuiDialogSelectOption<string>[] = [
async function showReviewSelector(): Promise<void> {
const smartDefault = await getSmartDefault()
const options: TuiDialogSelectOption<ReviewSelectorValue>[] = [
{
title: "Working-copy changes",
title: "Review working-copy changes",
value: "workingCopy",
description: "Review uncommitted changes",
},
{
title: "Against a bookmark",
title: "Review against a base bookmark",
value: "baseBookmark",
description: "PR-style review against a base",
description: "(local)",
},
{
title: "A specific change",
title: "Review a change",
value: "change",
description: "Review a single jj change",
},
{
title: "A pull request",
title: "Review a pull request",
value: "pullRequest",
description: "Materialize and review a GitHub PR",
description: "(GitHub PR)",
},
{
title: "A folder (snapshot)",
title: "Review a folder (or more)",
value: "folder",
description: "Review files directly, no diff",
description: "(snapshot, not diff)",
},
{
title: reviewCustomInstructions
? "Remove custom review instructions"
: "Add custom review instructions",
value: "toggleCustomInstructions",
description: reviewCustomInstructions
? "(currently set)"
: "(applies to all review modes)",
},
]
api.ui.dialog.replace(
() =>
api.ui.DialogSelect({
title: "Review",
title: "Select a review preset",
options,
current: smartDefault,
onSelect: (option) => {
api.ui.dialog.clear()
switch (option.value) {
@@ -542,43 +1001,88 @@ const plugin: TuiPlugin = async (api) => {
case "folder":
showFolderInput()
break
case "toggleCustomInstructions":
if (reviewCustomInstructions) {
setReviewCustomInstructions(undefined)
api.ui.toast({
message: "Custom review instructions removed",
variant: "info",
})
void showReviewSelector()
break
}
showCustomInstructionsInput()
break
}
},
}),
)
}
function showCustomInstructionsInput(): void {
api.ui.dialog.replace(
() =>
api.ui.DialogPrompt({
title: "Custom review instructions",
placeholder: "focus on performance regressions",
value: reviewCustomInstructions,
onConfirm: (value) => {
const next = normalizeCustomInstructions(value)
api.ui.dialog.clear()
if (!next) {
api.ui.toast({
message: "Custom review instructions not changed",
variant: "info",
})
void showReviewSelector()
return
}
setReviewCustomInstructions(next)
api.ui.toast({
message: "Custom review instructions saved",
variant: "success",
})
void showReviewSelector()
},
onCancel: () => {
api.ui.dialog.clear()
void showReviewSelector()
},
}),
)
}
async function showBookmarkSelector(): Promise<void> {
api.ui.toast({ message: "Loading bookmarks...", variant: "info" })
const allBookmarks = await getBookmarks()
const currentBookmarks = await getCurrentBookmarks()
const defaultBookmark = await getDefaultBookmark()
const bookmarks = await getReviewBookmarks()
const currentBookmarks = await getCurrentReviewBookmarks()
const defaultBookmark = await getDefaultBookmarkRef()
const currentKeys = new Set(
currentBookmarks.map((b) => `${b.name}@${b.remote ?? ""}`),
)
const candidates = allBookmarks.filter(
(b) => !currentKeys.has(`${b.name}@${b.remote ?? ""}`),
const candidates = bookmarks.filter(
(bookmark) =>
!currentBookmarks.some((currentBookmark) =>
bookmarkRefsEqual(bookmark, currentBookmark),
),
)
if (candidates.length === 0) {
const currentLabel = currentBookmarks[0]
? bookmarkLabel(currentBookmarks[0])
: undefined
api.ui.toast({
message: "No other bookmarks found",
message: currentLabel
? `No other bookmarks found (current bookmark: ${currentLabel})`
: "No bookmarks found",
variant: "error",
})
return
}
// Sort: default first, then local before remote
const defaultKey = defaultBookmark
? `${defaultBookmark.name}@${defaultBookmark.remote ?? ""}`
: null
const sorted = candidates.sort((a, b) => {
const aKey = `${a.name}@${a.remote ?? ""}`
const bKey = `${b.name}@${b.remote ?? ""}`
if (aKey === defaultKey) return -1
if (bKey === defaultKey) return 1
if (defaultBookmark && bookmarkRefsEqual(a, defaultBookmark)) return -1
if (defaultBookmark && bookmarkRefsEqual(b, defaultBookmark)) return 1
if (!!a.remote !== !!b.remote) return a.remote ? 1 : -1
return bookmarkLabel(a).localeCompare(bookmarkLabel(b))
})
@@ -588,10 +1092,10 @@ const plugin: TuiPlugin = async (api) => {
title: bookmarkLabel(b),
value: b,
description:
`${b.name}@${b.remote ?? ""}` === defaultKey
defaultBookmark && bookmarkRefsEqual(b, defaultBookmark)
? "(default)"
: b.remote
? `remote: ${b.remote}`
? `(remote ${b.remote})`
: undefined,
}),
)
@@ -599,7 +1103,7 @@ const plugin: TuiPlugin = async (api) => {
api.ui.dialog.replace(
() =>
api.ui.DialogSelect({
title: "Base bookmark",
title: "Select base bookmark",
placeholder: "Filter bookmarks...",
options,
onSelect: (option) => {
@@ -631,7 +1135,7 @@ const plugin: TuiPlugin = async (api) => {
api.ui.dialog.replace(
() =>
api.ui.DialogSelect({
title: "Change to review",
title: "Select change to review",
placeholder: "Filter changes...",
options,
onSelect: (option) => {
@@ -646,11 +1150,20 @@ const plugin: TuiPlugin = async (api) => {
)
}
function showPrInput(): void {
async function showPrInput(): Promise<void> {
if (await hasWorkingCopyChanges()) {
api.ui.toast({
message:
"Cannot materialize PR: you have local jj changes. Please snapshot or discard them first.",
variant: "error",
})
return
}
api.ui.dialog.replace(
() =>
api.ui.DialogPrompt({
title: "PR number or URL",
title: "Enter PR number or URL",
placeholder:
"123 or https://github.com/owner/repo/pull/123",
onConfirm: (value) => {
@@ -672,7 +1185,12 @@ const plugin: TuiPlugin = async (api) => {
async function handlePrReview(prNumber: number): Promise<void> {
api.ui.toast({
message: `Materializing PR #${prNumber}...`,
message: `Fetching PR #${prNumber} info...`,
variant: "info",
})
api.ui.toast({
message: `Materializing PR #${prNumber} with jj...`,
variant: "info",
duration: 10000,
})
@@ -684,8 +1202,8 @@ const plugin: TuiPlugin = async (api) => {
}
api.ui.toast({
message: `PR #${prNumber} materialized: ${result.title}`,
variant: "success",
message: `Materialized PR #${prNumber} (${result.headBookmark}@${result.remote})`,
variant: "info",
})
await startReview({
@@ -701,13 +1219,13 @@ const plugin: TuiPlugin = async (api) => {
api.ui.dialog.replace(
() =>
api.ui.DialogPrompt({
title: "Paths to review",
placeholder: "src docs lib/utils.ts",
title: "Enter folders/files to review",
placeholder: ".",
onConfirm: (value) => {
const paths = value
.split(/\s+/)
.map((p) => p.trim())
.filter(Boolean)
.filter((p) => p.length > 0)
if (paths.length === 0) {
api.ui.toast({
message: "No paths provided",
@@ -732,12 +1250,12 @@ const plugin: TuiPlugin = async (api) => {
inJjRepo
? [
{
title: "Review code changes (jj)",
value: "jj-review",
title: "Review code changes",
value: "review",
description:
"Working-copy, bookmark, change, PR, or folder",
slash: { name: "jj-review" },
onSelect: () => showReviewSelector(),
"Review code changes (PR, working copy, bookmark, change, or folder)",
slash: { name: "review", aliases: ["jj-review"] },
onSelect: () => void showReviewSelector(),
},
]
: [],
@@ -745,6 +1263,6 @@ const plugin: TuiPlugin = async (api) => {
}
export default {
id: "jj-review",
id: "review",
tui: plugin,
} satisfies TuiPluginModule

File diff suppressed because it is too large Load Diff

View File

@@ -1,44 +0,0 @@
{inputs, ...}: final: prev: {
qmd =
prev.buildNpmPackage rec {
pname = "qmd";
version = "2.0.1";
src = inputs.qmd;
npmDepsFetcherVersion = 2;
npmDepsHash = "sha256-sAyCG43p3JELQ2lazwRrsdmW9Q4cOy45X6ZagBmitGU=";
nativeBuildInputs = [
prev.makeWrapper
prev.python3
prev.pkg-config
prev.cmake
];
buildInputs = [prev.sqlite];
dontConfigure = true;
postPatch = ''
cp ${./qmd-package-lock.json} package-lock.json
'';
npmBuildScript = "build";
dontNpmPrune = true;
installPhase = ''
runHook preInstall
mkdir -p $out/lib/node_modules/qmd $out/bin
cp -r bin dist node_modules package.json package-lock.json LICENSE CHANGELOG.md $out/lib/node_modules/qmd/
makeWrapper ${prev.nodejs}/bin/node $out/bin/qmd \
--add-flags $out/lib/node_modules/qmd/dist/cli/qmd.js \
--set LD_LIBRARY_PATH ${prev.lib.makeLibraryPath [prev.sqlite]}
runHook postInstall
'';
meta = with prev.lib; {
description = "On-device search engine for markdown notes, meeting transcripts, and knowledge bases";
homepage = "https://github.com/tobi/qmd";
license = licenses.mit;
mainProgram = "qmd";
platforms = platforms.unix;
};
};
}

View File

@@ -4,7 +4,6 @@
opencodeSecretPath = secretPath "opencode-api-key";
in {
den.aspects.ai-tools.homeManager = {
config,
lib,
pkgs,
inputs',
@@ -62,7 +61,7 @@ in {
opensrc = {
enabled = true;
type = "local";
command = ["node" "/home/cschmatzler/.bun/bin/opensrc-mcp"];
command = ["opensrc-mcp"];
};
context7 = {
enabled = true;
@@ -78,22 +77,6 @@ in {
};
};
systemd.user.services.opencode-server = {
Unit = {
Description = "OpenCode AI server";
After = ["default.target"];
};
Service = {
ExecStart = "${inputs'.llm-agents.packages.opencode}/bin/opencode serve --port 18822 --hostname 0.0.0.0";
Restart = "on-failure";
RestartSec = 5;
Environment = "PATH=${config.home.profileDirectory}/bin:/run/current-system/sw/bin";
};
Install = {
WantedBy = ["default.target"];
};
};
xdg.configFile = {
"opencode/agent" = {
source = ./_opencode/agent;

View File

@@ -35,7 +35,6 @@
adguardhome = ./adguardhome.nix;
cache = ./cache.nix;
gitea = ./gitea.nix;
notability = ./notability.nix;
opencode = ./opencode.nix;
paperless = ./paperless.nix;

View File

@@ -54,7 +54,6 @@
inputs.nixpkgs.follows = "nixpkgs";
};
llm-agents.url = "github:numtide/llm-agents.nix";
qmd.url = "github:tobi/qmd";
# Overlay inputs
himalaya.url = "github:pimalaya/himalaya";
jj-ryu = {

View File

@@ -74,7 +74,6 @@ in
den.aspects.opencode-api-key
den.aspects.adguardhome
den.aspects.cache
den.aspects.notability
den.aspects.paperless
];
nixos = {...}: {

View File

@@ -49,12 +49,27 @@
den.aspects.tailscale.nixos = {
services.tailscale = {
enable = true;
extraSetFlags = ["--ssh"];
openFirewall = true;
permitCertUid = "caddy";
useRoutingFeatures = "server";
};
};
den.aspects.mosh.nixos = {
programs.mosh = {
enable = true;
openFirewall = false;
};
networking.firewall.interfaces.tailscale0.allowedUDPPortRanges = [
{
from = 60000;
to = 61000;
}
];
};
den.aspects.tailscale.darwin = {
services.tailscale.enable = true;
};

View File

@@ -1,135 +0,0 @@
{lib, ...}: let
caddyLib = import ./_lib/caddy.nix;
local = import ./_lib/local.nix;
secretLib = import ./_lib/secrets.nix {inherit lib;};
inherit (local) user;
notabilityScripts = ./_notability;
tahani = local.hosts.tahani;
in {
den.aspects.notability.nixos = {
config,
inputs',
pkgs,
...
}: let
homeDir = tahani.home;
dataRoot = "${homeDir}/.local/share/notability-ingest";
stateRoot = "${homeDir}/.local/state/notability-ingest";
notesRoot = "${homeDir}/Notes";
webdavRoot = "${dataRoot}/webdav-root";
userPackages = with pkgs; [
qmd
poppler-utils
rclone
sqlite
zk
];
commonPath = with pkgs;
[
coreutils
inotify-tools
nushell
util-linux
]
++ userPackages;
commonEnvironment = {
HOME = homeDir;
NOTABILITY_ARCHIVE_ROOT = "${dataRoot}/archive";
NOTABILITY_DATA_ROOT = dataRoot;
NOTABILITY_DB_PATH = "${stateRoot}/db.sqlite";
NOTABILITY_NOTES_DIR = notesRoot;
NOTABILITY_RENDER_ROOT = "${dataRoot}/rendered-pages";
NOTABILITY_SESSIONS_ROOT = "${stateRoot}/sessions";
NOTABILITY_STATE_ROOT = stateRoot;
NOTABILITY_TRANSCRIPT_ROOT = "${stateRoot}/transcripts";
NOTABILITY_WEBDAV_ROOT = webdavRoot;
XDG_CONFIG_HOME = "${homeDir}/.config";
};
mkTmpDirRule = path: "d ${path} 0755 ${user.name} users -";
mkNotabilityService = {
description,
script,
after ? [],
requires ? [],
environment ? {},
}: {
inherit after description requires;
wantedBy = ["multi-user.target"];
path = commonPath;
environment = commonEnvironment // environment;
serviceConfig = {
ExecStart = "${pkgs.nushell}/bin/nu ${notabilityScripts}/${script}";
Group = "users";
Restart = "always";
RestartSec = 5;
User = user.name;
WorkingDirectory = homeDir;
};
};
in {
sops.secrets.tahani-notability-webdav-password =
secretLib.mkUserBinarySecret {
name = "tahani-notability-webdav-password";
sopsFile = ../secrets/tahani-notability-webdav-password;
};
home-manager.users.${user.name} = {
home.packages = userPackages;
home.file.".config/qmd/index.yml".text = ''
collections:
notes:
path: ${notesRoot}
pattern: "**/*.md"
'';
};
systemd.tmpfiles.rules =
builtins.map mkTmpDirRule [
notesRoot
dataRoot
webdavRoot
"${dataRoot}/archive"
"${dataRoot}/rendered-pages"
stateRoot
"${stateRoot}/jobs"
"${stateRoot}/jobs/queued"
"${stateRoot}/jobs/running"
"${stateRoot}/jobs/failed"
"${stateRoot}/jobs/done"
"${stateRoot}/jobs/results"
"${stateRoot}/sessions"
"${stateRoot}/transcripts"
];
services.caddy.virtualHosts =
caddyLib.mkTailscaleVHost {
name = "tahani";
configText = ''
handle /notability* {
reverse_proxy 127.0.0.1:9980
}
'';
};
systemd.services.notability-webdav =
mkNotabilityService {
description = "Notability WebDAV landing zone";
script = "webdav.nu";
after = ["network.target"];
environment = {
NOTABILITY_WEBDAV_ADDR = "127.0.0.1:9980";
NOTABILITY_WEBDAV_BASEURL = "/notability";
NOTABILITY_WEBDAV_PASSWORD_FILE = config.sops.secrets.tahani-notability-webdav-password.path;
NOTABILITY_WEBDAV_USER = "notability";
};
};
systemd.services.notability-watch =
mkNotabilityService {
description = "Watch and ingest Notability WebDAV uploads";
script = "watch.nu";
after = ["notability-webdav.service"];
requires = ["notability-webdav.service"];
};
};
}

View File

@@ -20,8 +20,6 @@
(import ./_overlays/jj-ryu.nix {inherit inputs;})
# cog-cli
(import ./_overlays/cog-cli.nix {inherit inputs;})
# qmd
(import ./_overlays/qmd.nix {inherit inputs;})
# jj-starship (passes through upstream overlay)
(import ./_overlays/jj-starship.nix {inherit inputs;})
# zjstatus

View File

@@ -2,6 +2,7 @@
den.aspects.host-nixos-base.includes = [
den.aspects.nixos-system
den.aspects.core
den.aspects.mosh
den.aspects.openssh
den.aspects.tailscale
];

View File

@@ -24,6 +24,7 @@ in {
jq
killall
lsof
mosh
ouch
ov
sd

View File

@@ -1,30 +0,0 @@
{
"data": "ENC[AES256_GCM,data:qZCh11bq1W7FwXMrDX5KMOQFsgsKgbhimZ4TDNvv1BDU,iv:PJJJB5uyhuTUSA4doQ6h6qMbmPgerPv+FfsJ0f20kYY=,tag:lXpit9T7K2rGUu1zsJH6dg==,type:str]",
"sops": {
"age": [
{
"recipient": "age1xate984yhl9qk9d4q99pyxmzz48sq56nfhu8weyzkgum4ed5tc5shjmrs7",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBvNXBjN3RZcGd2R2MyQWhR\nenBpa3VHMFhkWDEveUtmZWtPSk01QkhUVFJFCnRSc3ZGdFFjVDJnbHpwKzZ1TUdI\nZUdWQzM2bmZ1RUl4UVpCbDJoL0RkQncKLS0tIHRxUzFiaS8wekdCQ0Z0dTMxSnZ0\nS0UycFNMSUJHcVlkR2JZNlZsbldoaUkKe4EaYIquhABMEywizJXzEVEM1JbEwFqU\nAmQ6R+p4mNgaR5HCrnINQId3qqVfsP2UDqPDepERZIA0V2E5h9ckfQ==\n-----END AGE ENCRYPTED FILE-----\n"
},
{
"recipient": "age1njjegjjdqzfnrr54f536yl4lduqgna3wuv7ef6vtl9jw5cju0grsgy62tm",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBBZ1hYenpVTm1lTFdjTEJj\nTUN5MzNtbzdWNzQ2VE9tRlJJRVRYTUtLOXpnCnlLWTZPNGE5NDlwRHhWSnlTNUhv\nc3VZVklEZDB5dXlFc01wcEQxckl0NjgKLS0tIEE5T2JmNlJaYkZpWkhYdDhPSTlW\nei96YmhUWUZ2enVnRjhKOVlNZmNHa3cKxaHBtCwLDLNcscptlDk6ta/i491lLPt6\nOh/RtbkxtJ02cahIsKgajspOElx8u2Nb3/lmK51JbUIexH9TDQ+3tg==\n-----END AGE ENCRYPTED FILE-----\n"
},
{
"recipient": "age187jl7e4k9n4guygkmpuqzeh0wenefwrfkpvuyhvwjrjwxqpzassqq3x67j",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBJbFFpQzB2OU9jYUZlL2Nl\nOEZ0WGcyR1BpSmZGU0Vxa0N6WGpCbXBXZGxJCnlLK0JJWElndC9KRGN5d1NNd0tj\nUkExQ0tTSGRKQjJHUGtaWUtKS285MU0KLS0tIGI5cWtVcW43b2Q5VXRidllzamtB\nV1IxYnN1KzdaaXdvWG96a2VkZ0ZvWGsKxdbXwbgFIc3/3VjwUJ1A+cX0oaT+oojz\nrI9Dmk782U/dQrcMv1lRBIWWtAdAqS6GiQ1aUKk5aHpuHOZeHHFjMw==\n-----END AGE ENCRYPTED FILE-----\n"
},
{
"recipient": "age1ez6j3r5wdp0tjy7n5qzv5vfakdc2nh2zeu388zu7a80l0thv052syxq5e2",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSA0aTgwQ3ZEVG41eW9MQ1RX\nSElRdkdvL21kZ2ZLeGNPbGJiNll5WjdsM2gwCmJQVmJjWEJBaVhEKzJqYWlib2JX\ndWRzSE9QTVQ1c004dldzR2NtR3pvQlUKLS0tIEsvZDNnNWJJaWZyOCtYUEs1eklh\nNXl2dUM0amVtSmdjTy83ZzBSeGp3Q0UKQ/cUYPACFNcxulzW964ftsHjoCBRGB66\nc1e/ObQNM+b+be5UzJi3/gago9CHRzZ3Rp6zE9i5oQBzgLGWlJuPNQ==\n-----END AGE ENCRYPTED FILE-----\n"
},
{
"recipient": "age1tlymdmaukhwupzrhszspp26lgd8s64rw4vu9lwc7gsgrjm78095s9fe9l3",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBLNUk5aHBqdEJoYWdaeVlx\nOUkrSXMvRFRmQ29QRE5hWTlHdlcwOUxFRXdRCnE0L1BQdHZDRWRCQUZ2dHQ2Witi\nQ1g5OFFWM2tPT0xEZUZvdXJNdm9aWTgKLS0tIENvM1h1V042L3JHV1pWeDAxdG84\nUTBTZjdHa1lCNGJSRG1iZmtpc1laZTQK/twptPseDi9DM/7NX2F0JO1BEkqklbh1\nxQ1Qwpy4K/P2pFTOBKqDb62DaIALxiGA1Q55dw+fPRSsnL8VcxG8JA==\n-----END AGE ENCRYPTED FILE-----\n"
}
],
"lastmodified": "2026-03-25T11:23:08Z",
"mac": "ENC[AES256_GCM,data:UM0QWfQueExEHRjqNAEIgwpVBjgpd0a6DXxDeRci08qMzTypTlWIofUGMyM1k+J+mUKr3vWMe3q48OwVtUaXnbWimH+8uFEwb5x0e+ayTg+w/C23d+JJmQIX8g5JXtknUAZFNrh3wdZOadYYRr/vDzCKud4lMrmFBKFXsH1DPEI=,iv:kTx8omo8Gt4mTLAs6MoLxj4GizWpxlSXMCTWNlRR5SY=,tag:PB7nMCVxCLRQdhC/eelK/w==,type:str]",
"version": "3.12.2"
}
}