Compare commits

...

38 Commits

Author SHA1 Message Date
85eb20c4cb stuff 2026-04-10 15:41:24 +02:00
276378b57c flk 2026-04-09 08:33:04 +00:00
03b968513b flk 2026-04-08 08:07:51 +00:00
e545d38314 sm 2026-04-08 08:07:51 +00:00
564ccd2559 keys 2026-04-07 11:43:58 +00:00
f32f51970b up 2026-04-07 11:41:55 +00:00
0e50839ce0 up 2026-04-07 11:02:37 +00:00
ed995a1edd flk 2026-04-07 11:02:37 +00:00
2424b87b46 flk 2026-04-07 11:02:37 +00:00
37ef245374 flk 2026-04-02 14:46:44 +00:00
ac40abe696 flk 2026-04-02 14:45:05 +00:00
a611d7fb99 clean 2026-04-02 13:01:08 +00:00
89c430b940 review more 2026-04-02 12:57:50 +00:00
1dd7d8a2d8 fix review 2026-04-02 12:57:50 +00:00
69697c822c model 2026-04-02 11:12:13 +00:00
8def00f368 review 2026-04-02 10:58:32 +00:00
c907354a4f flk 2026-04-02 10:09:06 +00:00
80ff1f8b03 fuck 2026-04-01 16:11:02 +00:00
9b5069693a fix opensrc mcp command 2026-04-01 16:11:02 +00:00
2cad84ff26 rm direnv 2026-04-01 16:11:02 +00:00
bbfcc366c2 up 2026-04-01 11:40:32 +00:00
8b09aa9705 cleanup 2026-04-01 11:40:32 +00:00
0eaa830050 Add jj-review TUI plugin to global opencode config 2026-04-01 11:40:32 +00:00
8577807650 Add task classification taxonomy to moonshot agent 2026-04-01 11:40:32 +00:00
a186c136f0 Rename anvil agent to moonshot, bump reasoningEffort to xhigh 2026-04-01 11:40:32 +00:00
e6b5ff0fb8 Add anvil agent — autonomous GPT-5.4 deep worker 2026-04-01 11:40:32 +00:00
6a8bda9031 flk 2026-04-01 11:40:32 +00:00
073cc1aa38 fix TUI plugin: use default export with id (required by plugin loader) 2026-04-01 11:40:32 +00:00
cea666f3d8 rename review plugin slash command to /jj-review to avoid built-in conflict 2026-04-01 11:40:32 +00:00
813fd347d5 Remove pi agent infrastructure 2026-04-01 11:40:32 +00:00
66ff22f9e6 add opencode review plugin (port from pi-coding-agent extension) 2026-04-01 11:40:32 +00:00
86afae7d6c sneaky 2026-04-01 11:40:32 +00:00
5e46938488 bring oc back 2026-04-01 11:40:32 +00:00
8f3951522c chatgpt 2026-04-01 11:40:32 +00:00
6e5af04278 up 2026-04-01 11:40:32 +00:00
01cf320c2e jj 2026-03-30 14:40:06 +00:00
b69cc789b1 flk 2026-03-30 10:35:50 +00:00
642598bbab chore: update cog-cli to 0.24.1 2026-03-30 10:35:50 +00:00
55 changed files with 2176 additions and 11635 deletions

View File

@@ -1,12 +0,0 @@
{
"id": "95b075f0",
"title": "Fix Wipr 2 mas installation failure in nixos-config",
"tags": [
"bugfix",
"mas",
"nix-darwin"
],
"status": "in_progress",
"created_at": "2026-03-29T18:55:14.812Z",
"assigned_to_session": "8318f7d4-ccd1-4467-b7c9-fb05e53e4a1d"
}

View File

@@ -2,7 +2,7 @@ keys:
- &user_cschmatzler age1xate984yhl9qk9d4q99pyxmzz48sq56nfhu8weyzkgum4ed5tc5shjmrs7 - &user_cschmatzler age1xate984yhl9qk9d4q99pyxmzz48sq56nfhu8weyzkgum4ed5tc5shjmrs7
- &host_tahani age1njjegjjdqzfnrr54f536yl4lduqgna3wuv7ef6vtl9jw5cju0grsgy62tm - &host_tahani age1njjegjjdqzfnrr54f536yl4lduqgna3wuv7ef6vtl9jw5cju0grsgy62tm
- &host_michael age187jl7e4k9n4guygkmpuqzeh0wenefwrfkpvuyhvwjrjwxqpzassqq3x67j - &host_michael age187jl7e4k9n4guygkmpuqzeh0wenefwrfkpvuyhvwjrjwxqpzassqq3x67j
- &host_jason age1ez6j3r5wdp0tjy7n5qzv5vfakdc2nh2zeu388zu7a80l0thv052syxq5e2 - &host_janet age1f9h725ewwwwwkelnrvdvrurg6fcsn3zxrxdt0v6v8ys0nzngcsvqu77nc8
- &host_chidi age1tlymdmaukhwupzrhszspp26lgd8s64rw4vu9lwc7gsgrjm78095s9fe9l3 - &host_chidi age1tlymdmaukhwupzrhszspp26lgd8s64rw4vu9lwc7gsgrjm78095s9fe9l3
creation_rules: creation_rules:
- path_regex: secrets/[^/]+$ - path_regex: secrets/[^/]+$
@@ -11,5 +11,5 @@ creation_rules:
- *user_cschmatzler - *user_cschmatzler
- *host_tahani - *host_tahani
- *host_michael - *host_michael
- *host_jason - *host_janet
- *host_chidi - *host_chidi

278
flake.lock generated
View File

@@ -114,11 +114,11 @@
] ]
}, },
"locked": { "locked": {
"lastModified": 1773000227, "lastModified": 1775037210,
"narHash": "sha256-zm3ftUQw0MPumYi91HovoGhgyZBlM4o3Zy0LhPNwzXE=", "narHash": "sha256-KM2WYj6EA7M/FVZVCl3rqWY+TFV5QzSyyGE2gQxeODU=",
"owner": "LnL7", "owner": "LnL7",
"repo": "nix-darwin", "repo": "nix-darwin",
"rev": "da529ac9e46f25ed5616fd634079a5f3c579135f", "rev": "06648f4902343228ce2de79f291dd5a58ee12146",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -130,11 +130,11 @@
}, },
"den": { "den": {
"locked": { "locked": {
"lastModified": 1774806415, "lastModified": 1775702491,
"narHash": "sha256-39wLrZ36BJrj31TF04RzdYkOk+8BYbw5+XKnY6p1bN8=", "narHash": "sha256-5BCNtE/zCLSheltliy4hTdwsq0Boj/W1XRIX8n89nqA=",
"owner": "vic", "owner": "vic",
"repo": "den", "repo": "den",
"rev": "25b8303f45d719f7052473d98907091bae9dd679", "rev": "d267c458e384b57317d06d45f7c65f7fb03fae4b",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -191,11 +191,11 @@
"rust-analyzer-src": "rust-analyzer-src" "rust-analyzer-src": "rust-analyzer-src"
}, },
"locked": { "locked": {
"lastModified": 1774768700, "lastModified": 1775721346,
"narHash": "sha256-kc8QmtrXY6VyBS6Uayt1M9hzPXR7TfKng8r5WRH8Iw4=", "narHash": "sha256-ogqjruvVBYEj8sWM3viOucSo1Pna9c147EKQOfA+p3I=",
"owner": "nix-community", "owner": "nix-community",
"repo": "fenix", "repo": "fenix",
"rev": "6ccc56115c2c6901e40c1cd7a64c84ddbe09060a", "rev": "99fde43dfee2a672e4e37ef211e0844337e5b725",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -282,11 +282,11 @@
}, },
"flake-file": { "flake-file": {
"locked": { "locked": {
"lastModified": 1774666175, "lastModified": 1774886516,
"narHash": "sha256-WaZxvtOvVNikiNTen2Emhds2RvzFCWIb7KU9C0eWrNA=", "narHash": "sha256-w2LoQVM6DXrSdGUZBZqa1nYkMzHoB0t82DrptzZKhTs=",
"owner": "vic", "owner": "vic",
"repo": "flake-file", "repo": "flake-file",
"rev": "953d01f3ae5ba50869c5e1248062198f73e971bf", "rev": "3daadf37de2bb85b0ff34e2a7ab0d71e077c2b9e",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -302,11 +302,11 @@
] ]
}, },
"locked": { "locked": {
"lastModified": 1772408722, "lastModified": 1775087534,
"narHash": "sha256-rHuJtdcOjK7rAHpHphUb1iCvgkU3GpfvicLMwwnfMT0=", "narHash": "sha256-91qqW8lhL7TLwgQWijoGBbiD4t7/q75KTi8NxjVmSmA=",
"owner": "hercules-ci", "owner": "hercules-ci",
"repo": "flake-parts", "repo": "flake-parts",
"rev": "f20dc5d9b8027381c474144ecabc9034d6a839a3", "rev": "3107b77cd68437b9a76194f0f7f9c55f2329ca5b",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -323,11 +323,11 @@
] ]
}, },
"locked": { "locked": {
"lastModified": 1772408722, "lastModified": 1775087534,
"narHash": "sha256-rHuJtdcOjK7rAHpHphUb1iCvgkU3GpfvicLMwwnfMT0=", "narHash": "sha256-91qqW8lhL7TLwgQWijoGBbiD4t7/q75KTi8NxjVmSmA=",
"owner": "hercules-ci", "owner": "hercules-ci",
"repo": "flake-parts", "repo": "flake-parts",
"rev": "f20dc5d9b8027381c474144ecabc9034d6a839a3", "rev": "3107b77cd68437b9a76194f0f7f9c55f2329ca5b",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -344,11 +344,11 @@
] ]
}, },
"locked": { "locked": {
"lastModified": 1772408722, "lastModified": 1775087534,
"narHash": "sha256-rHuJtdcOjK7rAHpHphUb1iCvgkU3GpfvicLMwwnfMT0=", "narHash": "sha256-91qqW8lhL7TLwgQWijoGBbiD4t7/q75KTi8NxjVmSmA=",
"owner": "hercules-ci", "owner": "hercules-ci",
"repo": "flake-parts", "repo": "flake-parts",
"rev": "f20dc5d9b8027381c474144ecabc9034d6a839a3", "rev": "3107b77cd68437b9a76194f0f7f9c55f2329ca5b",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -414,24 +414,6 @@
"type": "github" "type": "github"
} }
}, },
"flake-utils_3": {
"inputs": {
"systems": "systems_6"
},
"locked": {
"lastModified": 1731533236,
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"himalaya": { "himalaya": {
"inputs": { "inputs": {
"fenix": "fenix_2", "fenix": "fenix_2",
@@ -459,11 +441,11 @@
] ]
}, },
"locked": { "locked": {
"lastModified": 1774738535, "lastModified": 1775683737,
"narHash": "sha256-2jfBEZUC67IlnxO5KItFCAd7Oc+1TvyV/jQlR+2ykGQ=", "narHash": "sha256-oBYyowo6yfgb95Z78s3uTnAd9KkpJpwzjJbfnpLaM2Y=",
"owner": "nix-community", "owner": "nix-community",
"repo": "home-manager", "repo": "home-manager",
"rev": "769e07ef8f4cf7b1ec3b96ef015abec9bc6b1e2a", "rev": "7ba4ee4228ed36123c7cb75d50524b43514ef992",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -475,11 +457,11 @@
"homebrew-cask": { "homebrew-cask": {
"flake": false, "flake": false,
"locked": { "locked": {
"lastModified": 1774803484, "lastModified": 1775700724,
"narHash": "sha256-D9rjYsI9pvWYMXUG4HcF2zxLSpwDUl5YVRSEwQGs8z4=", "narHash": "sha256-qQm9uIF+tI7gamLMa7DSXSQQzLQalEtOa7PHPxNkbr8=",
"owner": "homebrew", "owner": "homebrew",
"repo": "homebrew-cask", "repo": "homebrew-cask",
"rev": "2d36600de3dfb5da1c88bae7c2130615033f605e", "rev": "c622bff3b88557e3c870104db0426b93e0767a8f",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -491,11 +473,11 @@
"homebrew-core": { "homebrew-core": {
"flake": false, "flake": false,
"locked": { "locked": {
"lastModified": 1774806566, "lastModified": 1775721921,
"narHash": "sha256-8tEiyTqJGcmR1wHsfkZReqMl/Vk+Uz0NdpsOPoeS/o4=", "narHash": "sha256-s6K2QbKa4OJlblFp3zMSh0/2PM2zpWpAd4ZnREirj/I=",
"owner": "homebrew", "owner": "homebrew",
"repo": "homebrew-core", "repo": "homebrew-core",
"rev": "0215c9b45ea483da06e0c026fa54d81c59001f6d", "rev": "70028a68b515145bbeccb2961240275ab6eb9e82",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -553,11 +535,11 @@
"jj-nvim": { "jj-nvim": {
"flake": false, "flake": false,
"locked": { "locked": {
"lastModified": 1773914813, "lastModified": 1775551442,
"narHash": "sha256-UuNcOfgsWuHu9hx6NT/FbQ0E8T6nRY1X6O6CDRtH8Sk=", "narHash": "sha256-hoU+DenrgxNwvLNmDtIsJ5yB5fhRjDRPOOL8WW9bpZM=",
"owner": "NicolasGB", "owner": "NicolasGB",
"repo": "jj.nvim", "repo": "jj.nvim",
"rev": "a6e163bcc3a6b75e5b6d4190b64ed4b39f8ddb0c", "rev": "2dbe2c73c599a29e86e4123b42e430828b1f01d9",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -611,11 +593,11 @@
"treefmt-nix": "treefmt-nix" "treefmt-nix": "treefmt-nix"
}, },
"locked": { "locked": {
"lastModified": 1774806813, "lastModified": 1775705124,
"narHash": "sha256-fTa4qUu/ARkL+1aH8JYGe6l3LByzTNDJDACKcbTRnIk=", "narHash": "sha256-OUtgrn0k7DYnAP9skY2rOJSWJyn4w5tnUcF3lSJdfME=",
"owner": "numtide", "owner": "numtide",
"repo": "llm-agents.nix", "repo": "llm-agents.nix",
"rev": "defffa94a67a4cdbc18ae5650f0b69e4b92391c8", "rev": "ca76524952b00135dba57da62ce2dd123a1ba4be",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -655,11 +637,11 @@
] ]
}, },
"locked": { "locked": {
"lastModified": 1774742707, "lastModified": 1775693082,
"narHash": "sha256-a3FjZJxDOn0t18VwtIAgpNuUNaIEl6T+Awu5tXifQQw=", "narHash": "sha256-nnhkpfWsRutQh//KmVoIV7e9Gk90tBezjcoRr775BfU=",
"owner": "nix-community", "owner": "nix-community",
"repo": "neovim-nightly-overlay", "repo": "neovim-nightly-overlay",
"rev": "7966a9c203276bea3b7e8dd2e125fd2b4c8b6753", "rev": "21b2795e6aeb4a0110bdc7bd81bad59c022c9986",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -671,11 +653,11 @@
"neovim-src": { "neovim-src": {
"flake": false, "flake": false,
"locked": { "locked": {
"lastModified": 1774725909, "lastModified": 1775689880,
"narHash": "sha256-aOiiQCmjCrvo+jAUDO2oMa377FvOtU97aqvTm74ZRGU=", "narHash": "sha256-savZYhFAaBm3BQUdTrPOv7i5K18JFANJvyHv0uuvaWM=",
"owner": "neovim", "owner": "neovim",
"repo": "neovim", "repo": "neovim",
"rev": "d5516daf121aa718e79bcd423ee24c24492893c0", "rev": "eefb50e352a689ec1a0a55d6827abea79960cd3d",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -752,11 +734,11 @@
}, },
"nixpkgs_4": { "nixpkgs_4": {
"locked": { "locked": {
"lastModified": 1774610258, "lastModified": 1775639890,
"narHash": "sha256-HaThtroVD9wRdx7KQk0B75JmFcXlMUoEdDFNOMOlsOs=", "narHash": "sha256-9O9gNidrdzcb7vgKGtff7QiLtr0IsVaCi0pAXm8anhQ=",
"owner": "NixOS", "owner": "NixOS",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "832efc09b4caf6b4569fbf9dc01bec3082a00611", "rev": "456e8a9468b9d46bd8c9524425026c00745bc4d2",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -768,11 +750,11 @@
}, },
"nixpkgs_5": { "nixpkgs_5": {
"locked": { "locked": {
"lastModified": 1774807110, "lastModified": 1775722436,
"narHash": "sha256-YR+T4/iT/UYB8dZ38Hb9n+qCBlYYx3wGSSiue6DFcRQ=", "narHash": "sha256-Z7QmfL80jmUPoSQkMlCc+1MGfkugf7bG47H3UTsyi7Q=",
"owner": "nixos", "owner": "nixos",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "f39098836a87b4c554137aa462759762a6de6d86", "rev": "e73a61d035ee91f95bb0a6b95ce0b9d2866bd332",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -799,22 +781,6 @@
} }
}, },
"nixpkgs_7": { "nixpkgs_7": {
"locked": {
"lastModified": 1769188852,
"narHash": "sha256-aBAGyMum27K7cP5OR7BMioJOF3icquJMZDDgk6ZEg1A=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "a1bab9e494f5f4939442a57a58d0449a109593fe",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixpkgs-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs_8": {
"locked": { "locked": {
"lastModified": 1765934234, "lastModified": 1765934234,
"narHash": "sha256-pJjWUzNnjbIAMIc5gRFUuKCDQ9S1cuh3b2hKgA7Mc4A=", "narHash": "sha256-pJjWUzNnjbIAMIc5gRFUuKCDQ9S1cuh3b2hKgA7Mc4A=",
@@ -837,11 +803,11 @@
"systems": "systems_4" "systems": "systems_4"
}, },
"locked": { "locked": {
"lastModified": 1774802402, "lastModified": 1775307257,
"narHash": "sha256-L1UJ/zxKTyyaGGmytH6OYlgQ0HGSMhvPkvU+iz4Mkb8=", "narHash": "sha256-y9hEecHH4ennFwIcw1n480YCGh73DkEmizmQnyXuvgg=",
"owner": "nix-community", "owner": "nix-community",
"repo": "nixvim", "repo": "nixvim",
"rev": "cbd8536a05d1aae2593cb5c9ace1010c8c5845cb", "rev": "2e008bb941f72379d5b935d5bfe70ed8b7c793ff",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -850,86 +816,6 @@
"type": "github" "type": "github"
} }
}, },
"pi-agent-stuff": {
"flake": false,
"locked": {
"lastModified": 1774394773,
"narHash": "sha256-HguiVoKS87LEnqdUPLXv6VNDlA4zg9+QImZ3YnhlR2c=",
"owner": "mitsuhiko",
"repo": "agent-stuff",
"rev": "3bf6bd34e516af81d9c2b313b568031a785a15e2",
"type": "github"
},
"original": {
"owner": "mitsuhiko",
"repo": "agent-stuff",
"type": "github"
}
},
"pi-elixir": {
"flake": false,
"locked": {
"lastModified": 1772900407,
"narHash": "sha256-QoCPVdN5CYGe5288cJQmB10ds/UOucHIyG9z9E/4hsw=",
"owner": "dannote",
"repo": "pi-elixir",
"rev": "3b8f667beb696ce6ed456e762bfcf61e7326f5c4",
"type": "github"
},
"original": {
"owner": "dannote",
"repo": "pi-elixir",
"type": "github"
}
},
"pi-harness": {
"flake": false,
"locked": {
"lastModified": 1774794426,
"narHash": "sha256-pm1pfWAzDgRbgkdZwMMUOrlTXdcyRu/bUMrFeToPNEA=",
"owner": "aliou",
"repo": "pi-harness",
"rev": "5f4836a60ae6f562fe1f0b69c2ab5a8edc1bdc0b",
"type": "github"
},
"original": {
"owner": "aliou",
"repo": "pi-harness",
"type": "github"
}
},
"pi-mcp-adapter": {
"flake": false,
"locked": {
"lastModified": 1774247177,
"narHash": "sha256-HTexm+b+UUbJD4qwIqlNcVPhF/G7/MtBtXa0AdeztbY=",
"owner": "nicobailon",
"repo": "pi-mcp-adapter",
"rev": "c0919a29d263c2058c302641ddb04769c21be262",
"type": "github"
},
"original": {
"owner": "nicobailon",
"repo": "pi-mcp-adapter",
"type": "github"
}
},
"pi-rose-pine": {
"flake": false,
"locked": {
"lastModified": 1770936151,
"narHash": "sha256-6TzuWJPAn8zz+lUjZ3slFCNdPVd/Z2C+WoXFsLopk1g=",
"owner": "zenobi-us",
"repo": "pi-rose-pine",
"rev": "9b342f6e16d6b28c00c2f888ba2f050273981bdb",
"type": "github"
},
"original": {
"owner": "zenobi-us",
"repo": "pi-rose-pine",
"type": "github"
}
},
"pimalaya": { "pimalaya": {
"flake": false, "flake": false,
"locked": { "locked": {
@@ -946,25 +832,6 @@
"type": "github" "type": "github"
} }
}, },
"qmd": {
"inputs": {
"flake-utils": "flake-utils_2",
"nixpkgs": "nixpkgs_7"
},
"locked": {
"lastModified": 1774742449,
"narHash": "sha256-x6+O8KX2LVqL49MLZsvyENITC5pY+IiTrI59OSwxurU=",
"owner": "tobi",
"repo": "qmd",
"rev": "1fb2e2819e4024045203b4ea550ec793683baf2b",
"type": "github"
},
"original": {
"owner": "tobi",
"repo": "qmd",
"type": "github"
}
},
"root": { "root": {
"inputs": { "inputs": {
"code-review-nvim": "code-review-nvim", "code-review-nvim": "code-review-nvim",
@@ -994,12 +861,6 @@
"nixpkgs" "nixpkgs"
], ],
"nixvim": "nixvim", "nixvim": "nixvim",
"pi-agent-stuff": "pi-agent-stuff",
"pi-elixir": "pi-elixir",
"pi-harness": "pi-harness",
"pi-mcp-adapter": "pi-mcp-adapter",
"pi-rose-pine": "pi-rose-pine",
"qmd": "qmd",
"sops-nix": "sops-nix", "sops-nix": "sops-nix",
"zjstatus": "zjstatus" "zjstatus": "zjstatus"
} }
@@ -1007,11 +868,11 @@
"rust-analyzer-src": { "rust-analyzer-src": {
"flake": false, "flake": false,
"locked": { "locked": {
"lastModified": 1774569884, "lastModified": 1775663707,
"narHash": "sha256-E8iWEPzg7OnE0XXXjo75CX7xFauqzJuGZ5wSO9KS8Ek=", "narHash": "sha256-3cSvpBETRa8aDSrUCX1jGc6FSse3OWB7cXACIZW8BYI=",
"owner": "rust-lang", "owner": "rust-lang",
"repo": "rust-analyzer", "repo": "rust-analyzer",
"rev": "443ddcddd0c73b07b799d052f5ef3b448c2f3508", "rev": "8c5af725817905e462052d91a8d229b85ffa83a5",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -1083,11 +944,11 @@
] ]
}, },
"locked": { "locked": {
"lastModified": 1774760784, "lastModified": 1775682595,
"narHash": "sha256-D+tgywBHldTc0klWCIC49+6Zlp57Y4GGwxP1CqfxZrY=", "narHash": "sha256-0E9PohY/VuESLq0LR4doaH7hTag513sDDW5n5qmHd1Q=",
"owner": "Mic92", "owner": "Mic92",
"repo": "sops-nix", "repo": "sops-nix",
"rev": "8adb84861fe70e131d44e1e33c426a51e2e0bfa5", "rev": "d2e8438d5886e92bc5e7c40c035ab6cae0c41f76",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -1171,21 +1032,6 @@
"type": "github" "type": "github"
} }
}, },
"systems_6": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
},
"treefmt-nix": { "treefmt-nix": {
"inputs": { "inputs": {
"nixpkgs": [ "nixpkgs": [
@@ -1194,11 +1040,11 @@
] ]
}, },
"locked": { "locked": {
"lastModified": 1773297127, "lastModified": 1775636079,
"narHash": "sha256-6E/yhXP7Oy/NbXtf1ktzmU8SdVqJQ09HC/48ebEGBpk=", "narHash": "sha256-pc20NRoMdiar8oPQceQT47UUZMBTiMdUuWrYu2obUP0=",
"owner": "numtide", "owner": "numtide",
"repo": "treefmt-nix", "repo": "treefmt-nix",
"rev": "71b125cd05fbfd78cab3e070b73544abe24c5016", "rev": "790751ff7fd3801feeaf96d7dc416a8d581265ba",
"type": "github" "type": "github"
}, },
"original": { "original": {
@@ -1228,8 +1074,8 @@
"zjstatus": { "zjstatus": {
"inputs": { "inputs": {
"crane": "crane", "crane": "crane",
"flake-utils": "flake-utils_3", "flake-utils": "flake-utils_2",
"nixpkgs": "nixpkgs_8", "nixpkgs": "nixpkgs_7",
"rust-overlay": "rust-overlay" "rust-overlay": "rust-overlay"
}, },
"locked": { "locked": {

View File

@@ -68,27 +68,6 @@
nixpkgs.url = "github:nixos/nixpkgs/master"; nixpkgs.url = "github:nixos/nixpkgs/master";
nixpkgs-lib.follows = "nixpkgs"; nixpkgs-lib.follows = "nixpkgs";
nixvim.url = "github:nix-community/nixvim"; nixvim.url = "github:nix-community/nixvim";
pi-agent-stuff = {
url = "github:mitsuhiko/agent-stuff";
flake = false;
};
pi-elixir = {
url = "github:dannote/pi-elixir";
flake = false;
};
pi-harness = {
url = "github:aliou/pi-harness";
flake = false;
};
pi-mcp-adapter = {
url = "github:nicobailon/pi-mcp-adapter";
flake = false;
};
pi-rose-pine = {
url = "github:zenobi-us/pi-rose-pine";
flake = false;
};
qmd.url = "github:tobi/qmd";
sops-nix = { sops-nix = {
url = "github:Mic92/sops-nix"; url = "github:Mic92/sops-nix";
inputs.nixpkgs.follows = "nixpkgs"; inputs.nixpkgs.follows = "nixpkgs";

View File

@@ -1,190 +0,0 @@
/**
* No Git Extension
*
* Blocks direct git invocations and tells the LLM to use jj (Jujutsu) instead.
* Mentions of the word "git" in search patterns, strings, comments, etc. are allowed.
*/
import type { ExtensionAPI } from "@mariozechner/pi-coding-agent";
import { isToolCallEventType } from "@mariozechner/pi-coding-agent";
// One lexical unit produced by tokenizeShell: either a command word or a
// shell operator (newline, `&&`, `||`, `;`, `|`, `(`, `)`).
type ShellToken =
  | { type: "word"; value: string }
  | { type: "operator"; value: string };
// Wrapper commands that may precede the real command word (skipped by findCommandWord).
const COMMAND_PREFIXES = new Set(["env", "command", "builtin", "time", "sudo", "nohup", "nice"]);
// Shell control keywords that are never the command itself.
const SHELL_KEYWORDS = new Set(["if", "then", "elif", "else", "do", "while", "until", "case", "in"]);
// Interpreters whose inline `-c` scripts are scanned recursively for git usage.
const SHELL_INTERPRETERS = new Set(["bash", "sh", "zsh", "fish", "nu"]);
/** True when `value` looks like a shell variable assignment, e.g. `FOO=bar`. */
function isAssignmentWord(value: string): boolean {
  const assignmentPattern = /^[A-Za-z_][A-Za-z0-9_]*=.*/;
  return assignmentPattern.test(value);
}
/**
 * Minimal shell lexer: splits `command` into word and operator tokens.
 * Handles single quotes (everything literal inside), double quotes
 * (backslash escapes the next character), and backslash escapes outside
 * quotes. Recognized operators: newline, `&&`, `||`, `;`, `|`, `(`, `)`.
 * An unterminated quote simply ends at end-of-input.
 */
function tokenizeShell(command: string): ShellToken[] {
  const tokens: ShellToken[] = [];
  let current = ""; // accumulator for the word being built
  let quote: "'" | '"' | null = null; // active quote character, if any
  // Flush the accumulated word (if non-empty) as a word token.
  const pushWord = () => {
    if (!current) return;
    tokens.push({ type: "word", value: current });
    current = "";
  };
  for (let i = 0; i < command.length; i++) {
    const char = command[i];
    if (quote) {
      if (quote === "'") {
        // Inside single quotes: everything is literal until the closing quote.
        if (char === "'") {
          quote = null;
        } else {
          current += char;
        }
        continue;
      }
      // Inside double quotes: the closing quote ends the region...
      if (char === '"') {
        quote = null;
        continue;
      }
      // ...and a backslash escapes the following character.
      if (char === "\\") {
        if (i + 1 < command.length) {
          current += command[i + 1];
          i += 1;
        }
        continue;
      }
      current += char;
      continue;
    }
    if (char === "'" || char === '"') {
      quote = char;
      continue;
    }
    if (char === "\\") {
      // Unquoted backslash escapes the next character (a trailing one is dropped).
      if (i + 1 < command.length) {
        current += command[i + 1];
        i += 1;
      }
      continue;
    }
    if (/\s/.test(char)) {
      // Whitespace terminates the current word; a newline also separates commands.
      pushWord();
      if (char === "\n") {
        tokens.push({ type: "operator", value: "\n" });
      }
      continue;
    }
    // Two-character operators must be checked before single-character ones.
    const twoCharOperator = command.slice(i, i + 2);
    if (twoCharOperator === "&&" || twoCharOperator === "||") {
      pushWord();
      tokens.push({ type: "operator", value: twoCharOperator });
      i += 1;
      continue;
    }
    if (char === ";" || char === "|" || char === "(" || char === ")") {
      pushWord();
      tokens.push({ type: "operator", value: char });
      continue;
    }
    current += char;
  }
  pushWord(); // flush the final word (no trailing separator required)
  return tokens;
}
/**
 * Locate the word naming the actual command in a simple-command word list,
 * skipping shell keywords, leading `VAR=value` assignments, and wrapper
 * prefixes such as `env` or `sudo`. Returns the word and its index, or
 * `{ index: words.length }` (no `word`) when none exists.
 */
function findCommandWord(words: string[]): { word?: string; index: number } {
  let index = 0;
  for (const candidate of words) {
    const isSkippable =
      SHELL_KEYWORDS.has(candidate) || isAssignmentWord(candidate) || COMMAND_PREFIXES.has(candidate);
    if (!isSkippable) {
      return { word: candidate, index };
    }
    index += 1;
  }
  return { index: words.length };
}
/**
 * Extract the inline script passed to a shell interpreter via `-c`
 * (possibly bundled with other short flags, e.g. `-lc`) or `--command`.
 * Returns null when no such flag/argument pair follows `commandIndex`.
 */
function getInlineShellCommand(words: string[], commandIndex: number): string | null {
  const inlineFlag = /^(?:-[A-Za-z]*c[A-Za-z]*|--command)$/;
  for (let i = commandIndex + 1; i < words.length; i++) {
    if (inlineFlag.test(words[i])) {
      // The script is the argument right after the flag, if present.
      return words[i + 1] ?? null;
    }
  }
  return null;
}
/**
 * Decide whether a single pipeline segment (one simple command's words)
 * is a blocked direct `git` invocation. `jj` is explicitly allowed; shell
 * interpreters are inspected recursively via their inline `-c` script.
 */
function segmentContainsBlockedGit(words: string[]): boolean {
  const { word, index } = findCommandWord(words);
  if (word === undefined) return false;
  if (word === "git") return true;
  if (word === "jj") return false;
  if (!SHELL_INTERPRETERS.has(word)) {
    return false;
  }
  // Recurse into e.g. `bash -c "git status"`.
  const inlineCommand = getInlineShellCommand(words, index);
  return inlineCommand !== null && containsBlockedGitInvocation(inlineCommand);
}
/**
 * True when any segment of `command` — split at shell operators such as
 * `;`, `|`, `&&`, `||`, or newlines — directly invokes git.
 */
function containsBlockedGitInvocation(command: string): boolean {
  let segment: string[] = [];
  for (const token of tokenizeShell(command)) {
    if (token.type === "word") {
      segment.push(token.value);
      continue;
    }
    // An operator closes the current segment; check it before starting fresh.
    if (segmentContainsBlockedGit(segment)) {
      return true;
    }
    segment = [];
  }
  // The final segment has no trailing operator — check it as well.
  return segmentContainsBlockedGit(segment);
}
/**
 * Pi extension entry point: intercepts `bash` tool calls and blocks any
 * command that directly invokes git, steering the agent toward jj instead.
 * Mentions of "git" inside strings/comments are allowed — only command
 * position is checked (see containsBlockedGitInvocation).
 */
export default function (pi: ExtensionAPI) {
  pi.on("tool_call", async (event, _ctx) => {
    // Only bash tool invocations are relevant.
    if (!isToolCallEventType("bash", event)) return;
    const command = event.input.command.trim();
    if (containsBlockedGitInvocation(command)) {
      return {
        block: true,
        reason: "git is not used in this project. Use jj (Jujutsu) instead.",
      };
    }
  });
}

View File

@@ -1,28 +0,0 @@
/**
* No Scripting Extension
*
* Blocks python, perl, ruby, php, lua, node -e, and inline bash/sh scripts.
* Tells the LLM to use `nu -c` instead.
*/
import type { ExtensionAPI } from "@mariozechner/pi-coding-agent";
import { isToolCallEventType } from "@mariozechner/pi-coding-agent";
// Matches a scripting interpreter in command position: at the start of the
// command or right after `;`, `&`, `|`, `&&`, `||`, `$(`, or a backtick,
// followed by at least one argument (the trailing `\s`).
const SCRIPTING_PATTERN =
  /(?:^|[;&|]\s*|&&\s*|\|\|\s*|\$\(\s*|`\s*)(?:python[23]?|perl|ruby|php|lua|node\s+-e|bash\s+-c|sh\s+-c)\s/;
/**
 * Pi extension entry point: intercepts `bash` tool calls and blocks
 * commands that launch common scripting interpreters (python, perl, ruby,
 * php, lua, node -e, inline bash/sh), steering the agent toward `nu -c`.
 * NOTE(review): SCRIPTING_PATTERN is a positional heuristic — it does not
 * tokenize the command, so unusual quoting may evade or trip it.
 */
export default function (pi: ExtensionAPI) {
  pi.on("tool_call", async (event, _ctx) => {
    // Only bash tool invocations are relevant.
    if (!isToolCallEventType("bash", event)) return;
    const command = event.input.command.trim();
    if (SCRIPTING_PATTERN.test(command)) {
      return {
        block: true,
        reason:
          "Do not use python, perl, ruby, php, lua, node -e, or inline bash/sh for scripting. Use `nu -c` instead.",
      };
    }
  });
}

View File

@@ -1,687 +0,0 @@
import { readFile, writeFile, mkdir, readdir } from "node:fs/promises";
import * as fs from "node:fs";
import * as os from "node:os";
import * as path from "node:path";
import * as crypto from "node:crypto";
import { Box, Text } from "@mariozechner/pi-tui";
import type { ExtensionAPI, ExtensionContext, ExtensionCommandContext, Model } from "@mariozechner/pi-coding-agent";
import {
createAgentSession,
DefaultResourceLoader,
getAgentDir,
SessionManager,
SettingsManager,
} from "@mariozechner/pi-coding-agent";
// Job description for one Notability note ingest. Fields mirror the
// manifest produced by the ingest pipeline (written elsewhere — only
// consumption is visible here).
interface IngestManifest {
  version: number;
  job_id: string;
  note_id: string;
  operation: string;
  requested_at: string; // ISO timestamp; its date part becomes `created` in buildMarkdown
  title: string; // fallback title when the note has none (see extractTitle)
  source_relpath: string;
  source_path: string;
  input_path: string;
  archive_path: string; // archived source file; its extension drives source_format
  output_path: string;
  transcript_path: string;
  result_path: string;
  session_dir: string;
  source_hash: string; // hex digest, emitted as `sha256:<hash>` in frontmatter
  last_generated_output_hash?: string | null;
  force_overwrite_generated?: boolean;
  source_transport?: string; // defaults to "webdav" when absent (see buildMarkdown)
}
// Outcome record for an ingest job. NOTE(review): producers/consumers of
// this shape are outside the visible chunk — optional fields presumably
// appear only on success (or `error` on failure); confirm against callers.
interface IngestResult {
  success: boolean;
  job_id: string;
  note_id: string;
  archive_path: string;
  source_hash: string;
  session_dir: string;
  output_path?: string;
  output_hash?: string;
  conflict_path?: string; // see conflictPathFor for the naming scheme
  write_mode?: "create" | "overwrite" | "force-overwrite" | "conflict";
  updated_main_output?: boolean;
  transcript_path?: string;
  error?: string;
}
// Result of parseFrontmatter: flat key/value pairs from the YAML-ish
// frontmatter block plus the document body with that block removed.
interface FrontmatterInfo {
  values: Record<string, string>;
  body: string;
}
// A rendered note page: its file path plus a base64 image attachment in
// the shape passed to session.prompt / the pi CLI.
interface RenderedPage {
  path: string;
  image: {
    type: "image";
    source: {
      type: "base64";
      mediaType: string;
      data: string;
    };
  };
}
// Skill names resolved through getSkillPath()/loadSkillText().
const TRANSCRIBE_SKILL = "notability-transcribe";
const NORMALIZE_SKILL = "notability-normalize";
// NOTE(review): STATUS_TYPE's usage is outside this view — presumably a custom message-type tag.
const STATUS_TYPE = "notability-status";
// Default thinking levels for the transcribe and normalize prompts.
const DEFAULT_TRANSCRIBE_THINKING = "low" as const;
const DEFAULT_NORMALIZE_THINKING = "off" as const;
// [provider, model id] preferred for image-bearing prompts (see resolveModel).
const PREFERRED_VISION_MODEL: [string, string] = ["openai-codex", "gpt-5.4"];
// Root directory for output notes; overridable via NOTABILITY_NOTES_DIR.
function getNotesRoot(): string {
  return process.env.NOTABILITY_NOTES_DIR ?? path.join(os.homedir(), "Notes");
}
// State directory for the ingest pipeline; overridable via NOTABILITY_DATA_ROOT.
function getDataRoot(): string {
  return process.env.NOTABILITY_DATA_ROOT ?? path.join(os.homedir(), ".local", "share", "notability-ingest");
}
// Directory for rendered page images; overridable via NOTABILITY_RENDER_ROOT.
function getRenderRoot(): string {
  return process.env.NOTABILITY_RENDER_ROOT ?? path.join(getDataRoot(), "rendered-pages");
}
// Location of the notability helper files inside the pi agent directory.
function getNotabilityScriptDir(): string {
  return path.join(getAgentDir(), "notability");
}
// Path to a skill's SKILL.md under the agent's skills directory.
function getSkillPath(skillName: string): string {
  return path.join(getAgentDir(), "skills", skillName, "SKILL.md");
}
/**
 * Remove a leading `---` ... `---` frontmatter block from `text` and
 * return the trimmed remainder. Text without a complete frontmatter
 * block comes back trimmed but otherwise untouched.
 */
function stripFrontmatterBlock(text: string): string {
  const content = text.trim();
  if (!content.startsWith("---\n")) return content;
  const closingFence = content.indexOf("\n---\n", 4);
  // An unclosed block is treated as ordinary content.
  return closingFence === -1 ? content : content.slice(closingFence + 5).trim();
}
/**
 * Unwrap a code fence (```/```markdown/```md, case-insensitive) that
 * surrounds the entire text; returns trimmed content either way.
 */
function stripCodeFence(text: string): string {
  const content = text.trim();
  const fencePattern = /^```(?:markdown|md)?\n([\s\S]*?)\n```$/i;
  const fenced = content.match(fencePattern);
  return fenced ? fenced[1].trim() : content;
}
/**
 * Split `text` into frontmatter key/value pairs and the remaining body.
 * A surrounding code fence is removed first. Parsing is deliberately
 * naive: one `key: value` per line, first colon wins, no nesting.
 */
function parseFrontmatter(text: string): FrontmatterInfo {
  const content = stripCodeFence(text);
  const noFrontmatter: FrontmatterInfo = { values: {}, body: content };
  if (!content.startsWith("---\n")) return noFrontmatter;
  const end = content.indexOf("\n---\n", 4);
  if (end === -1) return noFrontmatter;
  const values: Record<string, string> = {};
  for (const line of content.slice(4, end).split("\n")) {
    const separator = line.indexOf(":");
    if (separator === -1) continue; // not a key: value line
    values[line.slice(0, separator).trim()] = line.slice(separator + 1).trim();
  }
  return { values, body: content.slice(end + 5).trim() };
}
/** Serialize a string as a double-quoted scalar; JSON quoting is valid YAML. */
function quoteYaml(value: string): string {
  const quoted = JSON.stringify(value);
  return quoted;
}
/** Hex-encoded SHA-256 digest of a string or Buffer. */
function sha256(content: string | Buffer): string {
  const hasher = crypto.createHash("sha256");
  hasher.update(content);
  return hasher.digest("hex");
}
/** Hex-encoded SHA-256 digest of a file's full contents. */
async function sha256File(filePath: string): Promise<string> {
  const buffer = await readFile(filePath);
  return sha256(buffer);
}
/**
 * Pick a title for a normalized note: the frontmatter `title` (with one
 * layer of surrounding quotes stripped) wins, then the first `# ` heading
 * in the body, then `fallbackTitle`.
 */
function extractTitle(normalized: string, fallbackTitle: string): string {
  const parsed = parseFrontmatter(normalized);
  // Remove leading/trailing single or double quotes around the YAML value.
  const frontmatterTitle = parsed.values.title?.replace(/^['"]|['"]$/g, "").trim();
  if (frontmatterTitle) return frontmatterTitle;
  const heading = parsed.body
    .split("\n")
    .map((line) => line.trim())
    .find((line) => line.startsWith("# "));
  if (heading) return heading.replace(/^#\s+/, "").trim();
  return fallbackTitle;
}
/**
 * Classify a file by extension: "pdf", "png", any other extension with
 * its dot removed, or "unknown" when there is no extension at all.
 */
function sourceFormat(filePath: string): string {
  const extension = path.extname(filePath).toLowerCase();
  switch (extension) {
    case ".pdf":
      return "pdf";
    case ".png":
      return "png";
    default: {
      const bare = extension.replace(/^\./, "");
      return bare || "unknown";
    }
  }
}
/**
 * Assemble the final managed markdown note: generated frontmatter plus
 * the normalized body. Any frontmatter in the normalized text itself is
 * discarded and replaced with the manifest-derived fields below.
 */
function buildMarkdown(manifest: IngestManifest, normalized: string): string {
  const parsed = parseFrontmatter(normalized);
  const title = extractTitle(normalized, manifest.title);
  // ISO timestamp with milliseconds removed, e.g. 2026-01-01T00:00:00Z.
  const now = new Date().toISOString().replace(/\.\d{3}Z$/, "Z");
  const created = manifest.requested_at.slice(0, 10); // date part only
  const body = parsed.body.trim();
  // Guarantee the note is never empty: fall back to a bare title heading.
  const outputBody = body.length > 0 ? body : `# ${title}\n`;
  return [
    "---",
    `title: ${quoteYaml(title)}`,
    `created: ${quoteYaml(created)}`,
    `updated: ${quoteYaml(now.slice(0, 10))}`,
    `source: ${quoteYaml("notability")}`,
    `source_transport: ${quoteYaml(manifest.source_transport ?? "webdav")}`,
    `source_relpath: ${quoteYaml(manifest.source_relpath)}`,
    `note_id: ${quoteYaml(manifest.note_id)}`,
    `managed_by: ${quoteYaml("notability-ingest")}`,
    `source_file: ${quoteYaml(manifest.archive_path)}`,
    `source_file_hash: ${quoteYaml(`sha256:${manifest.source_hash}`)}`,
    `source_format: ${quoteYaml(sourceFormat(manifest.archive_path))}`,
    `status: ${quoteYaml("active")}`,
    "tags:",
    " - handwritten",
    " - notability",
    "---",
    "",
    outputBody,
    "",
  ].join("\n");
}
/**
 * Build a sibling conflict filename for `outputPath`, e.g.
 * `note.conflict-2026-01-01T00-00-00Z.md`. The current UTC time is used,
 * with colons replaced by dashes and milliseconds dropped so the stamp is
 * filesystem-safe.
 */
function conflictPathFor(outputPath: string): string {
  const { dir, name, ext } = path.parse(outputPath);
  const stamp = new Date()
    .toISOString()
    .replace(/[:]/g, "-")
    .replace(/\.\d{3}Z$/, "Z");
  return path.join(dir, `${name}.conflict-${stamp}${ext}`);
}
/** Create the parent directory of `filePath` (and any ancestors) if missing. */
async function ensureParent(filePath: string): Promise<void> {
  await mkdir(path.dirname(filePath), { recursive: true });
}
/** Read a skill's SKILL.md and return its body with frontmatter removed. */
async function loadSkillText(skillName: string): Promise<string> {
  const raw = await readFile(getSkillPath(skillName), "utf8");
  return stripFrontmatterBlock(raw).trim();
}
/** Strip a leading `@` (pi's file-reference prefix) from a path argument. */
function normalizePathArg(arg: string): string {
  const hasAtPrefix = arg.startsWith("@");
  return hasAtPrefix ? arg.slice(1) : arg;
}
/**
 * Choose a model for note ingestion.
 * Priority: when images are NOT required, the session's current model wins
 * outright. When images ARE required: the designated PREFERRED_VISION_MODEL
 * first, then any image-capable model from a provider other than
 * opencode/opencode-go, then the session model (if image-capable), then the
 * first available match.
 * @param requireImage restrict candidates to models accepting image input
 * @throws Error when no model (or no image-capable model) is available
 */
function resolveModel(ctx: ExtensionContext, requireImage = false): Model {
  const available = ctx.modelRegistry.getAvailable();
  const matching = requireImage ? available.filter((model) => model.input.includes("image")) : available;
  if (matching.length === 0) {
    throw new Error(
      requireImage
        ? "No image-capable model configured for pi note ingestion"
        : "No available model configured for pi note ingestion",
    );
  }
  // Non-image requests short-circuit to the session's current model.
  if (ctx.model && (!requireImage || ctx.model.input.includes("image"))) {
    if (!requireImage) return ctx.model;
  }
  if (requireImage) {
    const [provider, id] = PREFERRED_VISION_MODEL;
    const preferred = matching.find((model) => model.provider === provider && model.id === id);
    if (preferred) return preferred;
    const subscriptionModel = matching.find(
      (model) => model.provider !== "opencode" && model.provider !== "opencode-go",
    );
    if (subscriptionModel) return subscriptionModel;
  }
  // NOTE(review): this repeats the earlier guard — for requireImage the
  // preferred/subscription candidates above deliberately outrank ctx.model.
  if (ctx.model && (!requireImage || ctx.model.input.includes("image"))) {
    return ctx.model;
  }
  return matching[0];
}
/**
 * Run a one-shot skill prompt against a model and return the trimmed output.
 *
 * Two execution paths:
 * - With `images`: shells out to the `pi` CLI wrapped in `timeout 45s`,
 *   passing each rendered page and the prompt (written to a temp file) as
 *   `@file` arguments. The temp file is removed in the `finally` block.
 *   Note: `systemPrompt` is not used on this path.
 * - Without images: builds an in-process agent session whose system prompt is
 *   overridden with `systemPrompt`, streams assistant text deltas into
 *   `output`, and falls back to the last assistant message's text parts if no
 *   deltas were observed.
 *
 * @param ctx Extension context providing cwd and the model registry.
 * @param systemPrompt System prompt override for the in-process path only.
 * @param prompt User prompt text.
 * @param images Rendered pages to attach; presence selects the CLI path.
 * @param thinkingLevel Thinking level forwarded to the CLI/session.
 * @returns Model output with any surrounding code fence stripped.
 */
async function runSkillPrompt(
  ctx: ExtensionContext,
  systemPrompt: string,
  prompt: string,
  images: RenderedPage[] = [],
  thinkingLevel: "off" | "low" = "off",
): Promise<string> {
  if (images.length > 0) {
    const model = resolveModel(ctx, true);
    const { execFile } = await import("node:child_process");
    // The prompt goes through a temp file so it can be passed as an @file argument.
    const promptPath = path.join(os.tmpdir(), `pi-note-ingest-${crypto.randomUUID()}.md`);
    await writeFile(promptPath, `${prompt}\n`);
    const args = [
      "45s", // wall-clock limit enforced by the `timeout` wrapper command
      "pi",
      "--model",
      `${model.provider}/${model.id}`,
      "--thinking",
      thinkingLevel,
      "--no-tools",
      "--no-session",
      "-p",
      ...images.map((page) => `@${page.path}`),
      `@${promptPath}`,
    ];
    try {
      const output = await new Promise<string>((resolve, reject) => {
        execFile("timeout", args, { cwd: ctx.cwd, env: process.env, maxBuffer: 10 * 1024 * 1024 }, (error, stdout, stderr) => {
          // Non-empty stdout wins even when the process reports an error
          // (presumably so usable output survives a nonzero exit — e.g. a
          // timeout after the model already answered).
          if ((stdout ?? "").trim().length > 0) {
            resolve(stdout);
            return;
          }
          if (error) {
            reject(new Error(stderr || stdout || error.message));
            return;
          }
          resolve(stdout);
        });
      });
      return stripCodeFence(output).trim();
    } finally {
      try {
        fs.unlinkSync(promptPath);
      } catch {
        // Ignore temp file cleanup failures.
      }
    }
  }
  // In-process path: minimal resource loader with everything disabled except
  // the system prompt override.
  const agentDir = getAgentDir();
  const settingsManager = SettingsManager.create(ctx.cwd, agentDir);
  const resourceLoader = new DefaultResourceLoader({
    cwd: ctx.cwd,
    agentDir,
    settingsManager,
    noExtensions: true,
    noPromptTemplates: true,
    noThemes: true,
    noSkills: true,
    systemPromptOverride: () => systemPrompt,
    appendSystemPromptOverride: () => [],
    agentsFilesOverride: () => ({ agentsFiles: [] }),
  });
  await resourceLoader.reload();
  const { session } = await createAgentSession({
    model: resolveModel(ctx, images.length > 0),
    thinkingLevel,
    sessionManager: SessionManager.inMemory(),
    modelRegistry: ctx.modelRegistry,
    resourceLoader,
    tools: [], // no tools: this is a pure text transformation
  });
  let output = "";
  const unsubscribe = session.subscribe((event) => {
    if (event.type === "message_update" && event.assistantMessageEvent.type === "text_delta") {
      output += event.assistantMessageEvent.delta;
    }
  });
  try {
    await session.prompt(prompt, {
      images: images.map((page) => page.image),
    });
  } finally {
    unsubscribe();
  }
  if (!output.trim()) {
    // Fallback when no deltas were streamed: read the final assistant message.
    const assistantMessages = session.messages.filter((message) => message.role === "assistant");
    const lastAssistant = assistantMessages.at(-1);
    if (lastAssistant && Array.isArray(lastAssistant.content)) {
      output = lastAssistant.content
        .filter((part) => part.type === "text")
        .map((part) => part.text)
        .join("");
    }
  }
  session.dispose();
  return stripCodeFence(output).trim();
}
/**
 * Rasterize a PDF into per-page PNG images under the render root.
 *
 * Runs `pdftoppm -png -r 200` into a job-specific directory, then loads each
 * produced page as a base64 image payload, sorted numerically by filename.
 *
 * @throws when pdftoppm fails or produces no pages.
 */
async function renderPdfPages(pdfPath: string, jobId: string): Promise<RenderedPage[]> {
  const renderDir = path.join(getRenderRoot(), jobId);
  await mkdir(renderDir, { recursive: true });
  const outputPrefix = path.join(renderDir, "page");
  const { execFile } = await import("node:child_process");
  await new Promise<void>((resolve, reject) => {
    execFile("pdftoppm", ["-png", "-r", "200", pdfPath, outputPrefix], (error) => {
      if (error) {
        reject(error);
      } else {
        resolve();
      }
    });
  });
  // Numeric-aware sort keeps page-2 before page-10.
  const produced = (await readdir(renderDir))
    .filter((name) => name.endsWith(".png"))
    .sort((a, b) => a.localeCompare(b, undefined, { numeric: true }));
  if (produced.length === 0) {
    throw new Error(`No rendered pages produced for ${pdfPath}`);
  }
  const pages: RenderedPage[] = [];
  for (const name of produced) {
    const pagePath = path.join(renderDir, name);
    const data = await readFile(pagePath);
    pages.push({
      path: pagePath,
      image: {
        type: "image",
        source: {
          type: "base64",
          mediaType: "image/png",
          data: data.toString("base64"),
        },
      },
    });
  }
  return pages;
}
/**
 * Load a single image file as a base64 page payload.
 *
 * Generalized from PNG-only to a small extension→media-type map so JPEG and
 * WebP inputs are also accepted; PNG behavior is unchanged. (The current
 * caller, renderInputPages, still routes only .png files here.)
 *
 * @throws for extensions outside the supported set.
 */
async function loadImagePage(imagePath: string): Promise<RenderedPage> {
  // Supported raster formats and their MIME media types.
  const mediaTypes: Record<string, string> = {
    ".png": "image/png",
    ".jpg": "image/jpeg",
    ".jpeg": "image/jpeg",
    ".webp": "image/webp",
  };
  const extension = path.extname(imagePath).toLowerCase();
  const mediaType = mediaTypes[extension];
  if (!mediaType) {
    throw new Error(`Unsupported image input format for ${imagePath}`);
  }
  const buffer = await readFile(imagePath);
  return {
    path: imagePath,
    image: {
      type: "image",
      source: {
        type: "base64",
        mediaType,
        data: buffer.toString("base64"),
      },
    },
  };
}
/**
 * Render an ingest input into page images, dispatching on file extension:
 * PDFs are rasterized page-by-page, PNGs are loaded directly.
 */
async function renderInputPages(inputPath: string, jobId: string): Promise<RenderedPage[]> {
  switch (path.extname(inputPath).toLowerCase()) {
    case ".pdf":
      return await renderPdfPages(inputPath, jobId);
    case ".png":
      return [await loadImagePage(inputPath)];
    default:
      throw new Error(`Unsupported Notability input format: ${inputPath}`);
  }
}
/**
 * Scan the notes tree for markdown files whose frontmatter marks them as
 * managed by notability-ingest for the given note id.
 *
 * Hidden entries are skipped; unreadable or malformed files are ignored.
 * @returns matching absolute paths, sorted.
 */
async function findManagedOutputs(noteId: string): Promise<string[]> {
  const matches: string[] = [];
  const pending: string[] = [getNotesRoot()];
  const unquote = (value?: string) => value?.replace(/^['"]|['"]$/g, "");
  while (pending.length > 0) {
    const dir = pending.pop();
    if (!dir || !fs.existsSync(dir)) continue;
    for (const entry of await readdir(dir, { withFileTypes: true })) {
      if (entry.name.startsWith(".")) continue;
      const entryPath = path.join(dir, entry.name);
      if (entry.isDirectory()) {
        pending.push(entryPath);
        continue;
      }
      if (!entry.isFile() || !entry.name.endsWith(".md")) continue;
      try {
        const { values } = parseFrontmatter(await readFile(entryPath, "utf8"));
        if (unquote(values.managed_by) === "notability-ingest" && unquote(values.note_id) === noteId) {
          matches.push(entryPath);
        }
      } catch {
        // Ignore unreadable or malformed files while scanning the notebook.
      }
    }
  }
  return matches.sort();
}
/**
 * Resolve the on-disk path of the managed output for a note.
 *
 * Prefers the configured path when it already holds the managed file for this
 * note id; otherwise scans the notes tree. Falls back to the configured path
 * when nothing is found, and refuses to guess between multiple matches.
 */
async function resolveManagedOutputPath(noteId: string, configuredOutputPath: string): Promise<string> {
  const unquote = (value?: string) => value?.replace(/^['"]|['"]$/g, "");
  if (fs.existsSync(configuredOutputPath)) {
    const { values } = parseFrontmatter(await readFile(configuredOutputPath, "utf8"));
    if (unquote(values.managed_by) === "notability-ingest" && unquote(values.note_id) === noteId) {
      return configuredOutputPath;
    }
  }
  const discovered = await findManagedOutputs(noteId);
  if (discovered.length === 0) return configuredOutputPath;
  if (discovered.length === 1) return discovered[0];
  throw new Error(
    `Multiple managed note files found for ${noteId}: ${discovered.join(", ")}`,
  );
}
/**
 * Decide which file the generated note should be written to and in what mode.
 *
 * Modes:
 * - "create": no file exists at the resolved output path.
 * - "overwrite": the existing file's hash matches the manifest's recorded
 *   last-generated hash, i.e. it has not changed since the last run.
 * - "force-overwrite": the manifest requests it AND the existing file is
 *   managed by notability-ingest for the same note id.
 * - "conflict": otherwise, write to a sibling conflict path and leave the
 *   main output untouched.
 *
 * NOTE(review): the `markdown` parameter is currently unused — the decision
 * depends only on the manifest and the file already on disk.
 */
async function determineWriteTarget(manifest: IngestManifest, markdown: string): Promise<{
  outputPath: string;
  writePath: string;
  writeMode: "create" | "overwrite" | "force-overwrite" | "conflict";
  updatedMainOutput: boolean;
}> {
  const outputPath = await resolveManagedOutputPath(manifest.note_id, manifest.output_path);
  if (!fs.existsSync(outputPath)) {
    return { outputPath, writePath: outputPath, writeMode: "create", updatedMainOutput: true };
  }
  const existing = await readFile(outputPath, "utf8");
  const existingHash = sha256(existing);
  const parsed = parseFrontmatter(existing);
  // Frontmatter values may carry surrounding quotes; strip them before comparing.
  const isManaged = parsed.values.managed_by?.replace(/^['"]|['"]$/g, "") === "notability-ingest";
  const sameNoteId = parsed.values.note_id?.replace(/^['"]|['"]$/g, "") === manifest.note_id;
  if (manifest.last_generated_output_hash && existingHash === manifest.last_generated_output_hash) {
    return { outputPath, writePath: outputPath, writeMode: "overwrite", updatedMainOutput: true };
  }
  if (manifest.force_overwrite_generated && isManaged && sameNoteId) {
    return { outputPath, writePath: outputPath, writeMode: "force-overwrite", updatedMainOutput: true };
  }
  return {
    outputPath,
    writePath: conflictPathFor(outputPath),
    writeMode: "conflict",
    updatedMainOutput: false,
  };
}
/** Serialize an IngestResult as pretty-printed JSON, creating parent dirs. */
async function writeIngestResult(resultPath: string, payload: IngestResult): Promise<void> {
  await ensureParent(resultPath);
  const serialized = JSON.stringify(payload, null, 2);
  await writeFile(resultPath, serialized);
}
/**
 * Execute one ingest job described by a manifest file.
 *
 * Pipeline:
 * 1. Parse the manifest and create the transcript/result/session locations.
 * 2. Render the input (PDF pages or a single PNG) into page images.
 * 3. Transcription pass: vision prompt over the rendered pages; the raw
 *    transcript is persisted before normalization so it survives a failed
 *    second pass.
 * 4. Normalization pass: text-only prompt using the normalize skill.
 * 5. Build the final markdown, choose a write target (create / overwrite /
 *    force-overwrite / conflict), write it, and persist an IngestResult JSON.
 *
 * @throws when either model pass returns empty output.
 */
async function ingestManifest(manifestPath: string, ctx: ExtensionContext): Promise<IngestResult> {
  const manifest = JSON.parse(await readFile(manifestPath, "utf8")) as IngestManifest;
  await ensureParent(manifest.transcript_path);
  await ensureParent(manifest.result_path);
  await mkdir(manifest.session_dir, { recursive: true });
  const normalizeSkill = await loadSkillText(NORMALIZE_SKILL);
  const pages = await renderInputPages(manifest.input_path, manifest.job_id);
  const pageSummary = pages.map((page, index) => `- page ${index + 1}: ${page.path}`).join("\n");
  const transcriptPrompt = [
    "Transcribe this note into clean Markdown.",
    "Read it like a human and preserve the intended reading order and visible structure.",
    "Keep headings, lists, and paragraphs when they are visible.",
    "Do not summarize. Do not add commentary. Return Markdown only.",
    "Rendered pages:",
    pageSummary,
  ].join("\n\n");
  // First pass: faithful transcription from the page images (empty system
  // prompt — the instructions travel in the user prompt).
  let transcript = await runSkillPrompt(
    ctx,
    "",
    transcriptPrompt,
    pages,
    DEFAULT_TRANSCRIBE_THINKING,
  );
  if (!transcript.trim()) {
    throw new Error("Transcription skill returned empty output");
  }
  await writeFile(manifest.transcript_path, `${transcript.trim()}\n`);
  // Second pass: text-only normalization using the normalize skill as the
  // system prompt.
  const normalizePrompt = [
    `Note ID: ${manifest.note_id}`,
    `Source path: ${manifest.source_relpath}`,
    `Preferred output path: ${manifest.output_path}`,
    "Normalize the following transcription into clean Markdown.",
    "Restore natural prose formatting and intended reading order when the transcription contains OCR or layout artifacts.",
    "If words are split across separate lines but clearly belong to the same phrase or sentence, merge them.",
    "Return only Markdown. No code fences.",
    "",
    "<transcription>",
    transcript.trim(),
    "</transcription>",
  ].join("\n");
  const normalized = await runSkillPrompt(
    ctx,
    normalizeSkill,
    normalizePrompt,
    [],
    DEFAULT_NORMALIZE_THINKING,
  );
  if (!normalized.trim()) {
    throw new Error("Normalization skill returned empty output");
  }
  const markdown = buildMarkdown(manifest, normalized);
  const target = await determineWriteTarget(manifest, markdown);
  await ensureParent(target.writePath);
  await writeFile(target.writePath, markdown);
  const result: IngestResult = {
    success: true,
    job_id: manifest.job_id,
    note_id: manifest.note_id,
    archive_path: manifest.archive_path,
    source_hash: manifest.source_hash,
    session_dir: manifest.session_dir,
    output_path: target.outputPath,
    // Only hash the main output when it was actually updated; conflict
    // writes leave the main output untouched.
    output_hash: target.updatedMainOutput ? await sha256File(target.writePath) : undefined,
    conflict_path: target.writeMode === "conflict" ? target.writePath : undefined,
    write_mode: target.writeMode,
    updated_main_output: target.updatedMainOutput,
    transcript_path: manifest.transcript_path,
  };
  await writeIngestResult(manifest.result_path, result);
  return result;
}
/**
 * Run a Nushell script from the Notability script directory and return its
 * trimmed stdout. Rejects with stderr (or stdout, or the exec error message)
 * when the process fails.
 */
async function runScript(scriptName: string, args: string[]): Promise<string> {
  const { execFile } = await import("node:child_process");
  const scriptPath = path.join(getNotabilityScriptDir(), scriptName);
  return await new Promise<string>((resolve, reject) => {
    execFile("nu", [scriptPath, ...args], (error, stdout, stderr) => {
      if (error) {
        const detail = stderr || stdout || error.message;
        reject(new Error(detail));
      } else {
        resolve(stdout.trim());
      }
    });
  });
}
/** Split a raw argument string on runs of whitespace, dropping empties. */
function splitArgs(input: string): string[] {
  const trimmed = input.trim();
  if (trimmed.length === 0) {
    return [];
  }
  return trimmed.split(/\s+/);
}
/** Post a status message into the chat using the extension's custom type. */
function postStatus(pi: ExtensionAPI, content: string): void {
  const statusMessage = {
    customType: STATUS_TYPE,
    content,
    display: true,
  };
  pi.sendMessage(statusMessage);
}
/**
 * Entry point for the Notability note-ingest extension.
 *
 * Registers a renderer for status messages plus three commands:
 * - /note-status: shell out to status.nu and show its output.
 * - /note-reingest: shell out to reingest.nu to enqueue a note.
 * - /note-ingest: run a queued job manifest through the ingest pipeline.
 *
 * Fix: the /note-ingest failure path used to re-read and re-parse the
 * manifest inside `catch`; a second read/parse failure there (or a failed
 * result write) would throw and mask the root-cause error. The manifest is
 * now parsed once up front and the failure report is strictly best-effort.
 */
export default function noteIngestExtension(pi: ExtensionAPI) {
  // Render status messages inside a themed box.
  pi.registerMessageRenderer(STATUS_TYPE, (message, _options, theme) => {
    const box = new Box(1, 1, (text) => theme.bg("customMessageBg", text));
    box.addChild(new Text(message.content, 0, 0));
    return box;
  });
  pi.registerCommand("note-status", {
    description: "Show Notability ingest status",
    handler: async (args, _ctx) => {
      const output = await runScript("status.nu", splitArgs(args));
      postStatus(pi, output.length > 0 ? output : "No status output");
    },
  });
  pi.registerCommand("note-reingest", {
    description: "Enqueue a note for reingestion",
    handler: async (args, _ctx) => {
      const trimmed = args.trim();
      if (!trimmed) {
        postStatus(pi, "Usage: /note-reingest <note-id> [--latest-source|--latest-archive] [--force-overwrite-generated]");
        return;
      }
      const output = await runScript("reingest.nu", splitArgs(trimmed));
      postStatus(pi, output.length > 0 ? output : "Reingest enqueued");
    },
  });
  pi.registerCommand("note-ingest", {
    description: "Ingest a queued Notability job manifest",
    handler: async (args, ctx: ExtensionCommandContext) => {
      const manifestPath = normalizePathArg(args.trim());
      if (!manifestPath) {
        throw new Error("Usage: /note-ingest <job.json>");
      }
      // Parse once so the failure path can reuse the manifest without a
      // second read that could itself throw inside `catch`.
      let manifest: IngestManifest | undefined;
      try {
        manifest = JSON.parse(await readFile(manifestPath, "utf8")) as IngestManifest;
        const result = await ingestManifest(manifestPath, ctx);
        postStatus(pi, `Ingested ${result.note_id} (${result.write_mode})`);
      } catch (error) {
        const message = error instanceof Error ? error.message : String(error);
        if (manifest?.result_path) {
          try {
            await writeIngestResult(manifest.result_path, {
              success: false,
              job_id: manifest.job_id,
              note_id: manifest.note_id,
              archive_path: manifest.archive_path,
              source_hash: manifest.source_hash,
              session_dir: manifest.session_dir,
              error: message,
            });
          } catch {
            // Failure report is best-effort; never mask the original error.
          }
        }
        throw error;
      }
    },
  });
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,260 +0,0 @@
import type { ExtensionAPI, ExtensionContext } from "@mariozechner/pi-coding-agent";
import {
createAgentSession,
DefaultResourceLoader,
getAgentDir,
SessionManager,
SettingsManager,
} from "@mariozechner/pi-coding-agent";
// Per-session flag so the turn_end hook only auto-names a session once.
interface SessionNameState {
  // Set after a title is applied, or when a pre-existing name is detected.
  hasAutoNamed: boolean;
}
// Fixed provider/id pair used for title generation.
const TITLE_MODEL = {
  provider: "openai-codex",
  id: "gpt-5.4-mini",
} as const;
// Hard cap, in characters, applied to generated titles.
const MAX_TITLE_LENGTH = 50;
// Generation attempts before falling back to truncated user text.
const MAX_RETRIES = 2;
// Maximum length of the fallback title derived from the user's message.
const FALLBACK_LENGTH = 50;
// Session entry type recording how the title was produced.
const TITLE_ENTRY_TYPE = "vendored-session-title";
// System prompt sent to the title model; output contract is "title text only".
const TITLE_SYSTEM_PROMPT = `You are generating a succinct title for a coding session based on the provided conversation.
Requirements:
- Maximum 50 characters
- Sentence case (capitalize only first word and proper nouns)
- Capture the main intent or task
- Reuse the user's exact words and technical terms
- Match the user's language
- No quotes, colons, or markdown formatting
- No generic titles like "Coding session" or "Help with code"
- No explanations or commentary
Output ONLY the title text. Nothing else.`;
/** True when the event's message carries a stopReason of "stop" (any case). */
function isTurnCompleted(event: unknown): boolean {
  if (typeof event !== "object" || event === null) return false;
  const { message } = event as { message?: unknown };
  if (typeof message !== "object" || message === null) return false;
  const { stopReason } = (message as { stopReason?: unknown });
  if (typeof stopReason !== "string") return false;
  return stopReason.toLowerCase() === "stop";
}
/**
 * Build a fallback title from raw user text: trim, and if it exceeds
 * FALLBACK_LENGTH, cut at the last word boundary and append an ellipsis.
 */
function buildFallbackTitle(userText: string): string {
  const text = userText.trim();
  if (text.length <= FALLBACK_LENGTH) {
    return text;
  }
  const cut = text.slice(0, FALLBACK_LENGTH - 3);
  const breakAt = cut.lastIndexOf(" ");
  const base = breakAt > 0 ? cut.slice(0, breakAt) : cut;
  return `${base}...`;
}
/**
 * Clean a raw model response into a usable title: strip thinking blocks,
 * wrapping quotes, markdown heading/emphasis markers and "Title:"-style
 * prefixes, keep the first non-empty line, and cap the length at a word
 * boundary with a trailing ellipsis.
 */
function postProcessTitle(raw: string): string {
  let title = raw
    .replace(/<thinking[\s\S]*?<\/thinking>\s*/g, "")
    .replace(/^["'`]+|["'`]+$/g, "")
    .replace(/^#+\s*/, "")
    .replace(/\*{1,2}(.*?)\*{1,2}/g, "$1")
    .replace(/_{1,2}(.*?)_{1,2}/g, "$1")
    .replace(/^(Title|Summary|Session)\s*:\s*/i, "");
  const firstNonEmptyLine = title
    .split("\n")
    .map((line) => line.trim())
    .find((line) => line.length > 0);
  title = (firstNonEmptyLine ?? title).trim();
  if (title.length > MAX_TITLE_LENGTH) {
    const clipped = title.slice(0, MAX_TITLE_LENGTH - 3);
    const lastSpace = clipped.lastIndexOf(" ");
    title = `${lastSpace > 0 ? clipped.slice(0, lastSpace) : clipped}...`;
  }
  return title;
}
/**
 * Text of the most recent user message, or null when none exists.
 * String content is returned as-is; array content yields its text parts
 * joined with spaces (non-array content yields null).
 */
function getLatestUserText(ctx: ExtensionContext): string | null {
  const entries = ctx.sessionManager.getEntries();
  for (let index = entries.length - 1; index >= 0; index -= 1) {
    const entry = entries[index];
    if (!entry || entry.type !== "message" || entry.message.role !== "user") continue;
    const { content } = entry.message as { content: unknown };
    if (typeof content === "string") return content;
    if (!Array.isArray(content)) return null;
    const textParts: string[] = [];
    for (const part of content) {
      if (typeof part !== "object" || part === null || !("type" in part)) continue;
      const candidate = part as { type: string; text?: string };
      if (candidate.type === "text" && typeof candidate.text === "string") {
        textParts.push(candidate.text);
      }
    }
    return textParts.join(" ");
  }
  return null;
}
/**
 * Text of the most recent assistant message, or null when none exists.
 * String content is returned as-is; array content yields its text parts
 * joined with newlines (non-array content yields null).
 */
function getLatestAssistantText(ctx: ExtensionContext): string | null {
  const entries = ctx.sessionManager.getEntries();
  for (let index = entries.length - 1; index >= 0; index -= 1) {
    const entry = entries[index];
    if (!entry || entry.type !== "message" || entry.message.role !== "assistant") continue;
    const { content } = entry.message as { content: unknown };
    if (typeof content === "string") return content;
    if (!Array.isArray(content)) return null;
    const textParts: string[] = [];
    for (const part of content) {
      if (typeof part !== "object" || part === null || !("type" in part)) continue;
      const candidate = part as { type: string; text?: string };
      if (candidate.type === "text" && typeof candidate.text === "string") {
        textParts.push(candidate.text);
      }
    }
    return textParts.join("\n");
  }
  return null;
}
/**
 * Locate the dedicated title model among models with configured credentials.
 * Distinguishes "registered but missing API key" from "not registered" in the
 * thrown error.
 */
function resolveModel(ctx: ExtensionContext) {
  const matchesTitleModel = (candidate: { provider: string; id: string }) =>
    candidate.provider === TITLE_MODEL.provider && candidate.id === TITLE_MODEL.id;
  const configured = ctx.modelRegistry.getAvailable().find(matchesTitleModel);
  if (configured) return configured;
  const registered = ctx.modelRegistry.getAll().some(matchesTitleModel);
  if (registered) {
    throw new Error(
      `Model ${TITLE_MODEL.provider}/${TITLE_MODEL.id} exists but has no configured API key.`,
    );
  }
  throw new Error(`Model ${TITLE_MODEL.provider}/${TITLE_MODEL.id} is not available.`);
}
/**
 * Generate a session title from the latest exchange using a throwaway
 * in-memory agent session whose system prompt is TITLE_SYSTEM_PROMPT.
 *
 * The conversation is wrapped in <conversation>/<user>/<assistant> tags and
 * streamed text deltas are accumulated; the session is always disposed.
 *
 * @param userText Latest user message text.
 * @param assistantText Latest assistant text; may be empty, in which case
 *   only the user message is included.
 * @returns the post-processed title (may be an empty string).
 */
async function generateTitle(userText: string, assistantText: string, ctx: ExtensionContext): Promise<string> {
  const agentDir = getAgentDir();
  const settingsManager = SettingsManager.create(ctx.cwd, agentDir);
  // Minimal resource loader: everything disabled except the title prompt.
  const resourceLoader = new DefaultResourceLoader({
    cwd: ctx.cwd,
    agentDir,
    settingsManager,
    noExtensions: true,
    noPromptTemplates: true,
    noThemes: true,
    noSkills: true,
    systemPromptOverride: () => TITLE_SYSTEM_PROMPT,
    appendSystemPromptOverride: () => [],
    agentsFilesOverride: () => ({ agentsFiles: [] }),
  });
  await resourceLoader.reload();
  const { session } = await createAgentSession({
    model: resolveModel(ctx),
    thinkingLevel: "off",
    sessionManager: SessionManager.inMemory(),
    modelRegistry: ctx.modelRegistry,
    resourceLoader,
  });
  let accumulated = "";
  const unsubscribe = session.subscribe((event) => {
    if (event.type === "message_update" && event.assistantMessageEvent.type === "text_delta") {
      accumulated += event.assistantMessageEvent.delta;
    }
  });
  const description = assistantText
    ? `<user>${userText}</user>\n<assistant>${assistantText}</assistant>`
    : `<user>${userText}</user>`;
  const userMessage = `<conversation>\n${description}\n</conversation>\n\nGenerate a title:`;
  try {
    await session.prompt(userMessage);
  } finally {
    unsubscribe();
    session.dispose();
  }
  return postProcessTitle(accumulated);
}
/**
 * Generate and apply a session title, retrying up to MAX_RETRIES times and
 * falling back to truncated user text when every attempt fails or returns an
 * empty title. Records a TITLE_ENTRY_TYPE entry describing the outcome.
 *
 * Does nothing when either the latest user or assistant text is empty.
 */
async function generateAndSetTitle(pi: ExtensionAPI, ctx: ExtensionContext): Promise<void> {
  const userText = getLatestUserText(ctx);
  if (!userText?.trim()) return;
  const assistantText = getLatestAssistantText(ctx) ?? "";
  if (!assistantText.trim()) return;
  let lastError: Error | null = null;
  for (let attempt = 1; attempt <= MAX_RETRIES; attempt += 1) {
    try {
      const title = await generateTitle(userText, assistantText, ctx);
      // An empty title is not an error — just try again (or fall through to
      // the fallback after the final attempt).
      if (!title) continue;
      pi.setSessionName(title);
      pi.appendEntry(TITLE_ENTRY_TYPE, {
        title,
        rawUserText: userText,
        rawAssistantText: assistantText,
        attempt,
        model: `${TITLE_MODEL.provider}/${TITLE_MODEL.id}`,
      });
      ctx.ui.notify(`Session: ${title}`, "info");
      return;
    } catch (error) {
      // Keep only the most recent failure for the fallback entry.
      lastError = error instanceof Error ? error : new Error(String(error));
    }
  }
  const fallback = buildFallbackTitle(userText);
  pi.setSessionName(fallback);
  pi.appendEntry(TITLE_ENTRY_TYPE, {
    title: fallback,
    fallback: true,
    error: lastError?.message ?? "Unknown error",
    rawUserText: userText,
    rawAssistantText: assistantText,
    model: `${TITLE_MODEL.provider}/${TITLE_MODEL.id}`,
  });
  ctx.ui.notify(`Title generation failed, using fallback: ${fallback}`, "warning");
}
/**
 * Hook that auto-names a session after its first completed turn, unless a
 * name already exists. The flag resets on session start/switch so each
 * session gets at most one naming attempt.
 */
export default function setupSessionNameHook(pi: ExtensionAPI) {
  const state: SessionNameState = { hasAutoNamed: false };
  const resetFlag = async () => {
    state.hasAutoNamed = false;
  };
  pi.on("session_start", resetFlag);
  pi.on("session_switch", resetFlag);
  pi.on("turn_end", async (event, ctx) => {
    if (state.hasAutoNamed) return;
    if (pi.getSessionName()) {
      // A name already exists; never overwrite it.
      state.hasAutoNamed = true;
      return;
    }
    if (!isTurnCompleted(event)) return;
    await generateAndSetTitle(pi, ctx);
    state.hasAutoNamed = true;
  });
}

View File

@@ -1,21 +0,0 @@
{
"mcpServers": {
"opensrc": {
"command": "npx",
"args": ["-y", "opensrc-mcp"],
"lifecycle": "eager"
},
"context7": {
"url": "https://mcp.context7.com/mcp",
"lifecycle": "eager"
},
"grep_app": {
"url": "https://mcp.grep.app",
"lifecycle": "eager"
},
"sentry": {
"url": "https://mcp.sentry.dev/mcp",
"auth": "oauth"
}
}
}

View File

@@ -1,143 +0,0 @@
---
name: jujutsu
description: Manages version control with Jujutsu (jj), including rebasing, conflict resolution, and Git interop. Use when tracking changes, navigating history, squashing/splitting commits, or pushing to Git remotes.
---
# Jujutsu
Git-compatible VCS focused on concurrent development and ease of use.
> ⚠️ **Not Git!** Jujutsu syntax differs from Git:
>
> - Parent: `@-` not `@~1` or `@^`
> - Grandparent: `@--` not `@~2`
> - Child: `@+` not `@~-1`
> - Use `jj log` not `jj changes`
## Key Commands
| Command | Description |
| -------------------------- | -------------------------------------------- |
| `jj st` | Show working copy status |
| `jj log` | Show change log |
| `jj diff` | Show changes in working copy |
| `jj new` | Create new change |
| `jj desc` | Edit change description |
| `jj squash` | Move changes to parent |
| `jj split` | Split current change |
| `jj rebase -s src -d dest` | Rebase changes |
| `jj absorb` | Move changes into stack of mutable revisions |
| `jj bisect` | Find bad revision by bisection |
| `jj fix` | Update files with formatting fixes |
| `jj sign` | Cryptographically sign a revision |
| `jj metaedit` | Modify metadata without changing content |
## Basic Workflow
```bash
jj new # Create new change
jj desc -m "feat: add feature" # Set description
jj log # View history
jj edit change-id # Switch to change
jj new --before @ # Time travel (create before current)
jj edit @- # Go to parent
```
## Time Travel
```bash
jj edit change-id # Switch to specific change
jj next --edit # Next child change
jj edit @- # Parent change
jj new --before @ -m msg # Insert before current
```
## Merging & Rebasing
```bash
jj new x yz -m msg # Merge changes
jj rebase -s src -d dest # Rebase source onto dest
jj abandon # Delete current change
```
## Conflicts
```bash
jj resolve # Interactive conflict resolution
# Edit files, then continue
```
## Revset Syntax
**Parent/child operators:**
| Syntax | Meaning | Example |
| ------ | ---------------- | -------------------- |
| `@-` | Parent of @ | `jj diff -r @-` |
| `@--` | Grandparent | `jj log -r @--` |
| `x-` | Parent of x | `jj diff -r abc123-` |
| `@+` | Child of @ | `jj log -r @+` |
| `x::y` | x to y inclusive | `jj log -r main::@` |
| `x..y` | x to y exclusive | `jj log -r main..@` |
| `x\|y` | Union (or) | `jj log -r 'a \| b'` |
**⚠️ Common mistakes:**
- ❌ `@~1` → ✅ `@-` (parent)
- ❌ `@^` → ✅ `@-` (parent)
- ❌ `@~-1` → ✅ `@+` (child)
- ❌ `jj changes` → ✅ `jj log` or `jj diff`
- ❌ `a,b,c` → ✅ `a | b | c` (union uses pipe, not comma)
**Functions:**
```bash
jj log -r 'heads(all())' # All heads
jj log -r 'remote_bookmarks()..' # Not on remote
jj log -r 'author(name)' # By author
jj log -r 'description(regex)' # By description
jj log -r 'mine()' # My commits
jj log -r 'committer_date(after:"7 days ago")' # Recent commits
jj log -r 'mine() & committer_date(after:"yesterday")' # My recent
```
## Templates
```bash
jj log -T 'commit_id ++ "\n" ++ description'
```
## Git Interop
```bash
jj bookmark create main -r @ # Create bookmark
jj git push --bookmark main # Push bookmark
jj git fetch # Fetch from remote
jj bookmark track main@origin # Track remote
```
## Advanced Commands
```bash
jj absorb # Auto-move changes to relevant commits in stack
jj bisect start # Start bisection
jj bisect good # Mark current as good
jj bisect bad # Mark current as bad
jj fix # Run configured formatters on files
jj sign -r @ # Sign current revision
jj metaedit -r @ -m "new message" # Edit metadata only
```
## Tips
- No staging: changes are immediate
- Use conventional commits: `type(scope): desc`
- `jj undo` to revert operations
- `jj op log` to see operation history
- Bookmarks are like branches
- `jj absorb` is powerful for fixing up commits in a stack
## Related Skills
- **gh**: GitHub CLI for PRs and issues
- **review**: Code review before committing

View File

@@ -1,36 +0,0 @@
---
name: notability-normalize
description: Normalizes an exact Notability transcription into clean, searchable Markdown while preserving all original content and uncertainty markers. Use after a faithful transcription pass.
---
# Notability Normalize
You are doing a **Markdown normalization** pass on a previously transcribed Notability note.
## Rules
- Do **not** summarize.
- Do **not** remove uncertainty markers such as `[unclear: ...]`.
- Preserve all substantive content from the transcription.
- Clean up only formatting and Markdown structure.
- Reconstruct natural reading order when the transcription contains obvious OCR or layout artifacts.
- Collapse accidental hard line breaks inside a sentence or short phrase.
- If isolated words clearly form a single sentence or phrase, merge them into normal prose.
- Prefer readable Markdown headings, lists, and tables.
- Keep content in the same overall order as the transcription.
- Do not invent content.
- Do not output code fences.
- Output Markdown only.
## Output
- Produce a clean Markdown document.
- Include a top-level `#` heading if the note clearly has a title.
- Use standard Markdown lists and checkboxes.
- Represent tables as Markdown tables when practical.
- Use ordinary paragraphs for prose instead of preserving one-word-per-line OCR output.
- Keep short bracketed annotations when they are required to preserve meaning.
## Important
The source PDF remains the ground truth. When in doubt, preserve ambiguity instead of cleaning it away.

View File

@@ -1,38 +0,0 @@
---
name: notability-transcribe
description: Faithfully transcribes handwritten or mixed handwritten/typed Notability note pages into Markdown without summarizing. Use when converting note page images or PDFs into an exact textual transcription.
---
# Notability Transcribe
You are doing a **faithful transcription** pass for handwritten Notability notes.
## Rules
- Preserve the original order of content.
- Reconstruct the intended reading order from the page layout.
- Read the page in the order a human would: top-to-bottom and left-to-right, while respecting obvious grouping.
- Do **not** summarize, explain, clean up, or reorganize beyond what is necessary to transcribe faithfully.
- Preserve headings, bullets, numbered items, checkboxes, tables, separators, callouts, and obvious layout structure.
- Do **not** preserve accidental OCR-style hard line breaks when the note is clearly continuous prose or a single phrase.
- If words are staggered on the page but clearly belong to the same sentence, combine them into normal lines.
- If text is uncertain, keep the uncertainty inline as `[unclear: ...]`.
- If a word is partially legible, include the best reading and uncertainty marker.
- If there is a drawing or diagram that cannot be represented exactly, describe it minimally in brackets, for example `[diagram: arrow from A to B]`.
- Preserve language exactly as written.
- Do not invent missing words.
- Do not output code fences.
- Output Markdown only.
## Output shape
- Use headings when headings are clearly present.
- Use `- [ ]` or `- [x]` for checkboxes when visible.
- Use bullet lists for bullet lists.
- Use normal paragraphs or single-line phrases for continuous prose instead of one word per line.
- Keep side notes in the position that best preserves reading order.
- Insert blank lines between major sections.
## Safety
If a page is partly unreadable, still transcribe everything you can and mark uncertain content with `[unclear: ...]`.

View File

@@ -56,7 +56,7 @@ def main [pr_number?: int] {
^mkdir -p $base ^mkdir -p $base
print $"Cloning ($repo) PR #($pr.number): ($pr.title)" print $"Cloning ($repo) PR #($pr.number): ($pr.title)"
gh repo clone $repo $dest jj git clone $"https://github.com/($repo).git" $dest
do { do {
cd $dest cd $dest

View File

@@ -1,141 +0,0 @@
#!/usr/bin/env nu
use ./lib.nu *
# True when a job for this (note_id, source_hash) pair is still in a
# non-terminal state (anything other than 'done' or 'failed').
# File-local helper; values are escaped via sql-quote, which is interpolated
# inside the $"" string.
def active-job-exists [note_id: string, source_hash: string] {
    let rows = (sql-json $"
        select job_id
        from jobs
        where note_id = (sql-quote $note_id)
        and source_hash = (sql-quote $source_hash)
        and status != 'done'
        and status != 'failed'
        limit 1;
    ")
    not ($rows | is-empty)
}
# Copy the source file into the archive and record a new row in `versions`.
# The row starts with ingest_result 'pending' and no session_path.
# Returns a record with the new version_id, seen_at timestamp, and archive path.
export def archive-and-version [note_id: string, source_path: path, source_relpath: string, source_size: any, source_mtime: string, source_hash: string] {
    # source_size may arrive as a string; normalize to int for the SQL literal.
    let source_size_int = ($source_size | into int)
    let archive_path = (archive-path-for $note_id $source_hash $source_relpath)
    cp $source_path $archive_path
    let version_id = (new-version-id)
    let seen_at = (now-iso)
    # Pre-quote every string value before splicing it into the statement.
    let version_id_q = (sql-quote $version_id)
    let note_id_q = (sql-quote $note_id)
    let seen_at_q = (sql-quote $seen_at)
    let archive_path_q = (sql-quote $archive_path)
    let source_hash_q = (sql-quote $source_hash)
    let source_mtime_q = (sql-quote $source_mtime)
    let source_relpath_q = (sql-quote $source_relpath)
    # Assemble the INSERT from quoted fragments (column order matches values).
    let sql = ([
        "insert into versions (version_id, note_id, seen_at, archive_path, source_hash, source_size, source_mtime, source_relpath, ingest_result, session_path) values ("
        $version_id_q
        ", "
        $note_id_q
        ", "
        $seen_at_q
        ", "
        $archive_path_q
        ", "
        $source_hash_q
        ", "
        ($source_size_int | into string)
        ", "
        $source_mtime_q
        ", "
        $source_relpath_q
        ", 'pending', null);"
    ] | str join '')
    sql-run $sql | ignore
    {
        version_id: $version_id
        seen_at: $seen_at
        archive_path: $archive_path
    }
}
# Create and enqueue an ingest job for a note.
# Writes a JSON manifest into the queued directory and inserts a 'queued' row
# into the `jobs` table. Returns a record describing the queued job, or null
# when an active (non-terminal) job already exists for the same note and
# source hash.
export def enqueue-job [
    note: record,
    operation: string,
    input_path: string,
    archive_path: string,
    source_hash: string,
    title: string,
    force_overwrite_generated: bool = false,
    source_transport: string = 'webdav',
] {
    # Deduplicate: skip when an equivalent job is still queued or running.
    if (active-job-exists $note.note_id $source_hash) {
        return null
    }
    let job_id = (new-job-id)
    let requested_at = (now-iso)
    let manifest_path = (manifest-path-for $job_id 'queued')
    let result_path = (result-path-for $job_id)
    let transcript_path = (transcript-path-for $note.note_id $job_id)
    let session_dir = ([(sessions-root) $note.note_id $job_id] | path join)
    mkdir $session_dir
    # Manifest consumed by the ingest worker; carries all paths and options.
    let manifest = {
        version: 1
        job_id: $job_id
        note_id: $note.note_id
        operation: $operation
        requested_at: $requested_at
        title: $title
        source_relpath: $note.source_relpath
        source_path: $note.source_path
        input_path: $input_path
        archive_path: $archive_path
        output_path: $note.output_path
        transcript_path: $transcript_path
        result_path: $result_path
        session_dir: $session_dir
        source_hash: $source_hash
        last_generated_output_hash: ($note.last_generated_output_hash? | default null)
        force_overwrite_generated: $force_overwrite_generated
        source_transport: $source_transport
    }
    ($manifest | to json --indent 2) | save -f $manifest_path
    # Pre-quote string values before splicing them into the INSERT.
    let job_id_q = (sql-quote $job_id)
    let note_id_q = (sql-quote $note.note_id)
    let operation_q = (sql-quote $operation)
    let requested_at_q = (sql-quote $requested_at)
    let source_hash_q = (sql-quote $source_hash)
    let manifest_path_q = (sql-quote $manifest_path)
    let result_path_q = (sql-quote $result_path)
    let sql = ([
        "insert into jobs (job_id, note_id, operation, status, requested_at, source_hash, job_manifest_path, result_path) values ("
        $job_id_q
        ", "
        $note_id_q
        ", "
        $operation_q
        ", 'queued', "
        $requested_at_q
        ", "
        $source_hash_q
        ", "
        $manifest_path_q
        ", "
        $result_path_q
        ");"
    ] | str join '')
    sql-run $sql | ignore
    {
        job_id: $job_id
        requested_at: $requested_at
        manifest_path: $manifest_path
        result_path: $result_path
        transcript_path: $transcript_path
        session_dir: $session_dir
    }
}

View File

@@ -1,433 +0,0 @@
# The current user's home directory.
export def home-dir [] {
    $nu.home-dir
}

# Root for durable data artifacts; honors $NOTABILITY_DATA_ROOT when set,
# otherwise ~/.local/share/notability-ingest.
export def data-root [] {
    $env.NOTABILITY_DATA_ROOT? | default ([$nu.home-dir ".local" "share" "notability-ingest"] | path join)
}

# Root for mutable runtime state; honors $NOTABILITY_STATE_ROOT when set,
# otherwise ~/.local/state/notability-ingest.
export def state-root [] {
    $env.NOTABILITY_STATE_ROOT? | default ([$nu.home-dir ".local" "state" "notability-ingest"] | path join)
}
# Directory that holds the generated markdown notes ($NOTABILITY_NOTES_DIR
# or ~/Notes).
export def notes-root [] {
    $env.NOTABILITY_NOTES_DIR? | default ([$nu.home-dir "Notes"] | path join)
}

# Directory served over WebDAV that Notability uploads into.
export def webdav-root [] {
    $env.NOTABILITY_WEBDAV_ROOT? | default ([(data-root) "webdav-root"] | path join)
}

# Directory where immutable source snapshots are archived.
export def archive-root [] {
    $env.NOTABILITY_ARCHIVE_ROOT? | default ([(data-root) "archive"] | path join)
}

# Directory for rendered page images.
export def render-root [] {
    $env.NOTABILITY_RENDER_ROOT? | default ([(data-root) "rendered-pages"] | path join)
}

# Directory for per-note transcription transcripts.
export def transcript-root [] {
    $env.NOTABILITY_TRANSCRIPT_ROOT? | default ([(state-root) "transcripts"] | path join)
}

# Root of the on-disk job queue.
export def jobs-root [] {
    $env.NOTABILITY_JOBS_ROOT? | default ([(state-root) "jobs"] | path join)
}
# Manifests for jobs waiting to be picked up.
export def queued-root [] {
    (jobs-root) | path join "queued"
}

# Manifests for jobs currently being processed.
export def running-root [] {
    (jobs-root) | path join "running"
}

# Manifests for jobs that ended in failure.
export def failed-root [] {
    (jobs-root) | path join "failed"
}

# Manifests for jobs that completed successfully.
export def done-root [] {
    (jobs-root) | path join "done"
}

# JSON result payloads written by the worker.
export def results-root [] {
    (jobs-root) | path join "results"
}
# Per-job working directories for agent sessions.
export def sessions-root [] {
    $env.NOTABILITY_SESSIONS_ROOT? | default ([(state-root) "sessions"] | path join)
}

# Marker file whose presence means the qmd index needs a rebuild.
export def qmd-dirty-file [] {
    (state-root) | path join "qmd-dirty"
}

# Location of the SQLite state database ($NOTABILITY_DB_PATH overrides).
export def db-path [] {
    $env.NOTABILITY_DB_PATH? | default ([(state-root) "db.sqlite"] | path join)
}
# ISO-8601 timestamp with second precision, used for every DB/state record.
export def now-iso [] {
    # Convert to UTC before formatting: the format string appends a literal
    # 'Z', which previously mislabeled the host's local time as UTC.
    date now | date to-timezone UTC | format date "%Y-%m-%dT%H:%M:%SZ"
}
# Render a value as a single-quoted SQL string literal, or the bare keyword
# NULL for null input. Embedded quotes are doubled per SQL escaping rules.
export def sql-quote [value?: any] {
    if $value == null {
        return "NULL"
    }
    let escaped = ($value | into string | str replace -a "'" "''")
    $"'($escaped)'"
}
# Execute a SQL statement against the state DB via the sqlite3 CLI and
# return its raw stdout. A 5s busy timeout rides out writer contention;
# a non-zero exit raises with sqlite3's stderr.
export def sql-run [sql: string] {
    let outcome = (^sqlite3 -cmd '.timeout 5000' (db-path) $sql | complete)
    if $outcome.exit_code != 0 {
        error make { msg: $"sqlite3 failed: ($outcome.stderr | str trim)" }
    }
    $outcome.stdout
}
# Execute a SQL query and parse sqlite3's -json output into a table.
# An empty result (sqlite3 prints nothing) becomes []. Raises on failure.
export def sql-json [sql: string] {
    let outcome = (^sqlite3 -cmd '.timeout 5000' -json (db-path) $sql | complete)
    if $outcome.exit_code != 0 {
        error make { msg: $"sqlite3 failed: ($outcome.stderr | str trim)" }
    }
    let body = ($outcome.stdout | str trim)
    if ($body | is-empty) {
        []
    } else {
        $body | from json
    }
}
# Create the full on-disk layout and the SQLite schema. Idempotent: mkdir
# skips existing directories and all DDL uses "if not exists".
export def ensure-layout [] {
    # mkdir accepts multiple paths — one call instead of fourteen.
    mkdir (data-root) (state-root) (notes-root) (webdav-root) (archive-root) (render-root) (transcript-root) (jobs-root) (queued-root) (running-root) (failed-root) (done-root) (results-root) (sessions-root)
    # Schema: notes (current per-note state), versions (archived source
    # snapshots), jobs (work queue), events (append-only audit log).
    sql-run '
    create table if not exists notes (
    note_id text primary key,
    source_relpath text not null unique,
    title text not null,
    output_path text not null,
    status text not null,
    first_seen_at text not null,
    last_seen_at text not null,
    last_processed_at text,
    missing_since text,
    deleted_at text,
    current_source_hash text,
    current_source_size integer,
    current_source_mtime text,
    current_archive_path text,
    latest_version_id text,
    last_generated_source_hash text,
    last_generated_output_hash text,
    conflict_path text,
    last_error text
    );
    create table if not exists versions (
    version_id text primary key,
    note_id text not null,
    seen_at text not null,
    archive_path text not null unique,
    source_hash text not null,
    source_size integer not null,
    source_mtime text not null,
    source_relpath text not null,
    ingest_result text,
    session_path text,
    foreign key (note_id) references notes (note_id)
    );
    create table if not exists jobs (
    job_id text primary key,
    note_id text not null,
    operation text not null,
    status text not null,
    requested_at text not null,
    started_at text,
    finished_at text,
    source_hash text,
    job_manifest_path text not null,
    result_path text not null,
    error_summary text,
    foreign key (note_id) references notes (note_id)
    );
    create table if not exists events (
    id integer primary key autoincrement,
    note_id text not null,
    ts text not null,
    kind text not null,
    details text,
    foreign key (note_id) references notes (note_id)
    );
    create index if not exists idx_jobs_status_requested_at on jobs(status, requested_at);
    create index if not exists idx_versions_note_id_seen_at on versions(note_id, seen_at);
    create index if not exists idx_events_note_id_ts on events(note_id, ts);
    '
    | ignore
}
# Append one row to the events audit table. `details`, when given, is
# serialized to JSON before storage.
export def log-event [note_id: string, kind: string, details?: any] {
    let serialized = if $details == null { null } else { $details | to json }
    let values = ([
        (sql-quote $note_id)
        (sql-quote (now-iso))
        (sql-quote $kind)
        (sql-quote $serialized)
    ] | str join ", ")
    sql-run ("insert into events (note_id, ts, kind, details) values (" + $values + ");") | ignore
}
# Turn arbitrary text into a filesystem-friendly slug: lowercase, collapse
# non-alphanumeric runs to '-', strip edge dashes. Falls back to 'note'
# when nothing survives.
export def slugify [value: string] {
    let cleaned = (
        $value
        | str downcase
        | str replace -r '[^a-z0-9]+' '-'
        | str trim -c '-'
    )
    if ($cleaned | is-empty) { 'note' } else { $cleaned }
}
# SHA-256 hex digest of a file's contents.
export def sha256 [file: path] {
    # Use Nushell's builtin hasher instead of shelling out to sha256sum:
    # same lowercase hex output, no external-tool dependency, and no string
    # splitting of sha256sum's "<hash>  <name>" line. Reads the file into
    # memory, which is fine for note-sized PDFs/PNGs.
    open --raw $file | hash sha256
}
# Parse the leading frontmatter block of a generated note into a record of
# raw string key/value pairs (quotes are NOT stripped — see `unquote`).
# Returns {} when the file is missing, does not start with "---\n", or the
# closing "\n---\n" is never found.
export def parse-output-frontmatter [file: path] {
    if not ($file | path exists) {
        {}
    } else {
        let content = (open --raw $file)
        if not ($content | str starts-with "---\n") {
            {}
        } else {
            # Drop the opening "---\n", then locate the closing delimiter.
            let rest = ($content | str substring 4..)
            let end = ($rest | str index-of "\n---\n")
            # NOTE(review): current Nushell's `str index-of` returns -1 (not
            # null) when absent, so this guard may never fire — confirm
            # against the Nushell version in use.
            if $end == null {
                {}
            } else {
                # Inclusive range: ($end - 1) keeps everything before "\n---".
                let block = ($rest | str substring 0..($end - 1))
                $block
                | lines
                | where ($it | str contains ':')
                | reduce --fold {} {|line, acc|
                    # Split on the FIRST ':' only; later colons stay in the value.
                    let idx = ($line | str index-of ':')
                    if $idx == null {
                        $acc
                    } else {
                        let key = ($line | str substring 0..($idx - 1) | str trim)
                        let value = ($line | str substring ($idx + 1).. | str trim)
                        $acc | upsert $key $value
                    }
                }
            }
        }
    }
}
# Ask zk (in dry-run mode) which path it would create for a note with this
# title, without actually creating anything. Blank titles fall back to
# 'Imported note'. Raises when zk fails or prints no path.
export def zk-generated-note-path [title: string] {
    let root = (notes-root)
    let effective_title = if ($title | str trim) == '' {
        'Imported note'
    } else {
        $title
    }
    let result = (
        ^zk --notebook-dir $root --working-dir $root new $root --no-input --title $effective_title --print-path --dry-run
        | complete
    )
    if $result.exit_code != 0 {
        error make {
            msg: $"zk failed to generate a note path: ($result.stderr | str trim)"
        }
    }
    # NOTE(review): the generated path is read from STDERR — presumably
    # `zk new --dry-run` reports it there; confirm with the installed zk.
    let path_text = ($result.stderr | str trim)
    if $path_text == '' {
        error make {
            msg: 'zk did not return a generated note path'
        }
    }
    # zk may emit extra lines; the path is expected on the last one.
    $path_text
    | lines
    | last
    | str trim
}
# 32-hex-character random suffix shared by all id constructors.
def id-suffix [] {
    random uuid | str replace -a '-' ''
}

# Fresh identifier for a tracked note (ntl_ prefix).
export def new-note-id [] {
    $"ntl_(id-suffix)"
}

# Fresh identifier for a queued job (job_ prefix).
export def new-job-id [] {
    $"job_(id-suffix)"
}

# Fresh identifier for an archived source version (ver_ prefix).
export def new-version-id [] {
    $"ver_(id-suffix)"
}
# Build the archive destination for one source snapshot:
# <archive-root>/<note_id>/<timestamp>-<hash prefix>.<ext>
# Extensionless sources get '.bin'. Creates the per-note directory.
export def archive-path-for [note_id: string, source_hash: string, source_relpath: string] {
    let stamp = (date now | format date "%Y-%m-%dT%H-%M-%SZ")
    let short_hash = ($source_hash | str substring 0..11)
    let parsed = ($source_relpath | path parse)
    let extension = if ((($parsed.extension? | default '') | str trim) | is-empty) {
        'bin'
    } else {
        $parsed.extension | str downcase
    }
    let target_dir = ([(archive-root) $note_id] | path join)
    mkdir $target_dir
    $target_dir | path join $"($stamp)-($short_hash).($extension)"
}
# Per-note transcript file for one job; creates the note's directory.
export def transcript-path-for [note_id: string, job_id: string] {
    let target_dir = ((transcript-root) | path join $note_id)
    mkdir $target_dir
    $target_dir | path join $"($job_id).md"
}
# JSON result payload location for a job.
export def result-path-for [job_id: string] {
    (results-root) | path join $"($job_id).json"
}
# Manifest location for a job, bucketed by lifecycle status directory.
# Unknown statuses fall back to the queued bucket.
export def manifest-path-for [job_id: string, status: string] {
    let bucket = if $status == 'running' {
        running-root
    } else if $status == 'failed' {
        failed-root
    } else if $status == 'done' {
        done-root
    } else {
        queued-root
    }
    $bucket | path join $"($job_id).json"
}
# Ask zk to pick the output markdown path for a new note with this title,
# so generated notes follow the notebook's naming scheme.
export def note-output-path [title: string] {
    zk-generated-note-path $title
}
# Only PDF and PNG exports are ingested; matching is case-insensitive.
export def is-supported-source-path [path: string] {
    let lowered = ($path | str downcase)
    ['.pdf' '.png'] | any {|ext| $lowered | str ends-with $ext }
}
# Paths to skip during scans: hidden files/directories, editor/partial
# temp files, and Syncthing-style .sync-conflict artifacts.
export def is-ignored-path [relpath: string] {
    let lowered = ($relpath | str downcase)
    (
        ($lowered | str starts-with '.')
        or ($lowered | str contains '/.')
        or ($lowered | str contains '/~')
        or ($lowered | str ends-with '.tmp')
        or ($lowered | str ends-with '.part')
        or ($lowered | str contains '.sync-conflict')
    )
}
# Enumerate supported source files under the WebDAV root. Returns one record
# per file (path, relpath, size, ISO mtime, title from the filename stem),
# skipping ignored paths. Returns [] when the root does not exist.
export def scan-source-files [] {
    let root = (webdav-root)
    if not ($root | path exists) {
        return []
    }
    let candidates = (
        ['pdf' 'PDF' 'png' 'PNG']
        | each {|ext| glob $"($root)/**/*.($ext)" }
        | flatten
        | sort
        | uniq
    )
    $candidates
    | each {|file|
        let relpath = ($file | path relative-to $root)
        if ((is-ignored-path $relpath) or not (is-supported-source-path $file)) {
            null
        } else {
            let stat = (ls -l $file | first)
            {
                source_path: $file
                source_relpath: $relpath
                source_size: $stat.size
                source_mtime: ($stat.modified | format date "%Y-%m-%dT%H:%M:%SZ")
                title: (($relpath | path parse).stem)
            }
        }
    }
    | where $it != null
}

View File

@@ -1,387 +0,0 @@
#!/usr/bin/env nu
use ./lib.nu *
use ./jobs.nu [archive-and-version, enqueue-job]
const settle_window = 45sec
const delete_grace = 15min
# How long until a freshly-modified source counts as settled (0sec when it
# already is). Uploads are given $settle_window to finish writing.
def settle-remaining [source_mtime: string] {
    let age = ((date now) - ($source_mtime | into datetime))
    if $age >= $settle_window {
        0sec
    } else {
        $settle_window - $age
    }
}
# A source is settled once it has gone unmodified for the settle window.
def is-settled [source_mtime: string] {
    let age = ((date now) - ($source_mtime | into datetime))
    $age >= $settle_window
}
# Record a 'job-enqueued' audit event for the note.
def log-job-enqueued [note_id: string, job_id: string, operation: string, source_hash: string, archive_path: string] {
    let details = {
        job_id: $job_id
        operation: $operation
        source_hash: $source_hash
        archive_path: $archive_path
    }
    log-event $note_id 'job-enqueued' $details
}
# Find at most one note whose last recorded content hash equals
# `source_hash` but whose source file is currently gone — i.e. the same
# file re-appearing under a new path after a rename. Excludes notes that
# are 'active' (still present), 'failed', or 'conflict'.
def find-rename-candidate [source_hash: string] {
    sql-json $"
    select *
    from notes
    where current_source_hash = (sql-quote $source_hash)
    and status != 'active'
    and status != 'failed'
    and status != 'conflict'
    order by last_seen_at desc
    limit 1;
    "
}
# Refresh last-seen bookkeeping for a note (size, mtime, status — default
# 'active') WITHOUT touching its content hash or archive pointers.
def touch-note [note_id: string, source_size: any, source_mtime: string, status: string = 'active'] {
    let source_size_int = ($source_size | into int)
    let now_q = (sql-quote (now-iso))
    let source_mtime_q = (sql-quote $source_mtime)
    let status_q = (sql-quote $status)
    let note_id_q = (sql-quote $note_id)
    sql-run $"
    update notes
    set last_seen_at = ($now_q),
    current_source_size = ($source_size_int),
    current_source_mtime = ($source_mtime_q),
    status = ($status_q)
    where note_id = ($note_id_q);
    "
    | ignore
}
# Reconcile an already-tracked note against its current on-disk source.
# Paths, cheapest first:
#   1. not settled yet      -> just touch bookkeeping
#   2. no change suspected  -> refresh last_seen/title only (no hashing)
#   3. hash unchanged       -> refresh metadata; re-enqueue only if a prior
#                              run failed or generated output is stale
#   4. hash changed         -> archive a new version and enqueue an upsert
def process-existing [note: record, source: record] {
    let title = $source.title
    let note_id = ($note | get note_id)
    let note_status = ($note | get status)
    let source_size_int = ($source.source_size | into int)
    # Recently-modified files may still be uploading; record we saw them and
    # let a later pass do the real work.
    if not (is-settled $source.source_mtime) {
        touch-note $note_id $source_size_int $source.source_mtime $note_status
        return
    }
    let previous_size = ($note.current_source_size? | default (-1))
    let previous_mtime = ($note.current_source_mtime? | default '')
    let size_changed = ($previous_size != $source_size_int)
    let mtime_changed = ($previous_mtime != $source.source_mtime)
    # Regeneration is outstanding when the last generated hash lags the
    # current source hash.
    let needs_ingest = (($note.last_generated_source_hash? | default '') != ($note.current_source_hash? | default ''))
    # Hashing is the expensive step; skip it unless something hints at a
    # content change or outstanding work.
    let hash_needed = ($note.current_source_hash? | default null) == null or $size_changed or $mtime_changed or ($note_status != 'active') or $needs_ingest
    if not $hash_needed {
        let now_q = (sql-quote (now-iso))
        let title_q = (sql-quote $title)
        let note_id_q = (sql-quote $note_id)
        sql-run $"
        update notes
        set last_seen_at = ($now_q),
        status = 'active',
        title = ($title_q),
        missing_since = null,
        deleted_at = null
        where note_id = ($note_id_q);
        "
        | ignore
        return
    }
    let source_hash = (sha256 $source.source_path)
    # Content unchanged: refresh bookkeeping, then re-enqueue only when a
    # prior run failed or the generated output never materialized.
    if ($source_hash == ($note.current_source_hash? | default '')) {
        let now_q = (sql-quote (now-iso))
        let title_q = (sql-quote $title)
        let source_mtime_q = (sql-quote $source.source_mtime)
        let note_id_q = (sql-quote $note_id)
        # Preserve 'failed' so the retry below is attributable; everything
        # else becomes 'active'.
        let next_status = if $note_status == 'failed' { 'failed' } else { 'active' }
        sql-run $"
        update notes
        set last_seen_at = ($now_q),
        title = ($title_q),
        status = (sql-quote $next_status),
        missing_since = null,
        deleted_at = null,
        current_source_size = ($source_size_int),
        current_source_mtime = ($source_mtime_q)
        where note_id = ($note_id_q);
        "
        | ignore
        let should_enqueue = ($note_status == 'failed' or (($note.last_generated_source_hash? | default '') != $source_hash))
        if not $should_enqueue {
            return
        }
        # A retry needs an archived copy to work from; create one when the
        # note has never been archived.
        let archive_path = if (($note.current_archive_path? | default '') | str trim) == '' {
            let version = (archive-and-version $note_id $source.source_path $source.source_relpath $source_size_int $source.source_mtime $source_hash)
            let archive_path_q = (sql-quote $version.archive_path)
            let version_id_q = (sql-quote $version.version_id)
            sql-run $"
            update notes
            set current_archive_path = ($archive_path_q),
            latest_version_id = ($version_id_q)
            where note_id = ($note_id_q);
            "
            | ignore
            $version.archive_path
        } else {
            $note.current_archive_path
        }
        # Enrich the DB row with runtime-only fields the job manifest needs.
        let runtime_note = ($note | upsert source_path $source.source_path | upsert source_relpath $source.source_relpath | upsert output_path $note.output_path | upsert last_generated_output_hash ($note.last_generated_output_hash? | default null))
        let retry_job = (enqueue-job $runtime_note 'upsert' $archive_path $archive_path $source_hash $title)
        if $retry_job != null {
            log-job-enqueued $note_id $retry_job.job_id 'upsert' $source_hash $archive_path
            let reason = if $note_status == 'failed' {
                'retry-failed-note'
            } else {
                'missing-generated-output'
            }
            log-event $note_id 'job-reenqueued' {
                job_id: $retry_job.job_id
                reason: $reason
                source_hash: $source_hash
                archive_path: $archive_path
            }
        }
        return
    }
    # Content changed: snapshot the new bytes, point the note at them, clear
    # stale error state, and enqueue regeneration.
    let version = (archive-and-version $note_id $source.source_path $source.source_relpath $source_size_int $source.source_mtime $source_hash)
    let now_q = (sql-quote (now-iso))
    let title_q = (sql-quote $title)
    let source_hash_q = (sql-quote $source_hash)
    let source_mtime_q = (sql-quote $source.source_mtime)
    let archive_path_q = (sql-quote $version.archive_path)
    let version_id_q = (sql-quote $version.version_id)
    let note_id_q = (sql-quote $note_id)
    sql-run $"
    update notes
    set last_seen_at = ($now_q),
    title = ($title_q),
    status = 'active',
    missing_since = null,
    deleted_at = null,
    current_source_hash = ($source_hash_q),
    current_source_size = ($source_size_int),
    current_source_mtime = ($source_mtime_q),
    current_archive_path = ($archive_path_q),
    latest_version_id = ($version_id_q),
    last_error = null
    where note_id = ($note_id_q);
    "
    | ignore
    let runtime_note = ($note | upsert source_path $source.source_path | upsert source_relpath $source.source_relpath | upsert output_path $note.output_path | upsert last_generated_output_hash ($note.last_generated_output_hash? | default null))
    let job = (enqueue-job $runtime_note 'upsert' $version.archive_path $version.archive_path $source_hash $title)
    if $job != null {
        log-job-enqueued $note_id $job.job_id 'upsert' $source_hash $version.archive_path
    }
    log-event $note_id 'source-updated' {
        source_relpath: $source.source_relpath
        source_hash: $source_hash
        archive_path: $version.archive_path
    }
}
# Register a newly-discovered source file. If its hash matches a previously
# missing/deleted note, treat it as a rename and relink; otherwise create a
# fresh note row, archive the first version, and enqueue an upsert job.
def process-new [source: record] {
    # Still inside the upload settle window: a later pass will pick it up.
    if not (is-settled $source.source_mtime) {
        return
    }
    let source_hash = (sha256 $source.source_path)
    let source_size_int = ($source.source_size | into int)
    let rename_candidates = (find-rename-candidate $source_hash)
    if not ($rename_candidates | is-empty) {
        # Identical content seen before under another path: relink the old
        # note instead of creating a duplicate.
        let rename_candidate = ($rename_candidates | first)
        let source_relpath_q = (sql-quote $source.source_relpath)
        let title_q = (sql-quote $source.title)
        let now_q = (sql-quote (now-iso))
        let source_mtime_q = (sql-quote $source.source_mtime)
        let note_id_q = (sql-quote $rename_candidate.note_id)
        sql-run $"
        update notes
        set source_relpath = ($source_relpath_q),
        title = ($title_q),
        last_seen_at = ($now_q),
        status = 'active',
        missing_since = null,
        deleted_at = null,
        current_source_size = ($source_size_int),
        current_source_mtime = ($source_mtime_q)
        where note_id = ($note_id_q);
        "
        | ignore
        log-event $rename_candidate.note_id 'source-renamed' {
            from: $rename_candidate.source_relpath
            to: $source.source_relpath
        }
        return
    }
    let note_id = (new-note-id)
    let first_seen_at = (now-iso)
    # Resolve the output path up front so the notes row is complete from the
    # start (zk picks the filename).
    let output_path = (note-output-path $source.title)
    let version = (archive-and-version $note_id $source.source_path $source.source_relpath $source_size_int $source.source_mtime $source_hash)
    let note_id_q = (sql-quote $note_id)
    let source_relpath_q = (sql-quote $source.source_relpath)
    let title_q = (sql-quote $source.title)
    let output_path_q = (sql-quote $output_path)
    let first_seen_q = (sql-quote $first_seen_at)
    let source_hash_q = (sql-quote $source_hash)
    let source_mtime_q = (sql-quote $source.source_mtime)
    let archive_path_q = (sql-quote $version.archive_path)
    let version_id_q = (sql-quote $version.version_id)
    # first_seen_q doubles as last_seen_at for a brand-new row.
    let sql = ([
        "insert into notes (note_id, source_relpath, title, output_path, status, first_seen_at, last_seen_at, current_source_hash, current_source_size, current_source_mtime, current_archive_path, latest_version_id) values ("
        $note_id_q
        ", "
        $source_relpath_q
        ", "
        $title_q
        ", "
        $output_path_q
        ", 'active', "
        $first_seen_q
        ", "
        $first_seen_q
        ", "
        $source_hash_q
        ", "
        ($source_size_int | into string)
        ", "
        $source_mtime_q
        ", "
        $archive_path_q
        ", "
        $version_id_q
        ");"
    ] | str join '')
    sql-run $sql | ignore
    # Minimal runtime record with the fields the job manifest needs.
    let note = {
        note_id: $note_id
        source_relpath: $source.source_relpath
        source_path: $source.source_path
        output_path: $output_path
        last_generated_output_hash: null
    }
    let job = (enqueue-job $note 'upsert' $version.archive_path $version.archive_path $source_hash $source.title)
    if $job != null {
        log-job-enqueued $note_id $job.job_id 'upsert' $source_hash $version.archive_path
    }
    log-event $note_id 'source-discovered' {
        source_relpath: $source.source_relpath
        source_hash: $source_hash
        archive_path: $version.archive_path
        output_path: $output_path
    }
}
# Flag notes whose source no longer appears in the scan. Active notes move
# to 'source_missing'; after $delete_grace they move on to 'source_deleted'.
# `seen_relpaths` is the set of relpaths present in the current scan.
def mark-missing [seen_relpaths: list<string>] {
    let notes = (sql-json 'select note_id, source_relpath, status, missing_since from notes;')
    for note in $notes {
        # Still present on disk: nothing to do for this note.
        if ($seen_relpaths | any {|rel| $rel == $note.source_relpath }) {
            continue
        }
        if $note.status == 'active' {
            # First absence: start the grace-period clock.
            let missing_since = (now-iso)
            let missing_since_q = (sql-quote $missing_since)
            let note_id_q = (sql-quote $note.note_id)
            sql-run $"
            update notes
            set status = 'source_missing',
            missing_since = ($missing_since_q)
            where note_id = ($note_id_q);
            "
            | ignore
            log-event $note.note_id 'source-missing' {
                source_relpath: $note.source_relpath
            }
            continue
        }
        # Already missing: promote to deleted once the grace period elapses.
        if $note.status == 'source_missing' and ($note.missing_since? | default null) != null {
            let missing_since = ($note.missing_since | into datetime)
            if ((date now) - $missing_since) >= $delete_grace {
                let deleted_at = (now-iso)
                let deleted_at_q = (sql-quote $deleted_at)
                let note_id_q = (sql-quote $note.note_id)
                sql-run $"
                update notes
                set status = 'source_deleted',
                deleted_at = ($deleted_at_q)
                where note_id = ($note_id_q);
                "
                | ignore
                log-event $note.note_id 'source-deleted' {
                    source_relpath: $note.source_relpath
                }
            }
        }
    }
}
# One reconcile pass: scan the WebDAV tree, wait out the settle window for
# in-flight uploads, dispatch each source to process-new/process-existing,
# then flag notes whose sources vanished.
export def reconcile-run [] {
    ensure-layout
    mut sources = (scan-source-files)
    let unsettled = (
        $sources
        | each {|source|
            {
                source_path: $source.source_path
                remaining: (settle-remaining $source.source_mtime)
            }
        }
        | where remaining > 0sec
    )
    if not ($unsettled | is-empty) {
        # Sleep past the newest upload (plus slack), then rescan so sizes
        # and mtimes are final before anything gets hashed.
        let max_remaining = ($unsettled | get remaining | math max)
        print $"Waiting ($max_remaining) for recent Notability uploads to settle"
        sleep ($max_remaining + 2sec)
        $sources = (scan-source-files)
    }
    for source in $sources {
        let existing_rows = (sql-json $"
        select *
        from notes
        where source_relpath = (sql-quote $source.source_relpath)
        limit 1;
        ")
        if (($existing_rows | length) == 0) {
            process-new $source
        } else {
            # Attach the live filesystem path; the DB row only stores relpath.
            let existing = ($existing_rows | first)
            process-existing ($existing | upsert source_path $source.source_path) $source
        }
    }
    mark-missing ($sources | get source_relpath)
}
# Entry point: run a single reconcile pass.
def main [] {
    reconcile-run
}

View File

@@ -1,148 +0,0 @@
#!/usr/bin/env nu
use ./lib.nu *
use ./jobs.nu [archive-and-version, enqueue-job]
use ./worker.nu [worker-run]
# Most recently seen archived version for a note, or null when none exist.
def latest-version [note_id: string] {
    sql-json $"
    select *
    from versions
    where note_id = (sql-quote $note_id)
    order by seen_at desc
    limit 1;
    "
    # `get 0?` yields null for an empty result set; `first` would raise and
    # bypass the caller's explicit `if $version == null` handling.
    | get 0?
}
# Newest job for this note+hash that is neither done nor failed, or null
# when no such job exists.
def existing-active-job [note_id: string, source_hash: string] {
    sql-json $"
    select job_id
    from jobs
    where note_id = (sql-quote $note_id)
    and source_hash = (sql-quote $source_hash)
    and status != 'done'
    and status != 'failed'
    order by requested_at desc
    limit 1;
    "
    # `get 0?` yields null on an empty result; `first` would raise, while the
    # caller already copes with a missing record via `$existing.job_id?`.
    | get 0?
}
# Archive the note's current on-disk source as a new version and point the
# notes row at it. Returns the archived input/archive paths and the hash.
# Raises when the source file is gone.
def archive-current-source [note: record] {
    if not ($note.source_path | path exists) {
        error make {
            msg: $"Current source path is missing: ($note.source_path)"
        }
    }
    let source_hash = (sha256 $note.source_path)
    # Stat the file once instead of twice: cheaper, and avoids a race where
    # the file changes between reading its size and its mtime.
    let stat = (ls -l $note.source_path | first)
    let source_size = ($stat.size | into int)
    let source_mtime = ($stat.modified | format date "%Y-%m-%dT%H:%M:%SZ")
    let version = (archive-and-version $note.note_id $note.source_path $note.source_relpath $source_size $source_mtime $source_hash)
    sql-run $"
    update notes
    set current_source_hash = (sql-quote $source_hash),
    current_source_size = ($source_size),
    current_source_mtime = (sql-quote $source_mtime),
    current_archive_path = (sql-quote $version.archive_path),
    latest_version_id = (sql-quote $version.version_id),
    last_seen_at = (sql-quote (now-iso)),
    status = 'active',
    missing_since = null,
    deleted_at = null
    where note_id = (sql-quote $note.note_id);
    "
    | ignore
    {
        input_path: $version.archive_path
        archive_path: $version.archive_path
        source_hash: $source_hash
    }
}
# Enqueue a 'reingest' job for the note and immediately drain the worker
# queue. A null return from enqueue-job means an identical job is already
# active, in which case we only report it.
def enqueue-reingest-job [note: record, source_hash: string, input_path: string, archive_path: string, force_overwrite_generated: bool] {
    let job = (enqueue-job $note 'reingest' $input_path $archive_path $source_hash $note.title $force_overwrite_generated)
    if $job == null {
        let existing = (existing-active-job $note.note_id $source_hash)
        print $"Already queued: ($existing.job_id? | default 'unknown')"
        return
    }
    log-event $note.note_id 'reingest-enqueued' {
        job_id: $job.job_id
        source_hash: $source_hash
        archive_path: $archive_path
        force_overwrite_generated: $force_overwrite_generated
    }
    print $"Enqueued ($job.job_id) for ($note.note_id)"
    # Re-raise worker failures with a readable message (falls back to the
    # full error record when no message is present).
    try {
        worker-run --drain
    } catch {|error|
        error make {
            msg: (($error.msg? | default ($error | to nuon)) | into string)
        }
    }
}
# Entry point: re-enqueue ingestion for one note, feeding the job either the
# live source file or the latest archived version.
# --latest-source / --latest-archive force the input (mutually exclusive);
# by default an active note whose source still exists uses the source,
# anything else falls back to the archive.
def main [note_id: string, --latest-source, --latest-archive, --force-overwrite-generated] {
    ensure-layout
    # `get 0?` yields null for an unknown id; `first` would raise on the
    # empty result before the friendly error below could run.
    let note_row = (sql-json $"
    select *
    from notes
    where note_id = (sql-quote $note_id)
    limit 1;
    " | get 0?)
    let note = if $note_row == null {
        null
    } else {
        $note_row | upsert source_path ([ (webdav-root) $note_row.source_relpath ] | path join)
    }
    if $note == null {
        error make {
            msg: $"Unknown note id: ($note_id)"
        }
    }
    if $latest_source and $latest_archive {
        error make {
            msg: 'Choose only one of --latest-source or --latest-archive'
        }
    }
    let source_mode = if $latest_source {
        'source'
    } else if $latest_archive {
        'archive'
    } else if ($note.status == 'active' and ($note.source_path | path exists)) {
        'source'
    } else {
        'archive'
    }
    if $source_mode == 'source' {
        # Snapshot the live file first so the job works from an archive copy.
        let archived = (archive-current-source $note)
        enqueue-reingest-job $note $archived.source_hash $archived.input_path $archived.archive_path $force_overwrite_generated
        return
    }
    let version = (latest-version $note.note_id)
    if $version == null {
        error make {
            msg: $"No archived version found for ($note.note_id)"
        }
    }
    enqueue-reingest-job $note $version.source_hash $version.archive_path $version.archive_path $force_overwrite_generated
}

View File

@@ -1,202 +0,0 @@
#!/usr/bin/env nu
use ./lib.nu *
# Render the dashboard summary: key paths, note counts per status, and
# pending/running/failed job counts.
def format-summary [] {
    let counts = (sql-json '
    select status, count(*) as count
    from notes
    group by status
    order by status;
    ')
    let queue = (sql-json "
    select status, count(*) as count
    from jobs
    where status in ('queued', 'running', 'failed')
    group by status
    order by status;
    ")
    let lines = [
        $"notes db: (db-path)"
        $"webdav root: (webdav-root)"
        $"notes root: (notes-root)"
        ''
        'notes:'
    ]
    let note_statuses = ('active,source_missing,source_deleted,conflict,failed' | split row ',')
    let note_lines = (
        $note_statuses
        | each {|status|
            # A status with zero rows is absent from the grouped result;
            # `first` on the empty match list would raise, so guard first.
            let matches = ($counts | where {|row| ($row | get 'status') == $status })
            let count = if ($matches | is-empty) { 0 } else { (($matches | first).count? | default 0) }
            $" ($status): ($count)"
        }
    )
    let job_statuses = ('queued,running,failed' | split row ',')
    let job_lines = (
        $job_statuses
        | each {|status|
            # Same guard as above for job statuses with no rows.
            let matches = ($queue | where {|row| ($row | get 'status') == $status })
            let count = if ($matches | is-empty) { 0 } else { (($matches | first).count? | default 0) }
            $" ($status): ($count)"
        }
    )
    ($lines ++ $note_lines ++ ['' 'jobs:'] ++ $job_lines ++ ['']) | str join "\n"
}
# Detail view for one note: DB fields, output-file/frontmatter sanity info,
# the last five jobs and the last ten events. Raises on an unknown id.
def format-note [note_id: string] {
    # `get 0?` yields null for an unknown id; `first` would raise on the
    # empty result before the friendly error below could run.
    let note = (sql-json $"
    select *
    from notes
    where note_id = (sql-quote $note_id)
    limit 1;
    " | get 0?)
    if $note == null {
        error make {
            msg: $"Unknown note id: ($note_id)"
        }
    }
    let jobs = (sql-json $"
    select job_id, operation, status, requested_at, started_at, finished_at, source_hash, error_summary
    from jobs
    where note_id = (sql-quote $note_id)
    order by requested_at desc
    limit 5;
    ")
    let events = (sql-json $"
    select ts, kind, details
    from events
    where note_id = (sql-quote $note_id)
    order by ts desc
    limit 10;
    ")
    let output_exists = ($note.output_path | path exists)
    let frontmatter = (parse-output-frontmatter $note.output_path)
    let lines = [
        $"note_id: ($note.note_id)"
        $"title: ($note.title)"
        $"status: ($note.status)"
        $"source_relpath: ($note.source_relpath)"
        $"output_path: ($note.output_path)"
        $"output_exists: ($output_exists)"
        $"managed_by: ($frontmatter.managed_by? | default '')"
        $"frontmatter_note_id: ($frontmatter.note_id? | default '')"
        $"current_source_hash: ($note.current_source_hash? | default '')"
        $"last_generated_output_hash: ($note.last_generated_output_hash? | default '')"
        $"current_archive_path: ($note.current_archive_path? | default '')"
        $"last_processed_at: ($note.last_processed_at? | default '')"
        $"missing_since: ($note.missing_since? | default '')"
        $"deleted_at: ($note.deleted_at? | default '')"
        $"conflict_path: ($note.conflict_path? | default '')"
        $"last_error: ($note.last_error? | default '')"
        ''
        'recent jobs:'
    ]
    let job_lines = if ($jobs | is-empty) {
        [' (none)']
    } else {
        $jobs | each {|job|
            $" ($job.job_id) [($job.status)] ($job.operation) requested=($job.requested_at) error=($job.error_summary? | default '')"
        }
    }
    let event_lines = if ($events | is-empty) {
        [' (none)']
    } else {
        $events | each {|event|
            $" ($event.ts) ($event.kind) ($event.details? | default '')"
        }
    }
    ($lines ++ $job_lines ++ ['' 'recent events:'] ++ $event_lines ++ ['']) | str join "\n"
}
# List all notes in one status under a heading; conflict and failed rows
# get their conflict_path / last_error appended.
def format-filtered [status: string, label: string] {
    let notes = (sql-json $"
    select note_id, title, source_relpath, output_path, status, last_error, conflict_path
    from notes
    where status = (sql-quote $status)
    order by last_seen_at desc;
    ")
    let body = if ($notes | is-empty) {
        [' (none)']
    } else {
        $notes | each {|note|
            let extra = match $status {
                'conflict' => $" conflict_path=($note.conflict_path? | default '')"
                'failed' => $" last_error=($note.last_error? | default '')"
                _ => ''
            }
            $" ($note.note_id) ($note.title) [($note.status)] source=($note.source_relpath) output=($note.output_path)($extra)"
        }
    }
    ([$label] ++ $body ++ ['']) | str join "\n"
}
# Render the queued/running/failed jobs, oldest request first.
def format-queue [] {
    let jobs = (sql-json "
    select job_id, note_id, operation, status, requested_at, started_at, error_summary
    from jobs
    where status in ('queued', 'running', 'failed')
    order by requested_at asc;
    ")
    if ($jobs | is-empty) {
        return (['queue' ' (empty)' ''] | str join "\n")
    }
    let rows = (
        $jobs | each {|job|
            $" ($job.job_id) note=($job.note_id) [($job.status)] ($job.operation) requested=($job.requested_at) error=($job.error_summary? | default '')"
        }
    )
    (['queue'] ++ $rows ++ ['']) | str join "\n"
}
# Entry point. With no arguments print the summary; flags select listing
# views; a note id prints that note's detail view.
# Precedence when combined: --queue, then --failed, --deleted, --conflicts,
# then the positional note id.
def main [note_id?: string, --failed, --queue, --deleted, --conflicts] {
    ensure-layout
    if $queue {
        print (format-queue)
        return
    }
    if $failed {
        print (format-filtered 'failed' 'failed notes')
        return
    }
    if $deleted {
        print (format-filtered 'source_deleted' 'deleted notes')
        return
    }
    if $conflicts {
        print (format-filtered 'conflict' 'conflict notes')
        return
    }
    if $note_id != null {
        print (format-note $note_id)
        return
    }
    print (format-summary)
}

View File

@@ -1,58 +0,0 @@
#!/usr/bin/env nu
use ./lib.nu *
use ./reconcile.nu [reconcile-run]
use ./worker.nu [worker-run]
# Human-readable text for a caught error: its msg when present, otherwise
# the whole error record rendered as nuon.
def error-message [error: any] {
    let described = (($error.msg? | default '') | into string)
    if ($described | is-empty) {
        return ($error | to nuon)
    }
    $described
}
# Drain the job queue, reporting (not propagating) worker failures so the
# surrounding watch loop keeps running.
def run-worker [] {
    try {
        worker-run --drain
    } catch {|error|
        print $"worker failed: (error-message $error)"
    }
}
# One sync cycle: drain any pending jobs, reconcile the source tree, then
# drain the jobs that reconcile just enqueued. A reconcile failure skips
# the second drain but never kills the loop.
def run-sync [] {
    run-worker
    try {
        reconcile-run
    } catch {|error|
        print $"reconcile failed: (error-message $error)"
        return
    }
    run-worker
}
# Entry point: run one sync immediately, then resync on filesystem events.
# inotifywait -m streams one changed path per line forever, so the pipeline
# below never terminates on its own.
def main [] {
    ensure-layout
    let root = (webdav-root)
    print $"Watching ($root) for Notability WebDAV updates"
    run-sync
    ^inotifywait -m -r --format '%w%f' -e create -e close_write -e moved_to -e moved_from -e delete -e attrib $root
    | lines
    | each {|changed_path|
        # Only PDF/PNG paths trigger a sync; everything else is noise.
        # NOTE(review): `return` inside an `each` closure — presumably meant
        # to skip to the next event; confirm it doesn't abort the stream.
        if not (is-supported-source-path $changed_path) {
            return
        }
        print $"Filesystem event for ($changed_path)"
        run-sync
    }
}

View File

@@ -1,36 +0,0 @@
#!/usr/bin/env nu
use ./lib.nu *
# Entry point: serve the WebDAV root via rclone for Notability auto-backup.
# Address, user and base URL come from env vars with defaults; the password
# MUST be supplied through the file named by NOTABILITY_WEBDAV_PASSWORD_FILE.
def main [] {
    ensure-layout
    let root = (webdav-root)
    let addr = if ('NOTABILITY_WEBDAV_ADDR' in ($env | columns)) {
        $env.NOTABILITY_WEBDAV_ADDR
    } else {
        '127.0.0.1:9980'
    }
    let user = if ('NOTABILITY_WEBDAV_USER' in ($env | columns)) {
        $env.NOTABILITY_WEBDAV_USER
    } else {
        'notability'
    }
    let baseurl = if ('NOTABILITY_WEBDAV_BASEURL' in ($env | columns)) {
        $env.NOTABILITY_WEBDAV_BASEURL
    } else {
        '/'
    }
    let password_file = if ('NOTABILITY_WEBDAV_PASSWORD_FILE' in ($env | columns)) {
        $env.NOTABILITY_WEBDAV_PASSWORD_FILE
    } else {
        error make {
            msg: 'NOTABILITY_WEBDAV_PASSWORD_FILE is required'
        }
    }
    let password = (open --raw $password_file | str trim)
    print $"Starting WebDAV on ($addr), serving ($root), base URL ($baseurl)"
    # NOTE(review): the plaintext password is passed as a CLI argument and is
    # visible in the process list; consider rclone's obscured-password or
    # htpasswd mechanisms instead.
    run-external rclone 'serve' 'webdav' $root '--addr' $addr '--baseurl' $baseurl '--user' $user '--pass' $password
}

View File

@@ -1,506 +0,0 @@
#!/usr/bin/env nu
use ./lib.nu *
const qmd_debounce = 1min
const idle_sleep = 10sec
const vision_model = 'openai-codex/gpt-5.4'
const transcribe_timeout = '90s'
const normalize_timeout = '60s'
# Oldest queued job, or null when the queue is empty.
def next-queued-job [] {
    sql-json "
    select job_id, note_id, operation, job_manifest_path, result_path, source_hash
    from jobs
    where status = 'queued'
    order by requested_at asc
    limit 1;
    "
    # `get 0?` yields null for an empty queue; `first` would raise on every
    # idle poll instead of letting the worker wait.
    | get 0?
}
# Rebuild the qmd index when the dirty marker exists and has been quiet for
# the debounce window. The marker is removed only after a successful update,
# so a failed run is retried on the next pass.
def maybe-update-qmd [] {
    let dirty = (qmd-dirty-file)
    if not ($dirty | path exists) {
        return
    }
    # Debounce: wait until no new dirty touches for $qmd_debounce.
    let modified = ((ls -l $dirty | first).modified)
    if ((date now) - $modified) < $qmd_debounce {
        return
    }
    print 'Running qmd update'
    # qmd must run from inside the notes tree; `do` scopes the cd.
    let result = (do {
        cd (notes-root)
        run-external qmd 'update' | complete
    })
    if $result.exit_code != 0 {
        print $"qmd update failed: ($result.stderr | str trim)"
        return
    }
    rm -f $dirty
}
# Persist a job result payload as pretty-printed JSON, creating the parent
# directory when needed.
def write-result [result_path: path, payload: record] {
    mkdir ($result_path | path dirname)
    $payload | to json --indent 2 | save -f $result_path
}
# Human-readable text for a caught error. Nushell's generic
# 'External command failed' carries no detail, so it — like an empty msg —
# falls back to rendering the whole error record as nuon.
def error-message [error: any] {
    let described = (($error.msg? | default '') | into string)
    if $described == '' or $described == 'External command failed' {
        return ($error | to nuon)
    }
    $described
}
# Strip one matching pair of surrounding double or single quotes from a
# value's string form; null becomes ''.
def unquote [value?: any] {
    if $value == null {
        return ''
    }
    $value
    | into string
    | str replace -r '^"(.*)"$' '$1'
    | str replace -r "^'(.*)'$" '$1'
}
# Lowercased file extension of a source path, or 'bin' when it has none.
def source-format [file: path] {
    let ext = (($file | path parse).extension? | default '')
    # `path parse` yields '' (not null) for an extensionless file, so a bare
    # `default 'bin'` never fired; check for emptiness explicitly, matching
    # the handling in archive-path-for.
    if ($ext | str trim | is-empty) {
        'bin'
    } else {
        $ext | str downcase
    }
}
# Sibling path for writing a conflicting copy without clobbering the
# user's file: <parent>/<stem>.conflict-<timestamp>.<ext>
def conflict-path-for [output_path: path] {
    let parsed = ($output_path | path parse)
    let stamp = (date now | format date '%Y-%m-%dT%H-%M-%SZ')
    $parsed.parent | path join $"($parsed.stem).conflict-($stamp).($parsed.extension)"
}
# Scan the notes tree for markdown files whose frontmatter declares them
# managed by notability-ingest for this specific note id. Returns a sorted
# list of paths; [] when the notes root doesn't exist.
def find-managed-outputs [note_id: string] {
    let root = (notes-root)
    if not ($root | path exists) {
        []
    } else {
        (glob $"($root)/**/*.md")
        # Skip anything inside hidden directories.
        | where not ($it | str contains '/.')
        | where {|file|
            # Frontmatter values keep their quotes; `unquote` normalizes
            # before comparing.
            let parsed = (parse-output-frontmatter $file)
            (unquote ($parsed.managed_by? | default '')) == 'notability-ingest' and (unquote ($parsed.note_id? | default '')) == $note_id
        }
        | sort
    }
}
# Resolve where a note's managed output file lives. Prefer the
# configured path when it already carries our managed frontmatter for
# this note_id (the file may have been moved since the manifest was
# written); otherwise fall back to discovery by note_id.
# Errors when more than one managed file claims the same note_id.
def resolve-managed-output-path [note_id: string, configured_output_path: path] {
if ($configured_output_path | path exists) {
let parsed = (parse-output-frontmatter $configured_output_path)
let managed_by = (unquote ($parsed.managed_by? | default ''))
let frontmatter_note_id = (unquote ($parsed.note_id? | default ''))
if ($managed_by == 'notability-ingest' and $frontmatter_note_id == $note_id) {
return $configured_output_path
}
}
# Configured path missing or owned by someone else: search the tree.
let discovered = (find-managed-outputs $note_id)
if ($discovered | is-empty) {
# Nothing managed exists yet; caller will create the configured path.
$configured_output_path
} else if (($discovered | length) == 1) {
$discovered | first
} else {
# Ambiguous ownership requires manual cleanup; refuse to guess.
error make {
msg: $"Multiple managed note files found for ($note_id): (($discovered | str join ', '))"
}
}
}
# Decide where and how the ingest output is written:
# - 'create':    resolved path does not exist yet
# - 'overwrite': existing file carries our managed frontmatter for this note
# - 'conflict':  a foreign file occupies the path; write beside it to a
#                timestamped .conflict file and leave the original alone.
# updated_main_output tells mark-success whether to refresh stored hashes.
def determine-write-target [manifest: record] {
let output_path = (resolve-managed-output-path $manifest.note_id $manifest.output_path)
if not ($output_path | path exists) {
return {
output_path: $output_path
write_path: $output_path
write_mode: 'create'
updated_main_output: true
}
}
# Existing file: only overwrite when its frontmatter proves we own it.
let parsed = (parse-output-frontmatter $output_path)
let managed_by = (unquote ($parsed.managed_by? | default ''))
let frontmatter_note_id = (unquote ($parsed.note_id? | default ''))
if ($managed_by == 'notability-ingest' and $frontmatter_note_id == $manifest.note_id) {
return {
output_path: $output_path
write_path: $output_path
write_mode: 'overwrite'
updated_main_output: true
}
}
# Foreign file: divert to a conflict sibling, never clobber.
{
output_path: $output_path
write_path: (conflict-path-for $output_path)
write_mode: 'conflict'
updated_main_output: false
}
}
# Assemble the final note: YAML frontmatter followed by the normalized
# transcription body. An empty body degrades to a bare title heading so
# the file is never contentless.
def build-markdown [manifest: record, normalized: string] {
let body = ($normalized | str trim)
let output_body = if $body == '' {
$"# ($manifest.title)"
} else {
$body
}
# requested_at is ISO-8601; its first 10 characters are YYYY-MM-DD.
let created = ($manifest.requested_at | str substring 0..9)
let updated = ((date now) | format date '%Y-%m-%d')
# Each scalar goes through `to json` so quoting/escaping keeps the YAML valid.
[
'---'
$"title: ($manifest.title | to json)"
$"created: ($created | to json)"
$"updated: ($updated | to json)"
'source: "notability"'
$"source_transport: (($manifest.source_transport? | default 'webdav') | to json)"
$"source_relpath: ($manifest.source_relpath | to json)"
$"note_id: ($manifest.note_id | to json)"
'managed_by: "notability-ingest"'
$"source_file: ($manifest.archive_path | to json)"
$"source_file_hash: ($'sha256:($manifest.source_hash)' | to json)"
$"source_format: ((source-format $manifest.archive_path) | to json)"
'status: "active"'
'tags:'
' - handwritten'
' - notability'
'---'
''
$output_body
''
] | str join "\n"
}
# Convert a note input file into a list of PNG page images for the
# vision model. PNGs pass through as a single page; PDFs are rasterized
# at 200 DPI with pdftoppm into a per-job directory. Other formats error.
def render-pages [input_path: path, job_id: string] {
let extension = (([$input_path] | path parse | first).extension? | default '' | str downcase)
if $extension == 'png' {
[ $input_path ]
} else if $extension == 'pdf' {
let render_dir = [(render-root) $job_id] | path join
mkdir $render_dir
let prefix = [$render_dir 'page'] | path join
^pdftoppm -png -r 200 $input_path $prefix
# NOTE(review): lexicographic sort assumes pdftoppm zero-pads page
# numbers (modern poppler does) — verify for multi-digit page counts.
let pages = ((glob $"($render_dir)/*.png") | sort)
if ($pages | is-empty) {
error make {
msg: $"No PNG pages rendered from ($input_path)"
}
}
$pages
} else {
error make {
msg: $"Unsupported Notability input format: ($input_path)"
}
}
}
# Invoke the `pi` CLI with the vision model over the given input files,
# under a hard timeout. The prompt is written to a temp file and passed
# as an @-reference (like the inputs); the temp file is removed on both
# the success and error paths. Returns trimmed stdout; errors when pi
# produced no output.
def call-pi [timeout_window: string, prompt: string, inputs: list<path>, thinking: string] {
let prompt_file = (^mktemp --suffix '.md' | str trim)
$prompt | save -f $prompt_file
# pi expects @-prefixed file references for attachments and prompts.
let input_refs = ($inputs | each {|input| $'@($input)' })
let prompt_ref = $'@($prompt_file)'
let result = (try {
# `complete` captures exit code/stderr, so a nonzero exit from
# timeout/pi does not throw here; the catch covers other failures.
^timeout $timeout_window pi --model $vision_model --thinking $thinking --no-tools --no-session -p ...$input_refs $prompt_ref | complete
} catch {|error|
rm -f $prompt_file
error make {
msg: (error-message $error)
}
})
rm -f $prompt_file
let output = ($result.stdout | str trim)
if $output != '' {
$output
} else {
# Empty stdout (e.g. the timeout killed pi) is a failure regardless
# of exit code; surface stderr when there is any.
let stderr = ($result.stderr | str trim)
if $stderr == '' {
error make {
msg: $"pi returned no output (exit ($result.exit_code))"
}
} else {
error make {
msg: $"pi returned no output (exit ($result.exit_code)): ($stderr)"
}
}
}
}
# Full ingest pipeline for one job: render page images, transcribe them
# with the vision model, normalize the transcript in a second pass,
# build the frontmattered markdown, and write it to the resolved target.
# Returns the result record consumed by write-result / mark-success.
def ingest-job [manifest: record] {
mkdir $manifest.session_dir
let page_paths = (render-pages $manifest.input_path $manifest.job_id)
let transcribe_prompt = ([
'Transcribe this note into clean Markdown.'
''
'Read it like a human and reconstruct the intended reading order and structure.'
''
'Do not preserve handwritten layout literally.'
''
'Handwritten line breaks, word stacking, font size changes, and spacing are not semantic structure by default.'
''
'If adjacent handwritten lines clearly belong to one sentence or short phrase, merge them into normal prose with spaces instead of separate Markdown lines.'
''
'Only keep separate lines or blank lines when there is clear evidence of separate paragraphs, headings, list items, checkboxes, or other distinct blocks.'
''
'Keep headings, lists, and paragraphs when they are genuinely present.'
''
'Do not summarize. Do not add commentary. Return Markdown only.'
] | str join "\n")
print $"Transcribing ($manifest.job_id) with page count ($page_paths | length)"
# Pass 1: vision transcription over the page images, low thinking.
let transcript = (call-pi $transcribe_timeout $transcribe_prompt $page_paths 'low')
mkdir ($manifest.transcript_path | path dirname)
$"($transcript)\n" | save -f $manifest.transcript_path
let normalize_prompt = ([
'Rewrite the attached transcription into clean Markdown.'
''
'Preserve the same content and intended structure.'
''
'Collapse layout-only line breaks from handwriting.'
''
'If short adjacent lines are really one sentence or phrase, join them with spaces instead of keeping one line per handwritten row.'
''
'Use separate lines only for real headings, list items, checkboxes, or distinct paragraphs.'
''
'Do not summarize. Return Markdown only.'
] | str join "\n")
print $"Normalizing ($manifest.job_id)"
# Pass 2: text-only cleanup of the saved transcript, thinking disabled.
let normalized = (call-pi $normalize_timeout $normalize_prompt [ $manifest.transcript_path ] 'off')
let markdown = (build-markdown $manifest $normalized)
let target = (determine-write-target $manifest)
mkdir ($target.write_path | path dirname)
$markdown | save -f $target.write_path
# Result record for DB bookkeeping; output_hash only when the main
# output was updated, conflict_path only for conflict writes.
{
success: true
job_id: $manifest.job_id
note_id: $manifest.note_id
archive_path: $manifest.archive_path
source_hash: $manifest.source_hash
session_dir: $manifest.session_dir
output_path: $target.output_path
output_hash: (if $target.updated_main_output { sha256 $target.write_path } else { null })
conflict_path: (if $target.write_mode == 'conflict' { $target.write_path } else { null })
write_mode: $target.write_mode
updated_main_output: $target.updated_main_output
transcript_path: $manifest.transcript_path
}
}
# Record a job failure:
# - jobs row -> 'failed' with error summary and final manifest path
# - notes row -> 'failed' with last_error
# - versions row -> 'failed' (only when the result names an archive_path)
# then move the manifest to the 'failed' location and log a job-failed event.
def mark-failure [job: record, running_path: string, error_summary: string, result?: any] {
let finished_at = (now-iso)
sql-run $"
update jobs
set status = 'failed',
finished_at = (sql-quote $finished_at),
error_summary = (sql-quote $error_summary),
job_manifest_path = (sql-quote (manifest-path-for $job.job_id 'failed'))
where job_id = (sql-quote $job.job_id);
update notes
set status = 'failed',
last_error = (sql-quote $error_summary)
where note_id = (sql-quote $job.note_id);
"
| ignore
# Version bookkeeping needs the archive path from a written result;
# recovery passes null when no result file ever existed.
if $result != null and ($result.archive_path? | default null) != null {
sql-run $"
update versions
set ingest_result = 'failed',
session_path = (sql-quote ($result.session_dir? | default ''))
where archive_path = (sql-quote $result.archive_path);
"
| ignore
}
let failed_path = (manifest-path-for $job.job_id 'failed')
if ($running_path | path exists) {
mv -f $running_path $failed_path
}
log-event $job.note_id 'job-failed' {
job_id: $job.job_id
error: $error_summary
}
}
# Record a successful job:
# - jobs row -> 'done'
# - notes row -> 'active' (or 'conflict' when the write was diverted),
#   with output path / hash bookkeeping
# - versions row -> 'success' for the ingested archive
# then move the manifest to 'done', touch the qmd dirty marker so
# maybe-update-qmd reindexes, and log a job-finished event.
def mark-success [job: record, running_path: string, result: record] {
let finished_at = (now-iso)
let note_status = if ($result.write_mode? | default 'write') == 'conflict' {
'conflict'
} else {
'active'
}
let output_path_q = (sql-quote ($result.output_path? | default null))
# When the main output was NOT rewritten (conflict case), preserve the
# stored hashes by assigning each column to itself in the UPDATE.
let output_hash_update = if ($result.updated_main_output? | default false) {
sql-quote ($result.output_hash? | default null)
} else {
'last_generated_output_hash'
}
let source_hash_update = if ($result.updated_main_output? | default false) {
sql-quote ($result.source_hash? | default null)
} else {
'last_generated_source_hash'
}
sql-run $"
update jobs
set status = 'done',
finished_at = (sql-quote $finished_at),
error_summary = null,
job_manifest_path = (sql-quote (manifest-path-for $job.job_id 'done'))
where job_id = (sql-quote $job.job_id);
update notes
set status = (sql-quote $note_status),
output_path = ($output_path_q),
last_processed_at = (sql-quote $finished_at),
last_generated_output_hash = ($output_hash_update),
last_generated_source_hash = ($source_hash_update),
conflict_path = (sql-quote ($result.conflict_path? | default null)),
last_error = null
where note_id = (sql-quote $job.note_id);
update versions
set ingest_result = 'success',
session_path = (sql-quote ($result.session_dir? | default ''))
where archive_path = (sql-quote $result.archive_path);
"
| ignore
let done_path = (manifest-path-for $job.job_id 'done')
if ($running_path | path exists) {
mv -f $running_path $done_path
}
# Signal that the notes tree changed; debounced reindex picks this up.
^touch (qmd-dirty-file)
log-event $job.note_id 'job-finished' {
job_id: $job.job_id
write_mode: ($result.write_mode? | default 'write')
output_path: ($result.output_path? | default '')
conflict_path: ($result.conflict_path? | default '')
}
}
# Startup recovery: any job still marked 'running' was interrupted by a
# previous worker crash. Mark each as failed, reusing its on-disk result
# file (if one was written) for version bookkeeping.
def recover-running-jobs [] {
let jobs = (sql-json "
select job_id, note_id, job_manifest_path, result_path
from jobs
where status = 'running'
order by started_at asc;
")
for job in $jobs {
let running_path = (manifest-path-for $job.job_id 'running')
let result = if ($job.result_path | path exists) {
open $job.result_path
} else {
null
}
mark-failure $job $running_path 'worker interrupted before completion' $result
}
}
# Claim and execute one queued job: move its manifest to the 'running'
# location, mark the DB row running, then run the ingest pipeline.
# Both outcomes persist a result JSON before the DB/manifest updates, so
# a crash in between still leaves evidence on disk for recovery.
def process-job [job: record] {
let running_path = (manifest-path-for $job.job_id 'running')
mv -f $job.job_manifest_path $running_path
sql-run $"
update jobs
set status = 'running',
started_at = (sql-quote (now-iso)),
job_manifest_path = (sql-quote $running_path)
where job_id = (sql-quote $job.job_id);
"
| ignore
print $"Processing ($job.job_id) for ($job.note_id)"
let manifest = (open $running_path)
try {
let result = (ingest-job $manifest)
write-result $job.result_path $result
mark-success $job $running_path $result
} catch {|error|
let message = (error-message $error)
# Failure result mirrors the success record's keys so downstream
# readers can rely on one shape.
let result = {
success: false
job_id: $manifest.job_id
note_id: $manifest.note_id
archive_path: $manifest.archive_path
source_hash: $manifest.source_hash
session_dir: $manifest.session_dir
error: $message
}
write-result $job.result_path $result
mark-failure $job $running_path $message $result
}
}
# Process queued jobs until the queue is empty, refreshing the qmd index
# after each job and once more before returning.
def drain-queued-jobs [] {
    loop {
        let job = (next-queued-job)
        if $job == null {
            maybe-update-qmd
            return
        }
        process-job $job
        maybe-update-qmd
    }
}
# Worker entry point: recover interrupted jobs, then either drain the
# queue once (--drain) or poll forever, sleeping while idle.
export def worker-run [--drain] {
    ensure-layout
    recover-running-jobs
    if $drain {
        drain-queued-jobs
        return
    }
    loop {
        let job = (next-queued-job)
        if $job != null {
            process-job $job
            maybe-update-qmd
        } else {
            maybe-update-qmd
            sleep $idle_sleep
        }
    }
}
# CLI entry point; forwards --drain (process queue once, then exit).
def main [--drain] {
worker-run --drain=$drain
}

View File

@@ -6,6 +6,7 @@
- `jj tug` is an alias for `jj bookmark move --from closest_bookmark(@-) --to @-`. - `jj tug` is an alias for `jj bookmark move --from closest_bookmark(@-) --to @-`.
- Never attempt historically destructive Git commands. - Never attempt historically destructive Git commands.
- Make small, frequent commits. - Make small, frequent commits.
- "Commit" means `jj commit`, not `jj desc`; `desc` stays on the same working copy.
## Scripting ## Scripting

View File

@@ -0,0 +1,49 @@
---
description: Turn pasted Albanian lesson into translated notes and solved exercises in zk
---
Process the pasted Albanian lesson content and create two `zk` notes: one for lesson material and one for exercises.
<lesson-material>
$ARGUMENTS
</lesson-material>
Requirements:
1. Parse the lesson content and produce two markdown outputs:
- `material` output: lesson material only.
- `exercises` output: exercises and solutions.
2. Use today's date in both notes (date in title and inside content).
3. In the `material` output:
- Keep clean markdown structure with headings and bullet points.
- Do not add a top-level title heading (no `# ...`) because `zk new --title` already sets the note title.
- Translate examples, dialogues, and all lesson texts into English when not already translated.
- For bigger reading passages, include a word-by-word breakdown.
- For declension/conjugation/grammar tables, provide a complete table of possibilities relevant to the topic.
- Spell out numbers only when the source token is Albanian; do not spell out English numbers.
4. In the `exercises` output:
- Include every exercise in markdown.
- Do not add a top-level title heading (no `# ...`) because `zk new --title` already sets the note title.
- Translate each exercise to English.
- Solve all non-free-writing tasks (multiple choice, fill in the blanks, etc.) and include example solutions.
- For free-writing tasks, provide expanded examples using basic vocabulary from the lesson (if prompted for 3, provide 10).
- Translate free-writing example answers into English.
- Spell out numbers only when the source token is Albanian; do not spell out English numbers.
Execution steps:
1. Generate two markdown contents in memory (do not create temporary files):
- `MATERIAL_CONTENT`
- `EXERCISES_CONTENT`
2. Set `TODAY="$(date +%F)"` once and reuse it for both notes.
3. Create note 1 with `zk` by piping markdown directly to stdin:
- Title format: `Albanian Lesson Material - YYYY-MM-DD`
- Command pattern:
- `printf "%s\n" "$MATERIAL_CONTENT" | zk new --interactive --title "Albanian Lesson Material - $TODAY" --date "$TODAY" --print-path`
4. Create note 2 with `zk` by piping markdown directly to stdin:
- Title format: `Albanian Lesson Exercises - YYYY-MM-DD`
- Command pattern:
- `printf "%s\n" "$EXERCISES_CONTENT" | zk new --interactive --title "Albanian Lesson Exercises - $TODAY" --date "$TODAY" --print-path`
5. Print both created note paths and a short checklist of what was included.
If no lesson material was provided in `$ARGUMENTS`, stop and ask the user to paste it.

View File

@@ -0,0 +1,108 @@
---
description: Triage inbox one message at a time with himalaya only
---
Process email with strict manual triage using Himalaya only.
Hard requirements:
- Use `himalaya` for every mailbox interaction (folders, listing, reading, moving, deleting, attachments).
- Process exactly one message ID at a time. Never run bulk actions on multiple IDs.
- Do not use pattern-matching commands or searches (`grep`, `rg`, `awk`, `sed`, `himalaya envelope list` query filters, etc.).
- Always inspect current folders first, then triage.
- Treat this as a single deterministic run over a snapshot of message IDs discovered during this run.
- Ingest valuable document attachments into Paperless (see Document Ingestion section below).
Workflow:
1. Run `himalaya folder list` first and use those folders as the primary taxonomy.
2. Use this existing folder set as defaults when it fits:
- `INBOX`
- `Correspondence`
- `Orders and Invoices`
- `Payments`
- `Outgoing Shipments`
- `Newsletters and Marketing`
- `Junk`
- `Deleted Messages`
3. Determine source folder:
- If `$ARGUMENTS` is a single known folder name (matches a folder from step 1), use that as source.
- Otherwise use `INBOX`.
4. Build a run scope safely:
- List with fixed page size `20` and JSON output: `himalaya envelope list -f "<source>" -p 1 -s 20 --output json`.
- Start at page `1`. Enumerate IDs in returned order.
- Process each ID fully before touching the next ID.
- Keep an in-memory reviewed set for this run to avoid reprocessing IDs already handled or intentionally left untouched.
- When all IDs on the current page are in the reviewed set, advance to the next page.
- Stop when a page returns fewer results than the page size (end of folder) and all its IDs are in the reviewed set.
5. For each single envelope ID, do all checks before any move/delete:
- Check envelope flags from the JSON listing (seen/answered/flagged) before reading.
- Read the message: `himalaya message read -f "<source>" <id>`.
- If needed for classification or ingestion, download attachments: `himalaya attachment download -f "<source>" <id> --dir /tmp/himalaya-triage`.
- If the message qualifies for document ingestion (see Document Ingestion below), copy eligible attachments to the Paperless consume directory before cleanup.
- Always `rm` downloaded files from `/tmp/himalaya-triage` after processing (whether ingested or not).
- Move: `himalaya message move -f "<source>" "<destination>" <id>`.
- Delete: `himalaya message delete -f "<source>" <id>`.
6. Classification precedence (higher rule wins on conflict):
- **Actionable and unhandled** — if the message needs a reply, requires manual payment, needs a confirmation, or demands any human action, AND has NOT been replied to (no `answered` flag), leave it in the source folder untouched. This is the highest-priority rule: anything that still needs attention stays in `INBOX`.
- Human correspondence already handled — freeform natural-language messages written by a human that have been replied to (`answered` flag set): move to `Correspondence`.
- Human communication not yet replied to but not clearly actionable — when in doubt whether a human message requires action, leave it untouched.
- Clearly ephemeral automated/system message (alerts, bot/status updates, OTP/2FA, password reset codes, login codes) with no archival value: move to `Deleted Messages`.
- Automatic payment transaction notifications (charge/payment confirmations, receipts, failed-payment notices, provider payment events such as Klarna/PayPal/Stripe) that are purely informational and require no action: move to `Payments`.
- Subscription renewal notifications (auto-renew reminders, "will renew soon", price-change notices without a concrete transaction) are operational alerts, not payment records: move to `Deleted Messages`.
- Installment plan activation notifications (e.g. Barclays installment purchase confirmations) are operational confirmations, not payment records: move to `Deleted Messages`.
- "Kontoauszug verfügbar/ist online" notifications are availability alerts, not payment records: move to `Deleted Messages`.
- Orders/invoices/business records: move to `Orders and Invoices`.
- Shipping/tracking notifications (dispatch confirmations, carrier updates, delivery ETAs) without invoice or order-document value: move to `Deleted Messages`.
- Marketing/newsletters: move to `Newsletters and Marketing`.
- Delivery/submission confirmations for items you shipped outbound: move to `Outgoing Shipments`.
- Long-term but uncategorized messages: create a concise new folder and move there.
7. Folder creation rule:
- Create a new folder only if no existing folder fits and the message should be kept.
- Naming constraints: concise topic name, avoid duplicates, and avoid broad catch-all names.
- Command: `himalaya folder add "<new-folder>"`.
Document Ingestion (Paperless):
- **Purpose**: Automatically archive valuable document attachments into Paperless via its consumption directory.
- **Ingestion path**: `/var/lib/paperless/consume/inbox-triage/`
- **When to ingest**: Only for messages whose attachments have long-term archival value. Eligible categories:
- Invoices, receipts, and billing statements (messages going to `Orders and Invoices` or `Payments`)
- Contracts, agreements, and legal documents
- Tax documents, account statements, and financial summaries
- Insurance documents and policy papers
- Official correspondence with document attachments (government, institutions)
- **When NOT to ingest**:
- Marketing emails, newsletters, promotional material
- Shipping/tracking notifications without invoice attachments
- OTP codes, login alerts, password resets, ephemeral notifications
- Subscription renewal reminders without actual invoices
- Duplicate documents already seen in this run
- Inline images, email signatures, logos, and non-document attachments
- **Eligible file types**: PDF, PNG, JPG/JPEG, TIFF, WEBP (documents and scans only). Skip archive files (ZIP, etc.), calendar invites (ICS), and other non-document formats.
- **Procedure**:
1. After downloading attachments to `/tmp/himalaya-triage`, check if any are eligible documents.
2. Copy eligible files: `cp /tmp/himalaya-triage/<filename> /var/lib/paperless/consume/inbox-triage/`
3. If multiple messages could produce filename collisions, prefix the filename with the message ID: `<id>-<filename>`.
4. Log each ingested file in the action log at the end of the run.
- **Conservative rule**: When in doubt whether an attachment is worth archiving, skip it. Paperless storage is cheap, but noise degrades searchability. Prefer false negatives over false positives for marketing material, but prefer false positives over false negatives for anything that looks like a financial or legal document.
Execution rules:
- Never perform bulk operations. One message ID per `read`, `move`, `delete`, and attachment command.
- Always use page size 20 for envelope listing (`-s 20`).
- If any single-ID command fails, log the error and continue with the next unreviewed ID.
- Never skip reading message content before deciding.
- Keep decisions conservative: when in doubt about whether something needs action, leave it in `INBOX`.
- Never move or delete unhandled actionable messages.
- Never move human communications that haven't been replied to, unless clearly non-actionable.
- Define "processed" as "reviewed once in this run" (including intentionally untouched human messages).
- Include only messages observed during this run's listings; if new mail arrives mid-run, leave it for the next run.
- Report a compact action log at the end with:
- source folder,
- total reviewed IDs,
- counts by action (untouched/moved-to-folder/deleted),
- per-destination-folder counts,
- created folders,
- documents ingested to Paperless (count and filenames),
- short rationale for non-obvious classifications.
<user-request>
$ARGUMENTS
</user-request>

View File

@@ -0,0 +1,150 @@
---
description: Initialize Supermemory with comprehensive codebase knowledge
---
# Initializing Supermemory
You are initializing persistent memory for this codebase. This is not just data collection - you're building context that will make you significantly more effective across all future sessions.
## Understanding Context
You are a **stateful** coding agent. Users expect to work with you over extended periods - potentially the entire lifecycle of a project. Your memory is how you get better over time and maintain continuity.
## What to Remember
### 1. Procedures (Rules & Workflows)
Explicit rules that should always be followed:
- "Never commit directly to main - always use feature branches"
- "Always run lint before tests"
- "Use conventional commits format"
### 2. Preferences (Style & Conventions)
Project and user coding style:
- "Prefer functional components over class components"
- "Use early returns instead of nested conditionals"
- "Always add JSDoc to exported functions"
### 3. Architecture & Context
How the codebase works and why:
- "Auth system was refactored in v2.0 - old patterns deprecated"
- "The monorepo used to have 3 modules before consolidation"
- "This pagination bug was fixed before - similar to PR #234"
## Memory Scopes
**Project-scoped** (\`scope: "project"\`):
- Build/test/lint commands
- Architecture and key directories
- Team conventions specific to this codebase
- Technology stack and framework choices
- Known issues and their solutions
**User-scoped** (\`scope: "user"\`):
- Personal coding preferences across all projects
- Communication style preferences
- General workflow habits
## Research Approach
This is a **deep research** initialization. Take your time and be thorough (~50+ tool calls). The goal is to genuinely understand the project, not just collect surface-level facts.
**What to uncover:**
- Tech stack and dependencies (explicit and implicit)
- Project structure and architecture
- Build/test/deploy commands and workflows
- Contributors & team dynamics (who works on what?)
- Commit conventions and branching strategy
- Code evolution (major refactors, architecture changes)
- Pain points (areas with lots of bug fixes)
- Implicit conventions not documented anywhere
## Research Techniques
### File-based
- README.md, CONTRIBUTING.md, AGENTS.md, CLAUDE.md
- Package manifests (package.json, Cargo.toml, pyproject.toml, go.mod)
- Config files (.eslintrc, tsconfig.json, .prettierrc)
- CI/CD configs (.github/workflows/)
### Git-based
- \`git log --oneline -20\` - Recent history
- \`git branch -a\` - Branching strategy
- \`git log --format="%s" -50\` - Commit conventions
- \`git shortlog -sn --all | head -10\` - Main contributors
### Explore Agent
Fire parallel explore queries for broad understanding:
\`\`\`
Task(explore, "What is the tech stack and key dependencies?")
Task(explore, "What is the project structure? Key directories?")
Task(explore, "How do you build, test, and run this project?")
Task(explore, "What are the main architectural patterns?")
Task(explore, "What conventions or patterns are used?")
\`\`\`
## How to Do Thorough Research
**Don't just collect data - analyze and cross-reference.**
Bad (shallow):
- Run commands, copy output
- List facts without understanding
Good (thorough):
- Cross-reference findings (if inconsistent, dig deeper)
- Resolve ambiguities (don't leave questions unanswered)
- Read actual file content, not just names
- Look for patterns (what do commits tell you about workflow?)
- Think like a new team member - what would you want to know?
## Saving Memories
Use the \`supermemory\` tool for each distinct insight:
\`\`\`
supermemory(mode: "add", content: "...", type: "...", scope: "project")
\`\`\`
**Types:**
- \`project-config\` - tech stack, commands, tooling
- \`architecture\` - codebase structure, key components, data flow
- \`learned-pattern\` - conventions specific to this codebase
- \`error-solution\` - known issues and their fixes
- \`preference\` - coding style preferences (use with user scope)
**Guidelines:**
- Save each distinct insight as a separate memory
- Be concise but include enough context to be useful
- Include the "why" not just the "what" when relevant
- Update memories incrementally as you research (don't wait until the end)
**Good memories:**
- "Uses Bun runtime and package manager. Commands: bun install, bun run dev, bun test"
- "API routes in src/routes/, handlers in src/handlers/. Hono framework."
- "Auth uses Redis sessions, not JWT. Implementation in src/lib/auth.ts"
- "Never use \`any\` type - strict TypeScript. Use \`unknown\` and narrow."
- "Database migrations must be backward compatible - we do rolling deploys"
## Upfront Questions
Before diving in, ask:
1. "Any specific rules I should always follow?"
2. "Preferences for how I communicate? (terse/detailed)"
## Reflection Phase
Before finishing, reflect:
1. **Completeness**: Did you cover commands, architecture, conventions, gotchas?
2. **Quality**: Are memories concise and searchable?
3. **Scope**: Did you correctly separate project vs user knowledge?
Then ask: "I've initialized memory with X insights. Want me to continue refining, or is this good?"
## Your Task
1. Ask upfront questions (research depth, rules, preferences)
2. Check existing memories: \`supermemory(mode: "list", scope: "project")\`
3. Research based on chosen depth
4. Save memories incrementally as you discover insights
5. Reflect and verify completeness
6. Summarize what was learned and ask if user wants refinement

View File

@@ -0,0 +1,49 @@
import type { Plugin } from "@opencode-ai/plugin";
// Wrapper words that may precede the real command in a shell invocation
// (e.g. `sudo env FOO=1 git status`).
const COMMAND_PREFIXES = new Set([
  "env",
  "command",
  "builtin",
  "time",
  "sudo",
  "nohup",
  "nice",
]);

// Return the effective command of a word list: the first word that is
// neither a known wrapper prefix nor a NAME=value environment
// assignment. Undefined when every word is a prefix/assignment.
function findCommandWord(words: string[]): string | undefined {
  return words.find(
    (word) =>
      !COMMAND_PREFIXES.has(word) && !/^[A-Za-z_][A-Za-z0-9_]*=/.test(word),
  );
}
// True when the effective command of one pipeline segment is `git`.
function segmentHasGit(words: string[]): boolean {
  const command = findCommandWord(words);
  return command === "git";
}
// Split a shell command line on command separators (&&, ||, ;, &, |,
// $(, backtick, and newlines) and report whether any segment invokes
// `git` as its effective command. Newlines separate shell commands just
// like `;`, so they must be segment boundaries — previously
// "echo hi\ngit status" was parsed as one segment whose command word
// was `echo`, letting the git call through.
function containsBlockedGit(command: string): boolean {
  const segments = command.split(/\s*(?:&&|\|\||[;&|\n]|\$\(|`)\s*/);
  for (const segment of segments) {
    const words = segment.trim().split(/\s+/).filter(Boolean);
    if (segmentHasGit(words)) return true;
  }
  return false;
}
// opencode plugin: intercept bash tool executions and reject any
// command that invokes `git`, steering the agent toward `jj`.
export const BlockGitPlugin: Plugin = async () => {
  return {
    "tool.execute.before": async (input, output) => {
      if (input.tool !== "bash") return;
      const command = output.args.command;
      // args.command may be missing or non-string; never crash the hook
      // (the previous unchecked cast would throw on `.split`).
      if (typeof command !== "string") return;
      if (containsBlockedGit(command)) {
        throw new Error(
          "This project uses jj, only use `jj` commands, not `git`.",
        );
      }
    },
  };
};

View File

@@ -0,0 +1,19 @@
import type { Plugin } from "@opencode-ai/plugin";
// Matches interpreter invocations (python/python2/python3, perl, ruby,
// php, lua, `node -e`, `bash -c`, `sh -c`) at the start of a command or
// after a shell separator (;, &, |, &&, ||, $(, backtick).
const SCRIPTING_PATTERN =
  /(?:^|[;&|]\s*|&&\s*|\|\|\s*|\$\(\s*|`\s*)(?:python[23]?|perl|ruby|php|lua|node\s+-e|bash\s+-c|sh\s+-c)\s/;

// opencode plugin: block ad-hoc scripting-language one-liners in the
// bash tool and require `nu -c` instead.
export const BlockScriptingPlugin: Plugin = async () => {
  return {
    "tool.execute.before": async (input, output) => {
      if (input.tool !== "bash") return;
      const command = output.args.command;
      // args.command may be missing or non-string; never crash the hook.
      if (typeof command !== "string") return;
      if (SCRIPTING_PATTERN.test(command)) {
        throw new Error(
          "Do not use python, perl, ruby, php, lua, or inline bash/sh for scripting. Use `nu -c` instead.",
        );
      }
    },
  };
};

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,41 @@
---
name: frontend-design
description: Create distinctive, production-grade frontend interfaces with high design quality. Use this skill when the user asks to build web components, pages, artifacts, posters, or applications (examples include websites, landing pages, dashboards, React components, HTML/CSS layouts, or when styling/beautifying any web UI). Generates creative, polished code and UI design that avoids generic AI aesthetics.
---
This skill guides creation of distinctive, production-grade frontend interfaces that avoid generic "AI slop" aesthetics. Implement real working code with exceptional attention to aesthetic details and creative choices.
The user provides frontend requirements: a component, page, application, or interface to build. They may include context about the purpose, audience, or technical constraints.
## Design Thinking
Before coding, understand the context and commit to a BOLD aesthetic direction:
- **Purpose**: What problem does this interface solve? Who uses it?
- **Tone**: Pick an extreme: brutally minimal, maximalist chaos, retro-futuristic, organic/natural, luxury/refined, playful/toy-like, editorial/magazine, brutalist/raw, art deco/geometric, soft/pastel, industrial/utilitarian, etc. There are so many flavors to choose from. Use these for inspiration but design one that is true to the aesthetic direction.
- **Constraints**: Technical requirements (framework, performance, accessibility).
- **Differentiation**: What makes this UNFORGETTABLE? What's the one thing someone will remember?
**CRITICAL**: Choose a clear conceptual direction and execute it with precision. Bold maximalism and refined minimalism both work - the key is intentionality, not intensity.
Then implement working code (HTML/CSS/JS, React, Vue, etc.) that is:
- Production-grade and functional
- Visually striking and memorable
- Cohesive with a clear aesthetic point-of-view
- Meticulously refined in every detail
## Frontend Aesthetics Guidelines
Focus on:
- **Typography**: Choose fonts that are beautiful, unique, and interesting. Avoid generic fonts like Arial and Inter; opt instead for distinctive choices that elevate the frontend's aesthetics; unexpected, characterful font choices. Pair a distinctive display font with a refined body font.
- **Color & Theme**: Commit to a cohesive aesthetic. Use CSS variables for consistency. Dominant colors with sharp accents outperform timid, evenly-distributed palettes.
- **Motion**: Use animations for effects and micro-interactions. Prioritize CSS-only solutions for HTML. Use Motion library for React when available. Focus on high-impact moments: one well-orchestrated page load with staggered reveals (animation-delay) creates more delight than scattered micro-interactions. Use scroll-triggering and hover states that surprise.
- **Spatial Composition**: Unexpected layouts. Asymmetry. Overlap. Diagonal flow. Grid-breaking elements. Generous negative space OR controlled density.
- **Backgrounds & Visual Details**: Create atmosphere and depth rather than defaulting to solid colors. Add contextual effects and textures that match the overall aesthetic. Apply creative forms like gradient meshes, noise textures, geometric patterns, layered transparencies, dramatic shadows, decorative borders, custom cursors, and grain overlays.
NEVER use generic AI-generated aesthetics like overused font families (Inter, Roboto, Arial, system fonts), cliched color schemes (particularly purple gradients on white backgrounds), predictable layouts and component patterns, and cookie-cutter design that lacks context-specific character.
Interpret creatively and make unexpected choices that feel genuinely designed for the context. No design should be the same. Vary between light and dark themes, different fonts, different aesthetics. NEVER converge on common choices (Space Grotesk, for example) across generations.
**IMPORTANT**: Match implementation complexity to the aesthetic vision. Maximalist designs need elaborate code with extensive animations and effects. Minimalist or refined designs need restraint, precision, and careful attention to spacing, typography, and subtle details. Elegance comes from executing the vision well.
Remember: Claude is capable of extraordinary creative work. Don't hold back, show what can truly be created when thinking outside the box and committing fully to a distinctive vision.

View File

@@ -1,15 +1,15 @@
{inputs, ...}: final: prev: let {inputs, ...}: final: prev: let
version = "0.24.0"; version = "0.24.1";
srcs = { srcs = {
x86_64-linux = x86_64-linux =
prev.fetchurl { prev.fetchurl {
url = "https://github.com/trycog/cog-cli/releases/download/v${version}/cog-linux-x86_64.tar.gz"; url = "https://github.com/trycog/cog-cli/releases/download/v${version}/cog-linux-x86_64.tar.gz";
hash = "sha256-9Ka7rPIlWtLVxRg9yNQCNz16AE4j0zGf2TW7xBXrksM="; hash = "sha256-/ioEuM58F3ppO0wlc5nw7ZNHunoweOXL/Gda65r0Ig4=";
}; };
aarch64-darwin = aarch64-darwin =
prev.fetchurl { prev.fetchurl {
url = "https://github.com/trycog/cog-cli/releases/download/v${version}/cog-darwin-arm64.tar.gz"; url = "https://github.com/trycog/cog-cli/releases/download/v${version}/cog-darwin-arm64.tar.gz";
hash = "sha256-YNONHRmPGDhJeF+7rcWmrjqktYpi4b6bLl+M7IEFDtU="; hash = "sha256-o/A2hVU3Jzmlzx5RbGLFCpfGAghcLGTD8Bm+bVR5OkQ=";
}; };
}; };
in { in {

View File

@@ -1,10 +0,0 @@
{inputs, ...}: final: prev: {
pi-agent-stuff =
prev.buildNpmPackage {
pname = "pi-agent-stuff";
version = "1.5.0";
src = inputs.pi-agent-stuff;
npmDepsHash = "sha256-pyXMNdlie8vAkhz2f3GUGT3CCYuwt+xkWnsijBajXIo=";
dontNpmBuild = true;
};
}

View File

@@ -1,33 +0,0 @@
{inputs, ...}: final: prev: {
pi-harness =
prev.stdenvNoCC.mkDerivation {
pname = "pi-harness";
version = "0.0.0";
src = inputs.pi-harness;
pnpmDeps =
prev.fetchPnpmDeps {
pname = "pi-harness";
version = "0.0.0";
src = inputs.pi-harness;
pnpm = prev.pnpm_10;
fetcherVersion = 3;
hash = "sha256-lNcZRCmmwq9t05UjVWcuGq+ZzRHuHNmqKQIVPh6DoxQ=";
};
nativeBuildInputs = [
prev.pnpmConfigHook
prev.pnpm_10
prev.nodejs
];
dontBuild = true;
installPhase = ''
runHook preInstall
mkdir -p $out/lib/node_modules/@aliou/pi-harness
cp -r . $out/lib/node_modules/@aliou/pi-harness
runHook postInstall
'';
};
}

View File

@@ -1,10 +0,0 @@
{inputs, ...}: final: prev: {
pi-mcp-adapter =
prev.buildNpmPackage {
pname = "pi-mcp-adapter";
version = "2.2.0";
src = inputs.pi-mcp-adapter;
npmDepsHash = "sha256-myJ9h/zC/KDddt8NOVvJjjqbnkdEN4ZR+okCR5nu7hM=";
dontNpmBuild = true;
};
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,44 +0,0 @@
{inputs, ...}: final: prev: {
qmd =
prev.buildNpmPackage rec {
pname = "qmd";
version = "2.0.1";
src = inputs.qmd;
npmDepsFetcherVersion = 2;
npmDepsHash = "sha256-sAyCG43p3JELQ2lazwRrsdmW9Q4cOy45X6ZagBmitGU=";
nativeBuildInputs = [
prev.makeWrapper
prev.python3
prev.pkg-config
prev.cmake
];
buildInputs = [prev.sqlite];
dontConfigure = true;
postPatch = ''
cp ${./qmd-package-lock.json} package-lock.json
'';
npmBuildScript = "build";
dontNpmPrune = true;
installPhase = ''
runHook preInstall
mkdir -p $out/lib/node_modules/qmd $out/bin
cp -r bin dist node_modules package.json package-lock.json LICENSE CHANGELOG.md $out/lib/node_modules/qmd/
makeWrapper ${prev.nodejs}/bin/node $out/bin/qmd \
--add-flags $out/lib/node_modules/qmd/dist/cli/qmd.js \
--set LD_LIBRARY_PATH ${prev.lib.makeLibraryPath [prev.sqlite]}
runHook postInstall
'';
meta = with prev.lib; {
description = "On-device search engine for markdown notes, meeting transcripts, and knowledge bases";
homepage = "https://github.com/tobi/qmd";
license = licenses.mit;
mainProgram = "qmd";
platforms = platforms.unix;
};
};
}

View File

@@ -10,7 +10,7 @@ in {
... ...
}: { }: {
home.packages = [ home.packages = [
inputs'.llm-agents.packages.pi inputs'.llm-agents.packages.claude-code
pkgs.cog-cli pkgs.cog-cli
]; ];
@@ -21,67 +21,160 @@ in {
} }
''; '';
home.file = { programs.opencode = {
"AGENTS.md".source = ./_ai-tools/AGENTS.md; enable = true;
".pi/agent/extensions/pi-elixir" = { package = inputs'.llm-agents.packages.opencode;
source = inputs.pi-elixir; tui = {
recursive = true; theme = "rosepine";
plugin = ["./plugin/review.ts"];
}; };
".pi/agent/extensions/pi-mcp-adapter" = { settings = {
source = "${pkgs.pi-mcp-adapter}/lib/node_modules/pi-mcp-adapter"; model = "openai/gpt-5.4";
recursive = true; small_model = "openai/gpt-5.1-codex-mini";
}; plugin = [
".pi/agent/extensions/no-git.ts".source = ./_ai-tools/extensions/no-git.ts; "opencode-claude-auth"
".pi/agent/extensions/no-scripting.ts".source = ./_ai-tools/extensions/no-scripting.ts; "opencode-supermemory"
".pi/agent/extensions/note-ingest.ts".source = ./_ai-tools/extensions/note-ingest.ts;
".pi/agent/extensions/review.ts".source = ./_ai-tools/extensions/review.ts;
".pi/agent/extensions/session-name.ts".source = ./_ai-tools/extensions/session-name.ts;
".pi/agent/notability" = {
source = ./_notability;
recursive = true;
};
".pi/agent/skills/elixir-dev" = {
source = "${inputs.pi-elixir}/skills/elixir-dev";
recursive = true;
};
".pi/agent/skills/jujutsu/SKILL.md".source = ./_ai-tools/skills/jujutsu/SKILL.md;
".pi/agent/skills/notability-transcribe/SKILL.md".source = ./_ai-tools/skills/notability-transcribe/SKILL.md;
".pi/agent/skills/notability-normalize/SKILL.md".source = ./_ai-tools/skills/notability-normalize/SKILL.md;
".pi/agent/themes" = {
source = "${inputs.pi-rose-pine}/themes";
recursive = true;
};
".pi/agent/settings.json".text =
builtins.toJSON {
theme = "rose-pine-dawn";
quietStartup = true;
hideThinkingBlock = true;
defaultProvider = "openai-codex";
defaultModel = "gpt-5.4";
defaultThinkingLevel = "high";
packages = [
{
source = "${pkgs.pi-agent-stuff}/lib/node_modules/mitsupi";
extensions = [
"pi-extensions/answer.ts"
"pi-extensions/context.ts"
"pi-extensions/multi-edit.ts"
"pi-extensions/todos.ts"
]; ];
skills = []; permission = {
prompts = []; external_directory = {
themes = []; "*" = "allow";
} "**/.gnupg/**" = "deny";
{ "**/.ssh/**" = "deny";
source = "${pkgs.pi-harness}/lib/node_modules/@aliou/pi-harness"; "~/.config/gh/hosts.yml" = "deny";
extensions = ["extensions/breadcrumbs/index.ts"]; "~/.config/sops/age/keys.txt" = "deny";
skills = []; "~/.local/share/opencode/mcp-auth.json" = "deny";
prompts = []; "/etc/ssh/ssh_host_*" = "deny";
themes = []; "/run/secrets/*" = "deny";
} };
bash = {
"*" = "allow";
env = "deny";
"env *" = "deny";
printenv = "deny";
"printenv *" = "deny";
"export *" = "deny";
"gh auth *" = "deny";
ssh = "ask";
"ssh *" = "ask";
mosh = "ask";
"mosh *" = "ask";
"cat *.env" = "deny";
"cat *.env.*" = "deny";
"cat **/.env" = "deny";
"cat **/.env.*" = "deny";
"cat *.envrc" = "deny";
"cat **/.envrc" = "deny";
"cat .dev.vars" = "deny";
"cat **/.dev.vars" = "deny";
"cat *.pem" = "deny";
"cat *.key" = "deny";
"cat **/.gnupg/**" = "deny";
"cat **/.ssh/**" = "deny";
"cat ~/.config/gh/hosts.yml" = "deny";
"cat ~/.config/sops/age/keys.txt" = "deny";
"cat ~/.local/share/opencode/mcp-auth.json" = "deny";
"cat /etc/ssh/ssh_host_*" = "deny";
"cat /run/secrets/*" = "deny";
};
edit = {
"*" = "allow";
"**/.gnupg/**" = "deny";
"**/.ssh/**" = "deny";
"**/secrets/**" = "deny";
"secrets/*" = "deny";
"~/.config/gh/hosts.yml" = "deny";
"~/.config/sops/age/keys.txt" = "deny";
"~/.local/share/opencode/mcp-auth.json" = "deny";
"/etc/ssh/ssh_host_*" = "deny";
"/run/secrets/*" = "deny";
};
glob = "allow";
grep = "allow";
list = "allow";
lsp = "allow";
question = "allow";
read = {
"*" = "allow";
"*.env" = "deny";
"*.env.*" = "deny";
"*.envrc" = "deny";
"**/.env" = "deny";
"**/.env.*" = "deny";
"**/.envrc" = "deny";
".dev.vars" = "deny";
"**/.dev.vars" = "deny";
"**/.gnupg/**" = "deny";
"**/.ssh/**" = "deny";
"*.key" = "deny";
"*.pem" = "deny";
"**/secrets/**" = "deny";
"secrets/*" = "deny";
"~/.config/gh/hosts.yml" = "deny";
"~/.config/sops/age/keys.txt" = "deny";
"~/.local/share/opencode/mcp-auth.json" = "deny";
"/etc/ssh/ssh_host_*" = "deny";
"/run/secrets/*" = "deny";
};
skill = "allow";
task = "allow";
webfetch = "allow";
websearch = "allow";
codesearch = "allow";
};
agent = {
explore = {
model = "openai/gpt-5.1-codex-mini";
};
};
instructions = [
"CLAUDE.md"
"AGENT.md"
# "AGENTS.md"
"AGENTS.local.md"
]; ];
formatter = {
mix = {
disabled = true;
}; };
".pi/agent/mcp.json".source = ./_ai-tools/mcp.json; };
mcp = {
opensrc = {
enabled = true;
type = "local";
command = ["node" "/home/cschmatzler/.bun/bin/opensrc-mcp"];
};
context7 = {
enabled = true;
type = "remote";
url = "https://mcp.context7.com/mcp";
};
grep_app = {
enabled = true;
type = "remote";
url = "https://mcp.grep.app";
};
};
};
};
xdg.configFile = {
# "opencode/agent" = {
# source = ./_opencode/agent;
# recursive = true;
# };
"opencode/command" = {
source = ./_opencode/command;
recursive = true;
};
"opencode/skill" = {
source = ./_opencode/skill;
recursive = true;
};
"opencode/plugin" = {
source = ./_opencode/plugin;
recursive = true;
};
"opencode/AGENTS.md".source = ./_opencode/AGENTS.md;
}; };
}; };
} }

View File

@@ -153,9 +153,12 @@ in {
"1password" "1password"
"alcove" "alcove"
"aqua-voice" "aqua-voice"
"chatgpt"
"ghostty@tip" "ghostty@tip"
"raycast" "raycast"
"spotify" "spotify"
"tailscale"
"whatsapp"
]; ];
}; };
}; };

View File

@@ -35,7 +35,6 @@
adguardhome = ./adguardhome.nix; adguardhome = ./adguardhome.nix;
cache = ./cache.nix; cache = ./cache.nix;
gitea = ./gitea.nix; gitea = ./gitea.nix;
notability = ./notability.nix;
opencode = ./opencode.nix; opencode = ./opencode.nix;
paperless = ./paperless.nix; paperless = ./paperless.nix;

View File

@@ -54,27 +54,6 @@
inputs.nixpkgs.follows = "nixpkgs"; inputs.nixpkgs.follows = "nixpkgs";
}; };
llm-agents.url = "github:numtide/llm-agents.nix"; llm-agents.url = "github:numtide/llm-agents.nix";
pi-agent-stuff = {
url = "github:mitsuhiko/agent-stuff";
flake = false;
};
pi-elixir = {
url = "github:dannote/pi-elixir";
flake = false;
};
pi-rose-pine = {
url = "github:zenobi-us/pi-rose-pine";
flake = false;
};
pi-harness = {
url = "github:aliou/pi-harness";
flake = false;
};
pi-mcp-adapter = {
url = "github:nicobailon/pi-mcp-adapter";
flake = false;
};
qmd.url = "github:tobi/qmd";
# Overlay inputs # Overlay inputs
himalaya.url = "github:pimalaya/himalaya"; himalaya.url = "github:pimalaya/himalaya";
jj-ryu = { jj-ryu = {

View File

@@ -2,6 +2,11 @@
local = import ./_lib/local.nix; local = import ./_lib/local.nix;
in { in {
den.aspects.email.homeManager = {pkgs, ...}: { den.aspects.email.homeManager = {pkgs, ...}: {
programs.aerc = {
enable = true;
extraConfig.general.unsafe-accounts-conf = true;
};
programs.himalaya = { programs.himalaya = {
enable = true; enable = true;
package = package =
@@ -50,6 +55,7 @@ in {
port = 993; port = 993;
tls.enable = true; tls.enable = true;
}; };
aerc.enable = true;
}; };
}; };
}; };

View File

@@ -20,6 +20,17 @@ in
den.aspects.email den.aspects.email
]; ];
homeManager = { homeManager = {
config,
inputs',
...
}: let
opencode = inputs'.llm-agents.packages.opencode;
in {
programs.opencode.settings.permission.external_directory = {
"/tmp/himalaya-triage/*" = "allow";
"/var/lib/paperless/consume/inbox-triage/*" = "allow";
};
programs.nushell.extraConfig = '' programs.nushell.extraConfig = ''
if $nu.is-interactive and ('SSH_CONNECTION' in ($env | columns)) and ('ZELLIJ' not-in ($env | columns)) { if $nu.is-interactive and ('SSH_CONNECTION' in ($env | columns)) and ('ZELLIJ' not-in ($env | columns)) {
try { try {
@@ -30,6 +41,30 @@ in
} }
} }
''; '';
systemd.user.services.opencode-inbox-triage = {
Unit = {
Description = "OpenCode inbox triage";
};
Service = {
Type = "oneshot";
ExecStart = "${opencode}/bin/opencode run --command inbox-triage --model opencode-go/glm-5";
Environment = "PATH=${config.home.profileDirectory}/bin:/run/current-system/sw/bin";
};
};
systemd.user.timers.opencode-inbox-triage = {
Unit = {
Description = "Run OpenCode inbox triage every 12 hours";
};
Timer = {
OnCalendar = "*-*-* 0/12:00:00";
Persistent = true;
};
Install = {
WantedBy = ["timers.target"];
};
};
}; };
}) })
(hostLib.mkPerHostAspect { (hostLib.mkPerHostAspect {
@@ -39,7 +74,6 @@ in
den.aspects.opencode-api-key den.aspects.opencode-api-key
den.aspects.adguardhome den.aspects.adguardhome
den.aspects.cache den.aspects.cache
den.aspects.notability
den.aspects.paperless den.aspects.paperless
]; ];
nixos = {...}: { nixos = {...}: {

View File

@@ -49,12 +49,27 @@
den.aspects.tailscale.nixos = { den.aspects.tailscale.nixos = {
services.tailscale = { services.tailscale = {
enable = true; enable = true;
extraSetFlags = ["--ssh"];
openFirewall = true; openFirewall = true;
permitCertUid = "caddy"; permitCertUid = "caddy";
useRoutingFeatures = "server"; useRoutingFeatures = "server";
}; };
}; };
den.aspects.mosh.nixos = {
programs.mosh = {
enable = true;
openFirewall = false;
};
networking.firewall.interfaces.tailscale0.allowedUDPPortRanges = [
{
from = 60000;
to = 61000;
}
];
};
den.aspects.tailscale.darwin = { den.aspects.tailscale.darwin = {
services.tailscale.enable = true; services.tailscale.enable = true;
}; };

View File

@@ -1,136 +0,0 @@
{lib, ...}: let
caddyLib = import ./_lib/caddy.nix;
local = import ./_lib/local.nix;
secretLib = import ./_lib/secrets.nix {inherit lib;};
inherit (local) user;
notabilityScripts = ./_notability;
tahani = local.hosts.tahani;
in {
den.aspects.notability.nixos = {
config,
inputs',
pkgs,
...
}: let
homeDir = tahani.home;
dataRoot = "${homeDir}/.local/share/notability-ingest";
stateRoot = "${homeDir}/.local/state/notability-ingest";
notesRoot = "${homeDir}/Notes";
webdavRoot = "${dataRoot}/webdav-root";
userPackages = with pkgs; [
qmd
poppler-utils
rclone
sqlite
zk
];
commonPath = with pkgs;
[
inputs'.llm-agents.packages.pi
coreutils
inotify-tools
nushell
util-linux
]
++ userPackages;
commonEnvironment = {
HOME = homeDir;
NOTABILITY_ARCHIVE_ROOT = "${dataRoot}/archive";
NOTABILITY_DATA_ROOT = dataRoot;
NOTABILITY_DB_PATH = "${stateRoot}/db.sqlite";
NOTABILITY_NOTES_DIR = notesRoot;
NOTABILITY_RENDER_ROOT = "${dataRoot}/rendered-pages";
NOTABILITY_SESSIONS_ROOT = "${stateRoot}/sessions";
NOTABILITY_STATE_ROOT = stateRoot;
NOTABILITY_TRANSCRIPT_ROOT = "${stateRoot}/transcripts";
NOTABILITY_WEBDAV_ROOT = webdavRoot;
XDG_CONFIG_HOME = "${homeDir}/.config";
};
mkTmpDirRule = path: "d ${path} 0755 ${user.name} users -";
mkNotabilityService = {
description,
script,
after ? [],
requires ? [],
environment ? {},
}: {
inherit after description requires;
wantedBy = ["multi-user.target"];
path = commonPath;
environment = commonEnvironment // environment;
serviceConfig = {
ExecStart = "${pkgs.nushell}/bin/nu ${notabilityScripts}/${script}";
Group = "users";
Restart = "always";
RestartSec = 5;
User = user.name;
WorkingDirectory = homeDir;
};
};
in {
sops.secrets.tahani-notability-webdav-password =
secretLib.mkUserBinarySecret {
name = "tahani-notability-webdav-password";
sopsFile = ../secrets/tahani-notability-webdav-password;
};
home-manager.users.${user.name} = {
home.packages = userPackages;
home.file.".config/qmd/index.yml".text = ''
collections:
notes:
path: ${notesRoot}
pattern: "**/*.md"
'';
};
systemd.tmpfiles.rules =
builtins.map mkTmpDirRule [
notesRoot
dataRoot
webdavRoot
"${dataRoot}/archive"
"${dataRoot}/rendered-pages"
stateRoot
"${stateRoot}/jobs"
"${stateRoot}/jobs/queued"
"${stateRoot}/jobs/running"
"${stateRoot}/jobs/failed"
"${stateRoot}/jobs/done"
"${stateRoot}/jobs/results"
"${stateRoot}/sessions"
"${stateRoot}/transcripts"
];
services.caddy.virtualHosts =
caddyLib.mkTailscaleVHost {
name = "tahani";
configText = ''
handle /notability* {
reverse_proxy 127.0.0.1:9980
}
'';
};
systemd.services.notability-webdav =
mkNotabilityService {
description = "Notability WebDAV landing zone";
script = "webdav.nu";
after = ["network.target"];
environment = {
NOTABILITY_WEBDAV_ADDR = "127.0.0.1:9980";
NOTABILITY_WEBDAV_BASEURL = "/notability";
NOTABILITY_WEBDAV_PASSWORD_FILE = config.sops.secrets.tahani-notability-webdav-password.path;
NOTABILITY_WEBDAV_USER = "notability";
};
};
systemd.services.notability-watch =
mkNotabilityService {
description = "Watch and ingest Notability WebDAV uploads";
script = "watch.nu";
after = ["notability-webdav.service"];
requires = ["notability-webdav.service"];
};
};
}

View File

@@ -20,14 +20,6 @@
(import ./_overlays/jj-ryu.nix {inherit inputs;}) (import ./_overlays/jj-ryu.nix {inherit inputs;})
# cog-cli # cog-cli
(import ./_overlays/cog-cli.nix {inherit inputs;}) (import ./_overlays/cog-cli.nix {inherit inputs;})
# pi-agent-stuff (mitsuhiko)
(import ./_overlays/pi-agent-stuff.nix {inherit inputs;})
# pi-harness (aliou)
(import ./_overlays/pi-harness.nix {inherit inputs;})
# pi-mcp-adapter
(import ./_overlays/pi-mcp-adapter.nix {inherit inputs;})
# qmd
(import ./_overlays/qmd.nix {inherit inputs;})
# jj-starship (passes through upstream overlay) # jj-starship (passes through upstream overlay)
(import ./_overlays/jj-starship.nix {inherit inputs;}) (import ./_overlays/jj-starship.nix {inherit inputs;})
# zjstatus # zjstatus

View File

@@ -2,6 +2,7 @@
den.aspects.host-nixos-base.includes = [ den.aspects.host-nixos-base.includes = [
den.aspects.nixos-system den.aspects.nixos-system
den.aspects.core den.aspects.core
den.aspects.mosh
den.aspects.openssh den.aspects.openssh
den.aspects.tailscale den.aspects.tailscale
]; ];

View File

@@ -24,6 +24,7 @@ in {
jq jq
killall killall
lsof lsof
mosh
ouch ouch
ov ov
sd sd

View File

@@ -4,23 +4,23 @@
"age": [ "age": [
{ {
"recipient": "age1xate984yhl9qk9d4q99pyxmzz48sq56nfhu8weyzkgum4ed5tc5shjmrs7", "recipient": "age1xate984yhl9qk9d4q99pyxmzz48sq56nfhu8weyzkgum4ed5tc5shjmrs7",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBKWXVhTEw0em1CZVB3d2U3\nSWRjMlZpTlFYSXZJZVE5SWttSFV2K3l3VFJjCjZZbG05a1RsUlRORDNaRXduVHBC\ndHZ2aG1FRnJhbFQxNkNxTEwzV0NRdHMKLS0tIFplbXhERjFhRGh0YUtuVG5IZWVh\ndEV6SkhTWnUrVkFmR0NYRS8xZ05zRTQK8qcapJ2Z2AukAUSFagChNGt7BTnIXchr\nociqhWE+BVPROduihwpsQlAWxEARJP/5sqhOAZpNrnna9Psg4m1MFA==\n-----END AGE ENCRYPTED FILE-----\n" "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSA0cmFCRDVFcFFORVpsK2Fo\nYWg5S3VzSGlCYVp1S1Q3NGF4SDZqMVFmTFc0CkZQUDJwQ3AvUDNpMUZNaUlRTkNt\nQVR2UzI0Snp3SVZqN2phRzE1K0ErZ3cKLS0tIDE4dndRWXVycitxSVNEN28vN29s\nWnRIT24yT3Z6R1ZiYURYd1BiSzBIOU0Kn7jrvq06CxRxn7XbOIMoEIjMDIr6A089\ns1ymMJO8DmeKdNaL5s3EwG7BZBAimdmcjANST1kXFy7oc4eSk4ETwg==\n-----END AGE ENCRYPTED FILE-----\n"
}, },
{ {
"recipient": "age1njjegjjdqzfnrr54f536yl4lduqgna3wuv7ef6vtl9jw5cju0grsgy62tm", "recipient": "age1njjegjjdqzfnrr54f536yl4lduqgna3wuv7ef6vtl9jw5cju0grsgy62tm",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBJTzlPdWcyQlFPSWdhVnVB\nR08yVmdaeFRNYUo2clA4ellicU9rVnlZMVFBCnRjOTZGdHBOMW94OGRyaE9HMktq\nQ1V0WmQycndnSHh4K1JiZllqSG5oRVUKLS0tIGZ6VTNUd2xmR1N1bmtDU1k2Q0ox\nRDAwQytvL3JQUjh0MHBFV09kdVpJdkUKC1CigRs4k/uSXC4zMWL60xaHFoyvzYlu\nP2olBA4CerGEoMc0ZIBEEr78hB0j//06se68BreZcD4FcDC77IWNXQ==\n-----END AGE ENCRYPTED FILE-----\n" "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBqZFVRMDVVaEJWUWQ1bkRD\nWmQ1dTRMK2FpMmZLTERuOHc1OHUramFTZkFzCnlSUTUzUktrdFJyVnk0SnpFOURT\nME1wckdyUHFmeE0rN1BneUhIR3JuZHMKLS0tIGJCTHZuWTdGQVhoU3JnSmxIcm83\nc0hwNGUvVEljWWVwOWpCS1BJckhjRUkKDD22XytVN58yR22InpsUEN6y1r2AQqLn\nu1fmWC7R4AJcisHqVSEOBW3qQVQdFWIptBN/5urkb2yAKNtP7WUUbg==\n-----END AGE ENCRYPTED FILE-----\n"
}, },
{ {
"recipient": "age187jl7e4k9n4guygkmpuqzeh0wenefwrfkpvuyhvwjrjwxqpzassqq3x67j", "recipient": "age187jl7e4k9n4guygkmpuqzeh0wenefwrfkpvuyhvwjrjwxqpzassqq3x67j",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBRcVJ0WFoxN2dQSy8xN2ox\nU09GbUd0UG03Wlc0RnpTNkpYbUo2ZFg1Q1hRCmZyUTZiYklMbnBFczhkSTNFaTNi\nSy9vb1VXOFBqQVMySHRVUng0c083VFEKLS0tIFplV3JVSmV4TmJHSkRPMFMzZkZl\ncmVZWjRCZWxlamxUdnp6ZnA3bXNaV1EK4ZXEJBXHGTaEbNq81RRm+GfElyJekrX+\nxrWHGTkJ4golaI39p3j/F8sF40Sa036ZJmQU8uX9fq1cnuSRv4lb1g==\n-----END AGE ENCRYPTED FILE-----\n" "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBlc0NibDBuUVJYLzRQY0dP\nWlRzWVo0Rk5NVXh0SGZjYnNza2g3M3E0U0cwCmc5K2d5UzZvR1c2QmJickg3Uk5X\nQm9GMkZnYUVGS1lPRklrcDJRdmMvM2MKLS0tIHBiclZyTVU5emVka05xNXJTTnhF\nN3plRmN2eEl0Y012eGNBbXBsc054Wm8KIB6pNlSM63UvoaPsHNYhSuhZsfM/VQfw\nTxITGnK0Lkjw91SDVzuCGPTtZxuUTFR2q0SyGbZU9IrLQWmBTaENhw==\n-----END AGE ENCRYPTED FILE-----\n"
}, },
{ {
"recipient": "age1ez6j3r5wdp0tjy7n5qzv5vfakdc2nh2zeu388zu7a80l0thv052syxq5e2", "recipient": "age1f9h725ewwwwwkelnrvdvrurg6fcsn3zxrxdt0v6v8ys0nzngcsvqu77nc8",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSA5a3gxLzQzQXl6OThJekYr\nVE5zQ0tHd2l4K1NnZEJvSkw0WVptdE9VQ3g0CjcrdkgyUGczZTY5SFdQNFdneTVR\naTdjTUhSSzZvQnJmc2g1NGJhN3Y3VjQKLS0tIDk4ckszblk5QUFkNzVVZ3czYnpk\ncVZlTlJxK3ZLRmxZRmtaZzNkME9UZWcK+GVFoiY5qKX8DdKbGxUoLxF3gnnG3Cbl\nk/57ZH1xVJT4jCCqdploZCSwSLdGrUDGs8I4FixKVMKGT6Ce2xaPcg==\n-----END AGE ENCRYPTED FILE-----\n" "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBEUUhvblozMyt0UDJpZ2dx\nNWJBZG5wQzFEM2RFTTJjb3FtcE1ISkd3cEhFCjJOdDJwOTJrdWc2c0NYWGQwWk9y\nREM0Qm1iK3FHUE54ZVI5L2k0RmtESHMKLS0tIEtRdTQvY1AxZHhSZWlhSThFV2FN\nQ0wyUTRaRnh1cDVlVmNjekNuNUN2bUUKOnTcpcaN7cuQXceJmPtKJj2pyIxC3Lj7\npedLV126Q07i3aRrQgm92O8NW2lMaM2Z3FldooTsQgIieQeMCJ6G9w==\n-----END AGE ENCRYPTED FILE-----\n"
}, },
{ {
"recipient": "age1tlymdmaukhwupzrhszspp26lgd8s64rw4vu9lwc7gsgrjm78095s9fe9l3", "recipient": "age1tlymdmaukhwupzrhszspp26lgd8s64rw4vu9lwc7gsgrjm78095s9fe9l3",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBxUHJtZ3VuRVZnU0RXeStx\nNUQrakgxWm5oOFlmbXZGL1UxQnhvYU1sUkZNCmlucWQyeFJDSFJZYWlSbkd2eCt6\nS2liQ2g3VXpING81OHRPUGRocnNqa1UKLS0tIG9tdXdna3RhYm8xRHRsS2tQYlUx\najVwbndFMDUyVG0weGtGdWxpQnpsSVkKdTnOnyKDkDfDtDA4jdF1JZ6wXdf/Rucv\nT4YYvjbh7JTnOl6wMg37uy29GVlI0hjOCMyX0j2BmKis/awCJ4bxUA==\n-----END AGE ENCRYPTED FILE-----\n" "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBqUVhZanBIR294ek1keWtZ\nM3puaC8zWGE3UHpmZy9vbzh3YkRyTW5rcG5zCmQ1SnR5NzZLcXNZZVUzTWV1YkFn\ncE5CaHpSeEZnWlNkak1MbWM2UUJJT0kKLS0tIEl1T2lsYndYeHkxalpBYUtwY1Yv\nMGJLWWEwVW5lNnNwNEZhTnRMTVdJdmMK3mg0hzN6KjmR288U09USPmyoQd5ixmCY\nOZb6qjx1O9TUmuho6j+zR4BmMcZ9kIv6uEuURKxGFwzZy+l5a3e3Cg==\n-----END AGE ENCRYPTED FILE-----\n"
} }
], ],
"lastmodified": "2026-01-04T19:45:21Z", "lastmodified": "2026-01-04T19:45:21Z",

View File

@@ -4,23 +4,23 @@
"age": [ "age": [
{ {
"recipient": "age1xate984yhl9qk9d4q99pyxmzz48sq56nfhu8weyzkgum4ed5tc5shjmrs7", "recipient": "age1xate984yhl9qk9d4q99pyxmzz48sq56nfhu8weyzkgum4ed5tc5shjmrs7",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSA2Ly9XQlRDZzgrY2tiTmRi\nNmhBNDQ2TDV4eGZsUUFvTmlHR2tCN29IUGljCnBvb2RKNUdHTnEwQ1RzS05uOTVK\neElUUmdRdnRaYUpIWEljdlN6REgwUm8KLS0tIGpJRVNKTkR3bEcrcks3ek5LNVV3\nYzhOUFRvY1RPYkM5V0hXenFHd2NSdWsKokyuyPXjMAJIfyeWsBHGWaLID7Oorc9d\nQE9Veh2aramAH1xb30BVLa40Dpu0jI/Wgxoo0iDf5OypNXBiBH+FMA==\n-----END AGE ENCRYPTED FILE-----\n" "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSB4WmNWMnQ2NEg1MnFvOS9n\neHBnMHpvWllvM2taeFM5SXRVT0wvMTRaMUJFCnk5MVpGRjN1cUxiYkdLSlZ1aTB0\nTkduQkFZcjkxQzUyWHhlQ0tFRnZFTWsKLS0tIDVLdXdySWxqb20zZ3RKSE1qc2Nh\nSC82UmxGN2VUY2pZY3pnWDlKTkRKSEUKFSQqZIzGXpCzpWVWko7J3hxglRJmOuF4\nmE/lC55uekxcochjgPT9uDJfG1/kskWyJTBb5Ndx3YFkUcnfVOZc4Q==\n-----END AGE ENCRYPTED FILE-----\n"
}, },
{ {
"recipient": "age1njjegjjdqzfnrr54f536yl4lduqgna3wuv7ef6vtl9jw5cju0grsgy62tm", "recipient": "age1njjegjjdqzfnrr54f536yl4lduqgna3wuv7ef6vtl9jw5cju0grsgy62tm",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBMNUVHeWg1N1ZPKzBqMnBn\nY2VaTlp0K3ppbFdmN0JuQmRnVUlzQ0hMZTI4CkxORVJ0NUIxVXZMcHVRcXFSV01l\naFFVOEtJSFY0MUVWOXYxbFlqRmNuVHMKLS0tIEYydDB1ZHUvQS8yTENmZkZHeXFV\nU1N3ZC9zNEtZSnE2Wk9TREN3Y3JwNEkKyFqj+1kGjqKyslllunUl8dVQwFAe/MdL\n5PNKIfVErxbnPtlc+yfRSWsfOQPdNV9GVM9rtOxA+51QStiGlLU12Q==\n-----END AGE ENCRYPTED FILE-----\n" "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSB5U2plZGxEVy90NUMzellJ\nYVVsTDUvL1o5OXZ4YjR3c0tPY2RjQWpVd0M4Ci9kUFA3OEFoRjlUa1g4Nnl3K2Rq\nNHA1NkRtVzdXM21QNTI5ZFJsNFVCMncKLS0tIFRQbDdXeWxIdzJpNng0QVc3bStT\nQ2VPNFA4NDlmU3hscjJ0MjdGRGQxakUKM9H04Ezw/KoAJVjFB56v0vjIHJiO+aW+\nWWdBlsZoeTYChkMGcQ74SaLkPMEkD3GU8Ua9RZBIqFzOZwsOdAkWBA==\n-----END AGE ENCRYPTED FILE-----\n"
}, },
{ {
"recipient": "age187jl7e4k9n4guygkmpuqzeh0wenefwrfkpvuyhvwjrjwxqpzassqq3x67j", "recipient": "age187jl7e4k9n4guygkmpuqzeh0wenefwrfkpvuyhvwjrjwxqpzassqq3x67j",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBTREx3SU02RU1adU5Ielkz\nVUFrdUZkQzJwZG1aQlZEcDdxWXVFM1JtRHk0CmNTamFhcjJEMklYbkpxWHFzekhY\nQ1E1STZ3U0RMNkNnVnVvWXJkZUhxRm8KLS0tIE1ZaFBORVRMZkYwNnBkTjFxSWNG\nVWJnYjgzM3IyMDJ3Z3o1QkV0RGdjL0EKQ1G/uJ8HcVDMUk8NXBC0MRcJOyLzQRlT\nL5Sf6Es6UYrTp5tIWleR6u48aUGi8HvcOOeCrK2S5utj5xPl6E+bqQ==\n-----END AGE ENCRYPTED FILE-----\n" "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBSSTNhc0xrOXRXOWZVdWNh\nNGQxNFBXUHNrL2NsMnBkcDdSVHVYc1RPM25zClh0L2FrMEhZcWt3RGtHa0ErNjUz\ndDlZaC9kZVVHSkFOK3dBWGwwZStQeUkKLS0tIE1VRnJCbTFWUkhoQ0c0dkppbVRL\nSDZldkdXV2U5Z3ZycmEySTJ5WDFTbVEKgHq5Xa86l+1e87p2KodJN+8/IOz2y0w7\n3xHjoZ8LaP7aQ5PWBTHUk6KeOjJQnTMzimQC+YOs4XkmeItBt+6Gjg==\n-----END AGE ENCRYPTED FILE-----\n"
}, },
{ {
"recipient": "age1ez6j3r5wdp0tjy7n5qzv5vfakdc2nh2zeu388zu7a80l0thv052syxq5e2", "recipient": "age1f9h725ewwwwwkelnrvdvrurg6fcsn3zxrxdt0v6v8ys0nzngcsvqu77nc8",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBBT25zTE9Na21GbHJOeVZw\nU3Uzck0yN0J3Und4bExsb1VGSkNleFdZbFdZCkx1TG5VU2RyZGlWT29wd1QxV1Rs\nM20rd2hWN09wQkc2K2dWS1JUenMwUW8KLS0tIGlLQ3FNL1NrODVmNENpd3F1OGlt\nVDBhNThoR015N01UTGp6MDUxdHJFWnMKvuFBvzz1k8Tg1J2GrhEO2yvJEfwxYrmX\nDQ1/qcoSOWde7N1Mvx9ih2mFlK4JiD51tWF351C0VOKNdnBBzO46VA==\n-----END AGE ENCRYPTED FILE-----\n" "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBybnprUGZPREdpZzllYjMy\nTU5DVnRrTkxEREZQUFpiM0E4SUtsSlpqOGdnCktuaGtSSHpzTnl3c2ZuMlV6MzBo\nLzdzYWhPR1RtNnFmdVg2cTUwNE5CNEUKLS0tIHRHaXlnTlFIU1lVT1dkVUdOUnNG\nWnl1OXFpVmo0WkVlSVpKYlZaMC96encKNBnnwC2/5n2ZWYvRRoCQjOPg9b3+fFJj\nhJA45nhoO38zg0Wdv4OPA+Zrw0K0lgllk02BNXMDSv8+IthlzN5F0Q==\n-----END AGE ENCRYPTED FILE-----\n"
}, },
{ {
"recipient": "age1tlymdmaukhwupzrhszspp26lgd8s64rw4vu9lwc7gsgrjm78095s9fe9l3", "recipient": "age1tlymdmaukhwupzrhszspp26lgd8s64rw4vu9lwc7gsgrjm78095s9fe9l3",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBTYysrSjZSZDYycmtNa0h6\nZHM4VGpzdDQ3ZXlYWG9ia2F2RXpOOTVnMlcwCmpTazNzeWt0RVdPT1d5b0hqQ2pp\nd3BiU0poRm53MGFFakJvYkhxVklhWGcKLS0tIGdncmxRZVB4dGVzOVMxdVM5MkN1\nTkVXRVhqbXIxa1FERjFBYnhlTkJsVmcKOUwliiZ/tMzelHQGCYi3dW/Kq0rKqW7k\nz6kSVA6vTZGFTqiK6uLqYL56BToBY1iWM4skUYTctrcXO1eEGwnbfQ==\n-----END AGE ENCRYPTED FILE-----\n" "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBTeDllU1RoL3VndEpGMG8z\neFl0TEtPL0VXUXFBVDhwRTV2TkdyaDlRS3k4Cm1hZHRDUXRFT1RPajY0Vm9oZ1BC\nbWhla2swcHdLdFdRK2NTZ1B3SGgrVjQKLS0tIGhpNE0yV2JTeHV4emdmSU5PM2ZV\nWkh5ZzY1eW5jSVF2N0Rndk0vQzlpam8K9++sBqLwmDs4WMhHADbSwlUd1te4t3/S\nDk9ENUmvu3xRX8cpT0VELjfXRf3ne6HTbG7K7Lxj5jbmfX6aSRpIsg==\n-----END AGE ENCRYPTED FILE-----\n"
} }
], ],
"lastmodified": "2026-01-04T19:47:06Z", "lastmodified": "2026-01-04T19:47:06Z",

View File

@@ -4,23 +4,23 @@
"age": [ "age": [
{ {
"recipient": "age1xate984yhl9qk9d4q99pyxmzz48sq56nfhu8weyzkgum4ed5tc5shjmrs7", "recipient": "age1xate984yhl9qk9d4q99pyxmzz48sq56nfhu8weyzkgum4ed5tc5shjmrs7",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSB5SVUveHVXUDVTakJ1QUlj\nOWlmakVlY3JucVJGaVljVUwrL25MaU5PSGxzCk02akVDWHhjUWs0aFZrdVpjUER3\nVXFsYnJCWXFSZFNrR2tLSzRBek9mRXMKLS0tIGFJZzFFV3hkeTdmYWFKWVpnWXl2\nbm9naXJiRkdSL1UzTE9xQXBXR0xENGcKxYnDk+n4u94whP0mNNSxK3KsY8RAq+w4\nbVaxdcomXo6NJq1izMdVkQFIp1hBZniaktPGsDUMWLpJ4mSrVwPbPw==\n-----END AGE ENCRYPTED FILE-----\n" "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBQRlFyYjFaNHJ0RDB1azRW\nY1ZvdnV5MEdWb3UwQnZuZU9sT2JleElkNmdZCm9CWVB6V0JUTE4xQWtYTU9hb0N2\nVUVtdXFVcHlTaXJBNlM0SXJac096dmsKLS0tIFdEdDZlTndXVmtTbjV3eEVsb2xG\nbzkrbU5jY3l1d3VFZExsVnB3dnFUdU0KKpmLWWgxnUfH62N0ibnqtGjvtRC6/2q7\nD/h4VozD7oZh1lC1S9SDl6J7j54frJ//AtsDdbIviVNTg4JzJ50kwQ==\n-----END AGE ENCRYPTED FILE-----\n"
}, },
{ {
"recipient": "age1njjegjjdqzfnrr54f536yl4lduqgna3wuv7ef6vtl9jw5cju0grsgy62tm", "recipient": "age1njjegjjdqzfnrr54f536yl4lduqgna3wuv7ef6vtl9jw5cju0grsgy62tm",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBmWGNtM0RGTHR3M2c5RnFt\nVjd1MFM1Q3hvK0xid3BnZ3VWWmpySlN0WkRvCmM3WFlUajBaN1B4cEVUVWRUOS9M\ncXVQZHZ6a21xaG53UzdKdHR5Y1JJeU0KLS0tIHRTRHpZbW1IQ2FUMDBldUtxRi8x\nN202KzhjQ3MxSERhZEsrUm00RUlGZG8Kz0BdZXtPOGe/lwiSrecSho9zPn68TZ5P\nkH3YOpAHmQZrm3P876fg8ChMv5Rgd3F679d6h334fwZDfiu2h9mFBA==\n-----END AGE ENCRYPTED FILE-----\n" "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSA3WWNCR04zV0FhT3NhNUQ2\ndVN3am4xY1oxa0tXOGlLdjFMU0cwQWo4MWlnCklPRGl5YVYyMTRVcGhhNVRSaUdo\nYjZmbTRTaGZ2SFB6d0tUeEdEVFFBU1EKLS0tIHZDOFRDTkhqbG41MGNYU2lkc2NV\nWjNYV3kwRVFrWkdNZitYZlpnUlF3NGsKne7cflZE3HpP2TbtsSSQfOLucOq16Pof\n5vQ7M8lIqi0jqL/k+siWIspilbhpkknTgUzcBsui4FxKCfEPxLFMHg==\n-----END AGE ENCRYPTED FILE-----\n"
}, },
{ {
"recipient": "age187jl7e4k9n4guygkmpuqzeh0wenefwrfkpvuyhvwjrjwxqpzassqq3x67j", "recipient": "age187jl7e4k9n4guygkmpuqzeh0wenefwrfkpvuyhvwjrjwxqpzassqq3x67j",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBLVmRUczBEQ3RLbXByeWpU\nN3EwUm9aTXRwTlp5dG1weEpKOENXTkhrMW1FCmllRUFTZGdhWXRhWUp0dG03TnFu\nN1ZjYUJ6ZGVYNnVJZTBlcTFGUkNXZ2cKLS0tIENLZmo0aHIvbjNxbllZa2tkbHR5\neHZiRjcwd2VGbEovYm5HcEdHRXRxV0EKx6A57Usvle5ItD3XysZI41M/9s9l719i\n1fpP9W/5i+dDCgE6/ui8WLVpoj62eaArFBEN7OH1Xt04DkJ/G8Cytg==\n-----END AGE ENCRYPTED FILE-----\n" "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBvYkxnekd6VnJ4MEo2d0tx\nSlhmeGhPenQzSkJLSkNzWnB1d2pvTHI1QVJVCkJRNTVZeWpUV0dzNXdMclNCUlVW\nQWFoa0oxMkNBUERVMVFGdG4ySlV6akUKLS0tIFhRdkZ6ZWdqVS91U3JPd2p6OENj\neHpCc3UydXF4SFZCSTl5SzcwL2dwVUEKLT37+0UdWfe26cyQ5gRwubqN8nBv0iwp\nBRDwEBw5H6rgzUEDaAv3fRy/AxT1TvtiavuKhlJdCsJqDpJmBjgIRw==\n-----END AGE ENCRYPTED FILE-----\n"
}, },
{ {
"recipient": "age1ez6j3r5wdp0tjy7n5qzv5vfakdc2nh2zeu388zu7a80l0thv052syxq5e2", "recipient": "age1f9h725ewwwwwkelnrvdvrurg6fcsn3zxrxdt0v6v8ys0nzngcsvqu77nc8",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBiMEo1NFhoei9vV3hlaDVo\nZjNteStQTzl2VU5WaVZFaEpmdWpzUEtVNlFFCjVUREVBZ3ZxdlBBdHplRzRxUGph\nYldyY0tpNUVqclh1TE1xemlZYm5QeGsKLS0tIE1oZ2VYSDFqZTNvK25EclRPTUZT\nbVIwdjFBQmRZTEhISTcxM1BRSk9iSWsK5/xZkLijy4p/N9WygTCIxlH0SdIiIz5W\nkGqvG27RZGqR3qwNn3vSoUBH5yOzhJAP9RAvAu5EET0jwuuG/3L0Vw==\n-----END AGE ENCRYPTED FILE-----\n" "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSAyU1pZT01HdkhZQ0FSbXdS\nQVdlTlhlcDcvdlZZcnJJTVNSQ0Z2NTZueVVVCmpRVWR4aTF1NDZ3NUV0RkpRMjMx\nY3EzYUZRSUtMVWtsVzlhQ0MvVmgyN2sKLS0tIHIzOTUrdEVWQ1l4RXQvV01IUy95\na1pvK0pLSFRYMGFyQjhCSDJmaERhSGsK1vg2a05yGfgp1Uo12+1nCv6cN4oU1s6h\nwvrCtejM1z79f5ZGJaAK8Tz/KOKQIA402QKp9eTWf5Y4Ix6OUd1t1w==\n-----END AGE ENCRYPTED FILE-----\n"
}, },
{ {
"recipient": "age1tlymdmaukhwupzrhszspp26lgd8s64rw4vu9lwc7gsgrjm78095s9fe9l3", "recipient": "age1tlymdmaukhwupzrhszspp26lgd8s64rw4vu9lwc7gsgrjm78095s9fe9l3",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBOMExJbWh4MldLckw5YjJK\nbGJyLzgxU1RUQldGVXZIcDRZL3BaODRDSEhFCkQ3TUszSWZHbkRuV0RCNWFaZDFl\nUmQvVlVydDMwL3JKVXpoNG1sdGtFMVkKLS0tIFJGK1AvZlYybVJIS0d0TlNpVU9o\nVkFXK25Ub015YWp6V3VBaStkRDM1RjgKrP91fgB60MlnXjKs9jvSnkUlSHIU6niJ\n4VveJwqCXO9zrAcS2tZaQFQ7uwga1r+iKflzLJxMPsbvshj4sfLsrg==\n-----END AGE ENCRYPTED FILE-----\n" "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSB5a2YrMjR3SjFGZ3dtb1JN\nNGxCcDBTOVVVcHVyYzlXblBKR1F2blNVelc4ClBDZHdqNTNKckpXYzlwVFhZVktk\ncExnNTNka09ZK1lWclRudnU3N3ZDQlkKLS0tIENBR3NwSk9BdFMwejNybXYvN0tV\nU3VEVC9lTkxJbFZXWUZDRG9QMGdab3cKRYgccR5zCKEZW08FHb8/PTibpTPChG7o\nHT23XeVAf524+QpExJKsQX4Qpr+2UEG3/hUoRaisuS3TMyRUtKRoMw==\n-----END AGE ENCRYPTED FILE-----\n"
} }
], ],
"lastmodified": "2026-01-04T19:44:05Z", "lastmodified": "2026-01-04T19:44:05Z",

View File

@@ -4,27 +4,28 @@
"age": [ "age": [
{ {
"recipient": "age1xate984yhl9qk9d4q99pyxmzz48sq56nfhu8weyzkgum4ed5tc5shjmrs7", "recipient": "age1xate984yhl9qk9d4q99pyxmzz48sq56nfhu8weyzkgum4ed5tc5shjmrs7",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSA1SktQSmxuSG5MY2ErNUEx\neEd0Q1RsSzhaUjgyQnViQ2hhd09UNDFzcVJBCjg0R0M4ZnB1UFBEcDNJQXFjWnlO\nQVhSeURHVnorcUVnczBtdU04WDhRODgKLS0tIEJxNm9teWIyUXhzbU1EY2l1WVBk\nZG9xUlh1cDhiQmdsYnZpNVNOTUY5ajAKPyt8ZIKTfu0azAFezj7rtSJX8X4rO712\n0w8MAvnLM8k5ij6nJtR3HylwLmZ9AfMSq4Aikl+oRu7rXs26JvPbZA==\n-----END AGE ENCRYPTED FILE-----\n" "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBJdjZ2ZEcvM3RFbThNL3VV\ncHlOek8zS05sOGJGYlJ5U0J2VDJqaHA4Vm5vClB1b2NNZk5Lc2ZscDdnd0k3TGZS\nWHllM0l4TTFuWWdtUWtidDVaRU91M28KLS0tIHh5UDZiVXEyV2lYSTQ0Rk5peGx3\nNG00OFhPUWZQamdEOWVGZVpFSkc2MFkKSPQ+6c1AyWObFZQcoH5ImtFNl8lILz59\nao3ZQ23yIiSYAZ2pAeOPh4WuNtguZ5S6dnbRDr4e/8Vzw/1qgU994w==\n-----END AGE ENCRYPTED FILE-----\n"
}, },
{ {
"recipient": "age1njjegjjdqzfnrr54f536yl4lduqgna3wuv7ef6vtl9jw5cju0grsgy62tm", "recipient": "age1njjegjjdqzfnrr54f536yl4lduqgna3wuv7ef6vtl9jw5cju0grsgy62tm",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBSWU5qSDBNY2d2YnNvVHU4\ncElNT0s0R0N3U3pjYW9kVGxFV2thY3QzYTB3CkdGRUhVMUdvR1dwdVdnZ3o0M0ZH\neXV0VUZyaFRBbEN1RXR6RGJ6RmIySjgKLS0tIGZGc3Voa09CNDFoMXVyZTJmME1Z\neldyMVAyd3pTZzB4RVhTRzZVOGs3NVkKyP8sIk/Oy1GXxG0tw8Ocjerfze+eIrNW\n5XYA96ct/2M2jiPdTxg2yEI5a9wycDkzNIzE95Xyfl3LkY8864wAMQ==\n-----END AGE ENCRYPTED FILE-----\n" "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSB6Q29GRkdZdzNjUDAzUWU3\nTlRvUm5URWd5a3MrMDE0WmFsWFRZS2NkQ2s0CktHZHpOaUFnWUhHUDdWK3FPMi9L\nMkhweU84OFp6bFhnME1hbk92M0pGMUUKLS0tIGdkYjN5VHl5L2hlL29wa2p6VU9t\nVzluZzdFNnRuL0dNUmtjZ3lOQTRCL1EKvvrXH1IxL4PVIZam2k9XRNBTLKahNCUh\nhMdfb4shNBtUs1bAiicJRPT1eAAWk4CWO5xpS3uJ0fODabrOozXdnw==\n-----END AGE ENCRYPTED FILE-----\n"
}, },
{ {
"recipient": "age187jl7e4k9n4guygkmpuqzeh0wenefwrfkpvuyhvwjrjwxqpzassqq3x67j", "recipient": "age187jl7e4k9n4guygkmpuqzeh0wenefwrfkpvuyhvwjrjwxqpzassqq3x67j",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBJVHF0N2hHTHprQlpSYW8r\nMmpIZmZIT2QzNXYxTE4vc05wcXhTQzQ5clg0CktzRGFpeEVYMXA4RnV4TVdJNk04\nM3ltL2ZyczloR3NNWm01cE16NmJNemsKLS0tIDBHczRiUVhnZHlrTmdocmNQY2NK\naU5VYWZ4QWFuK0h4cUZGOGxUL3QzQ0kKtsuW7yl1/t7q9kUhTtK0G5G950Bi5n5w\n7cxX/pfMtgPhOh3NMoeuTxc9sH4pTIthRmaLVJ+GzEc4KsMJhOp+rA==\n-----END AGE ENCRYPTED FILE-----\n" "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBLalFBanFCMWtaakVmQWNh\nc3Q2UmZ1OG1MOTNaTzFmcy9lbEFZdDZMSEVzCisrV3hMZndQekZtR3ZORjRkWHdm\nZG85M2Y1L2tPcWc1ZHpkd2R6TDJ2VXMKLS0tIEZ3Zmk0eWVsUTJwNHZJbWJhY3VJ\neVdxMVdqcTdxeUxhU0UzYThhOWhnc3MKOkedT3Tifi+6Q+C0ur1+TQuVwq75fAUa\n0cdXexCI2UIyLv+Ggbv1YIYcUiNeJNiF8JttpGAyOoVnUfIUcLsY9g==\n-----END AGE ENCRYPTED FILE-----\n"
}, },
{ {
"recipient": "age1ez6j3r5wdp0tjy7n5qzv5vfakdc2nh2zeu388zu7a80l0thv052syxq5e2", "recipient": "age1f9h725ewwwwwkelnrvdvrurg6fcsn3zxrxdt0v6v8ys0nzngcsvqu77nc8",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBLK0Z5UHpwdTFRM3dNL2FC\najdScHp1R1pHM0ZyMk1reENnTFlnTldVNTJJCmxVcVY1OHlrWExIeSs4bTBwQklE\nU0NkUnVmdlVuOHcrWjJpRFU4WTc5ZlEKLS0tIFhheExxWHBIcDBqL290WEpJK0sw\nRTZKbzRWMmJhVVVGT3A1UWJQUE1QS3cKp+jmuHUvZKbPx+/gxQUSz7QV1jLuIzP5\ne1jkJ2rJT2i8snAvihd8bsjRSFmoUnEg6kV8f0OteezNbkZoNhd30Q==\n-----END AGE ENCRYPTED FILE-----\n" "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSA4RDZ1K2RUaU4vTnFsbnpM\nRTRCUkg3RUFGS0pnK0NJSWxXTWVsaTZ3ZVhrCldDWkd4L0Y3Sm5uM0FZZnZqclJq\nUEpNRElGbEUwM0ZHRzJWNG0xT1ozZzAKLS0tIDBiOTRJSlRRbkxuRm9aTGVoZEoy\nU25iVjhxTXhvSTVqV2kwb2doMXNBeWcKK1wDpzC6aYSVJ/z0wGvSDjgXDiPeW+t7\ncgD9a1nCB/N1QZQn2IbnnkL6EsCuIQpRlzlLL9FhdRdLP+3ij+i8eg==\n-----END AGE ENCRYPTED FILE-----\n"
}, },
{ {
"recipient": "age1tlymdmaukhwupzrhszspp26lgd8s64rw4vu9lwc7gsgrjm78095s9fe9l3", "recipient": "age1tlymdmaukhwupzrhszspp26lgd8s64rw4vu9lwc7gsgrjm78095s9fe9l3",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSAzRmpkdloxdU1DRmF1Tzg4\nYSt3bks0Y21OQW5DS2JRU0ZMMHZJV2xLc0dRCmRORFliS3A0QTR1Uzc0ckd3cHA4\nRnNrdVBISG5NcjhrNDRoUnl4c2dPL1UKLS0tIHA5aW9GQkdXU0VNRHd6aEpoSzhJ\nSmd5OU1ESGJqMFVZdGhBMkdYTmlsRWsKLN36pDsdf06Rn9RLxfh46nX5u0dfyoe8\n/VvQiaWoj2/pv8NmwdFzdJQ0mTKkvEdxxY/Jk0YK+GQA/NGIVIIoWQ==\n-----END AGE ENCRYPTED FILE-----\n" "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBkN2kxR1BSUVBHSDhuYVVu\nREZGeHFNT1lUVGljMUpmV05FcHROdEcxK1hRCjhYYkJ1U0JaUkdRZG1tMXFudlRM\nSllqZVBsd3JDemFITVVlWkZ4Uis0NnMKLS0tIHRjWHFzZEgwSmlnWFVpQk1tcnJC\naytMVk92U3lGbCtWdXA2UDFXY29Ndk0KPmSHHP4Nyb4dkyz9a60OP5Lc8MlbPGxo\nLReOVEJ86yGbL4ARFptqrpcJ6ILEagWSYLEkqoZ1xiG1BSvs6J7dHg==\n-----END AGE ENCRYPTED FILE-----\n"
} }
], ],
"lastmodified": "2026-03-23T09:09:07Z", "lastmodified": "2026-03-23T09:09:07Z",
"mac": "ENC[AES256_GCM,data:phFpHUzJ/7rd1k1fr9YFD2FplXV3Qv5zFni00fAgG2VtVoIdFYeNRE0EEh2ulnKcIXjB/5lZuMss2bIoBt4i46BB2ZHTpnWksbeHowdgkHL+eXT1F7b11S1y9NEKc/ug3jarPwyj3usmVQJlllAzANCQHGrYQdBrFXvFae3cH40=,iv:4v3k4q0SxyTvHoqr2Abf6OhAcANCT9oWTa5Kwlb5GCs=,tag:Hn+fUEmOu7fWc7SSBe5yfA==,type:str]", "mac": "ENC[AES256_GCM,data:phFpHUzJ/7rd1k1fr9YFD2FplXV3Qv5zFni00fAgG2VtVoIdFYeNRE0EEh2ulnKcIXjB/5lZuMss2bIoBt4i46BB2ZHTpnWksbeHowdgkHL+eXT1F7b11S1y9NEKc/ug3jarPwyj3usmVQJlllAzANCQHGrYQdBrFXvFae3cH40=,iv:4v3k4q0SxyTvHoqr2Abf6OhAcANCT9oWTa5Kwlb5GCs=,tag:Hn+fUEmOu7fWc7SSBe5yfA==,type:str]",
"unencrypted_suffix": "_unencrypted",
"version": "3.12.2" "version": "3.12.2"
} }
} }

View File

@@ -4,23 +4,23 @@
"age": [ "age": [
{ {
"recipient": "age1xate984yhl9qk9d4q99pyxmzz48sq56nfhu8weyzkgum4ed5tc5shjmrs7", "recipient": "age1xate984yhl9qk9d4q99pyxmzz48sq56nfhu8weyzkgum4ed5tc5shjmrs7",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBhVVBSMm5hUmlnUUhrSmxL\nNVVZY2dZajlKSWMxZ3dMVlBORjNUaHBOTEZnCmNpNFRodDFFb2xNdnlYVHRiK2ZT\namV4YXFOd3FCK1ZiK0NIbW56Sm1wNm8KLS0tIHk5TVlxZHB4NzFiQTFQeThUY0JG\nV0huc2FzQU9WWUFJWUFNS3JoTzFaTjQK/T7aFSWnkpv1Qbx47LFp85bkaCOt5vrs\nLjiAGimUOVkr7ZcJz3540JWvBBUNNRKmM3QDNlgPg5luI0fa7+pNRQ==\n-----END AGE ENCRYPTED FILE-----\n" "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSA3NmNaR1dvaXA4QUVqbkNP\nbFlpTzlHMXd0SGNsd0wyZ2FwZDFBR0tuaWxnClZuNzZma29BWURibzVUZndUc2hV\nZUpUdE94TC9Pa1dLQitLbE1qRXZJNVEKLS0tIG00ZGJyNHp0eEtQeDNsemErNVlo\nclBid2Y0TysyZ1RtK3F2RS92QjJJWDAKUruZmiTB5tkBF5VOO7dv+GijgAO6AVNe\n6t/xyot/Gc3WFOeNkYUk1K7fo0oLFfRG1tZBW+4cwFi/FMUpyjCxig==\n-----END AGE ENCRYPTED FILE-----\n"
}, },
{ {
"recipient": "age1njjegjjdqzfnrr54f536yl4lduqgna3wuv7ef6vtl9jw5cju0grsgy62tm", "recipient": "age1njjegjjdqzfnrr54f536yl4lduqgna3wuv7ef6vtl9jw5cju0grsgy62tm",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBSU3ZFZkZqUjcycmtYalRw\nYXlSQlFvcXFwS0xYQ3BVUXlPeDBHeUl0R1J3CkRGRHNaZWxQN1hISmh0VzNwZnlM\nbUFYVWw3dU5jNVorOVFKYUtyWHl5Q28KLS0tIGU1MkhrNHptZUZldHM1Y1FZTytV\nY0VvQjRmUmpIbW12WDlkLzVwSE9uZkkKMczm0x03kPRAD+pdXyDRTqaQckU4MKxg\nfIJAAwRAbskLF37SUzvefa0DCdKpyK876km7nOyIZunTE1j7xIwMNQ==\n-----END AGE ENCRYPTED FILE-----\n" "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBKcW5Ld3BjdnYvanJYeHM4\nNk1vS2o4WEg5Ni9aUlBuNmRKMHJpOHh3dTJnCnlhTFRDdXoyS3kyVEM4Ukc5N0pB\nazBwQlIycHliaEt0Unk3WUZtcURJRXcKLS0tIEpoVHlFLzBCSDdGRVZSeCs3SW1n\nK3k4T0VkeGhBd3NSYlNYT3JudXRQVWMKPu0HGvtYlgdih3vNrqvEvke0Fg7hr9xm\n1f3G7GpeRBLZlab6UKKzbFFgf0NADnGEBmWMP7e9taVYqYy1FFf7lQ==\n-----END AGE ENCRYPTED FILE-----\n"
}, },
{ {
"recipient": "age187jl7e4k9n4guygkmpuqzeh0wenefwrfkpvuyhvwjrjwxqpzassqq3x67j", "recipient": "age187jl7e4k9n4guygkmpuqzeh0wenefwrfkpvuyhvwjrjwxqpzassqq3x67j",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBnVE9tNEhSRUFydEhvL2tu\nbWVuVXhVYlhTVVFnMTZrOEFBOEZKZ1A5TFRjCnNSSGVjNmtYUnVsdGZRcjg5SmNW\nbys5alQxWVRRNS9rV1V5aU9nNEF5ZHMKLS0tIFl2MjgwclFZOEtvVmR5TDFzL0lm\nblBnekRPRzhqem1CSEgyM2YzTnJvck0KwITv/a+Z0XQ0FeI7rPti9IGZPuILWVhC\nTC0dWuJ3uSflAxgRvGpaQyvsKMLgkXCRf58kx17cHjPT+Z8EsXeBSQ==\n-----END AGE ENCRYPTED FILE-----\n" "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBQUFgzR1hFemRNeHorN0Fm\nNld4eWhwMWJHdWhBYVQxNUtGemlEeU5jcUcwCldlSENWSXBxYjN6TXZUM2Nuc1Mr\nN1Zlc0lWY29KcGI0bWVwYzVnS3ZBM2MKLS0tIE1rVnhhNFJkeFZjeDh0RVM3eHU5\ndkJPZm1xQTdWZTdXNVJrdHpXV1dIWVEKZlyGzrce2W4M8LVDbYGXIpK+cVqqB86t\nqibOGTNdSAhd2Y+IWb2FdPr0uxoixC4j6Mk+Nw2bXhDKXp+VAAlWog==\n-----END AGE ENCRYPTED FILE-----\n"
}, },
{ {
"recipient": "age1ez6j3r5wdp0tjy7n5qzv5vfakdc2nh2zeu388zu7a80l0thv052syxq5e2", "recipient": "age1f9h725ewwwwwkelnrvdvrurg6fcsn3zxrxdt0v6v8ys0nzngcsvqu77nc8",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBxdEpHdzlVbDdBcWJyNDJC\nQnpTaFZLMno1Q0ZJcXVjMUIvcTZ0K3lDY0RzClJlbHhqbWNVemFpRFh2WXRGV1VZ\nRENTV1ppN0J5SVFDdUR1aDRRdTFKUUUKLS0tIGRFMGIwVkliRitaeXdHTm55QzQy\neGdzYStQalVFb3ZPdkp2VFJ5SUJ6RkkKeeOJ1MjTKqevHHl+5dXL8n5o05KV6HvS\nMf5yO3rxYDLzO5ore63G3KjqCS9i36mTzLoyJgYIKQK4IAh/AKcI/A==\n-----END AGE ENCRYPTED FILE-----\n" "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBKN0x6QnJCZklFTis0QVJE\nbWdJZmxJWmxRYUtNdmFmaHM4bTFQOGZENVY0CnkxbW5Qd3lQTHlNV3dEWHJVS0Qr\nNUhDMzBmdzI2dzNGY3J2dVJwd0wzYXcKLS0tIEVjb3JNODY2ckNUZDVtN0VYaTNm\nSnZWRUpFQkFXWWRBVVNVcUVNYjRxb2MKGa0tBfwyEWrXmkCIupwHID2j357UMiSe\nsg70OdmszMZ+MDjcLIXVkjQfrLTbtIKU+u3jA7MDzECZTm+nAk8cTg==\n-----END AGE ENCRYPTED FILE-----\n"
}, },
{ {
"recipient": "age1tlymdmaukhwupzrhszspp26lgd8s64rw4vu9lwc7gsgrjm78095s9fe9l3", "recipient": "age1tlymdmaukhwupzrhszspp26lgd8s64rw4vu9lwc7gsgrjm78095s9fe9l3",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBxZE1lU0UzUFQ5Z1ZQRXBj\nbktnN0lWNXZ0QVhPS1Z5cFNPeDJlVCtLdWlvCmFPSHdkeUlONHptbE0xcjg5SUQ2\ncnZKY0JWY0haZFEzcWNnUHdnN250WmMKLS0tIEhlbG91K0tabmVVSDhBeStoUU5j\ncFRlUk5xZjRRVC93UzNJUUhhTVQySlEKB2uBSMXOJd1ufB3i66ldhXlnbquWcXgi\nXjuJb5ud4Nz97wlSnqAADoY8V9aYJM30byL4VJeXiMq1oppDptbQzQ==\n-----END AGE ENCRYPTED FILE-----\n" "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSA4YTdqUXp3eVdmV1drQ00v\nMkxFUEIrdTBoRDREbndMc2hkekdWZDlSSFZnCndmMmtvTDRpRjNvUENQaVY3S29k\nRGFaUStJRmlSSEJxMG82Nm1Oc0puOU0KLS0tIFdUdEZNR2tZdlBoTFhjK0pNNnNW\nVGZIRytNNnFOQXZMTlhHMDRMUnJ6MG8KDaPfk8dcE3ijfwPhbeN909I6zPdjxX1M\nFnQiSGQuKvWuVzn2qjl8AAFxw1nVO/3l+7eu/PN2yeKIb3rMUvWpDQ==\n-----END AGE ENCRYPTED FILE-----\n"
} }
], ],
"lastmodified": "2026-03-01T17:47:46Z", "lastmodified": "2026-03-01T17:47:46Z",

View File

@@ -1,30 +0,0 @@
{
"data": "ENC[AES256_GCM,data:qZCh11bq1W7FwXMrDX5KMOQFsgsKgbhimZ4TDNvv1BDU,iv:PJJJB5uyhuTUSA4doQ6h6qMbmPgerPv+FfsJ0f20kYY=,tag:lXpit9T7K2rGUu1zsJH6dg==,type:str]",
"sops": {
"age": [
{
"recipient": "age1xate984yhl9qk9d4q99pyxmzz48sq56nfhu8weyzkgum4ed5tc5shjmrs7",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBvNXBjN3RZcGd2R2MyQWhR\nenBpa3VHMFhkWDEveUtmZWtPSk01QkhUVFJFCnRSc3ZGdFFjVDJnbHpwKzZ1TUdI\nZUdWQzM2bmZ1RUl4UVpCbDJoL0RkQncKLS0tIHRxUzFiaS8wekdCQ0Z0dTMxSnZ0\nS0UycFNMSUJHcVlkR2JZNlZsbldoaUkKe4EaYIquhABMEywizJXzEVEM1JbEwFqU\nAmQ6R+p4mNgaR5HCrnINQId3qqVfsP2UDqPDepERZIA0V2E5h9ckfQ==\n-----END AGE ENCRYPTED FILE-----\n"
},
{
"recipient": "age1njjegjjdqzfnrr54f536yl4lduqgna3wuv7ef6vtl9jw5cju0grsgy62tm",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBBZ1hYenpVTm1lTFdjTEJj\nTUN5MzNtbzdWNzQ2VE9tRlJJRVRYTUtLOXpnCnlLWTZPNGE5NDlwRHhWSnlTNUhv\nc3VZVklEZDB5dXlFc01wcEQxckl0NjgKLS0tIEE5T2JmNlJaYkZpWkhYdDhPSTlW\nei96YmhUWUZ2enVnRjhKOVlNZmNHa3cKxaHBtCwLDLNcscptlDk6ta/i491lLPt6\nOh/RtbkxtJ02cahIsKgajspOElx8u2Nb3/lmK51JbUIexH9TDQ+3tg==\n-----END AGE ENCRYPTED FILE-----\n"
},
{
"recipient": "age187jl7e4k9n4guygkmpuqzeh0wenefwrfkpvuyhvwjrjwxqpzassqq3x67j",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBJbFFpQzB2OU9jYUZlL2Nl\nOEZ0WGcyR1BpSmZGU0Vxa0N6WGpCbXBXZGxJCnlLK0JJWElndC9KRGN5d1NNd0tj\nUkExQ0tTSGRKQjJHUGtaWUtKS285MU0KLS0tIGI5cWtVcW43b2Q5VXRidllzamtB\nV1IxYnN1KzdaaXdvWG96a2VkZ0ZvWGsKxdbXwbgFIc3/3VjwUJ1A+cX0oaT+oojz\nrI9Dmk782U/dQrcMv1lRBIWWtAdAqS6GiQ1aUKk5aHpuHOZeHHFjMw==\n-----END AGE ENCRYPTED FILE-----\n"
},
{
"recipient": "age1ez6j3r5wdp0tjy7n5qzv5vfakdc2nh2zeu388zu7a80l0thv052syxq5e2",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSA0aTgwQ3ZEVG41eW9MQ1RX\nSElRdkdvL21kZ2ZLeGNPbGJiNll5WjdsM2gwCmJQVmJjWEJBaVhEKzJqYWlib2JX\ndWRzSE9QTVQ1c004dldzR2NtR3pvQlUKLS0tIEsvZDNnNWJJaWZyOCtYUEs1eklh\nNXl2dUM0amVtSmdjTy83ZzBSeGp3Q0UKQ/cUYPACFNcxulzW964ftsHjoCBRGB66\nc1e/ObQNM+b+be5UzJi3/gago9CHRzZ3Rp6zE9i5oQBzgLGWlJuPNQ==\n-----END AGE ENCRYPTED FILE-----\n"
},
{
"recipient": "age1tlymdmaukhwupzrhszspp26lgd8s64rw4vu9lwc7gsgrjm78095s9fe9l3",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBLNUk5aHBqdEJoYWdaeVlx\nOUkrSXMvRFRmQ29QRE5hWTlHdlcwOUxFRXdRCnE0L1BQdHZDRWRCQUZ2dHQ2Witi\nQ1g5OFFWM2tPT0xEZUZvdXJNdm9aWTgKLS0tIENvM1h1V042L3JHV1pWeDAxdG84\nUTBTZjdHa1lCNGJSRG1iZmtpc1laZTQK/twptPseDi9DM/7NX2F0JO1BEkqklbh1\nxQ1Qwpy4K/P2pFTOBKqDb62DaIALxiGA1Q55dw+fPRSsnL8VcxG8JA==\n-----END AGE ENCRYPTED FILE-----\n"
}
],
"lastmodified": "2026-03-25T11:23:08Z",
"mac": "ENC[AES256_GCM,data:UM0QWfQueExEHRjqNAEIgwpVBjgpd0a6DXxDeRci08qMzTypTlWIofUGMyM1k+J+mUKr3vWMe3q48OwVtUaXnbWimH+8uFEwb5x0e+ayTg+w/C23d+JJmQIX8g5JXtknUAZFNrh3wdZOadYYRr/vDzCKud4lMrmFBKFXsH1DPEI=,iv:kTx8omo8Gt4mTLAs6MoLxj4GizWpxlSXMCTWNlRR5SY=,tag:PB7nMCVxCLRQdhC/eelK/w==,type:str]",
"version": "3.12.2"
}
}

View File

@@ -4,23 +4,23 @@
"age": [ "age": [
{ {
"recipient": "age1xate984yhl9qk9d4q99pyxmzz48sq56nfhu8weyzkgum4ed5tc5shjmrs7", "recipient": "age1xate984yhl9qk9d4q99pyxmzz48sq56nfhu8weyzkgum4ed5tc5shjmrs7",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSA2UDczc281UGU2aTlHeEp5\naEdiZGgzK01VODE4enMrRTIzNmNwa0srRm1rCnd3K1NnOEpXVDJHWGp4VXhFTUJr\nM1c5cXVza3NQOTZjTElTcFRhY0ZDTE0KLS0tIGp1Y3d0dXg3NnBnQmpmeXI4a2NQ\nZ1lIU1hCMUJMR3lidkxRNWV6SzJkeGsK93sF6uCKKFh12hnlSZ9DMxBv8/j0LIp5\nMGeUbbX3sWdk4I04QjXl2Yi9ER5d9W3zDfrQ1u5CqM3IqSPHQ7VgpA==\n-----END AGE ENCRYPTED FILE-----\n" "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSA2UGRKeHloRTVjcHUyOGJ3\nM3FKajJTZXE1d2pnUXEwa25BMytSZjlZV0YwCjFBUjhtUDFRckcxK20zcmxpOVdX\neGhVaC9jaHJuSm9tRktZVlFLTk5yS1EKLS0tIG1CRkdhVW9LSU9sY3dRUS85eHFU\nRnpxdVg3dzM1bldQS0JQY2hYbkhXSVEKREVnO8OsmWfrJ19Vr7KY3O97XG2LXrSP\nxvGOZIDMI2UIfTFWm5TcFDOOD34HV0fWtzjn9GYSx9XN5fQX1YK5vQ==\n-----END AGE ENCRYPTED FILE-----\n"
}, },
{ {
"recipient": "age1njjegjjdqzfnrr54f536yl4lduqgna3wuv7ef6vtl9jw5cju0grsgy62tm", "recipient": "age1njjegjjdqzfnrr54f536yl4lduqgna3wuv7ef6vtl9jw5cju0grsgy62tm",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBPcE55WTAxSERXdUZudmxJ\nODQzaWFBRXl4Ly93MjM3K211OXBtL3A1SUZRCjBjUnRmL0FVdXgvWExsM1E5bUhs\neHAvcEwvUmxDeWFoMWJ4RDQ2VEpzcGMKLS0tIHJUNWE3Z3drMVYwSkV4Tll5emxN\nc3VnZFllaTJLckE1M2ZYaE4vdFNTSGsKvQhxtlSjH9+tjaT77ZiuMQeb0RPotSeS\ncoYf0R8TCvy8c2CP0YYs99bIJP1zx4RirlC/80Ji9eXBYkJaoDRkew==\n-----END AGE ENCRYPTED FILE-----\n" "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSA3bzdOOWRzMm9BOVNoaTkz\nbW1YZWZjVkF1MHhMcy9nTFhPUjdpamRLZmprCmtIK0VxNVNSR1MrbGlqakZzV3Jw\ndE8yZ2lyOE92MStpRWExUENDSGNCbHcKLS0tIGMyS05FUlk1a2N3WmFQcGdpZytD\nRWxQc0hucmxCK2psSzMrWmt4cTlxMGsK95lvBRK4daCbixLEJkEToAg7/yL2iJ+j\newP0caUe3E9UyhuFmZCiN9VsYaQy0q12KcSTq/KMybj6UmDThaOMRw==\n-----END AGE ENCRYPTED FILE-----\n"
}, },
{ {
"recipient": "age187jl7e4k9n4guygkmpuqzeh0wenefwrfkpvuyhvwjrjwxqpzassqq3x67j", "recipient": "age187jl7e4k9n4guygkmpuqzeh0wenefwrfkpvuyhvwjrjwxqpzassqq3x67j",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBMS3dYenBFalBySUJKcDZl\nQXRBMTFXN0tCQTM1bFl2SDNma0xRZnBJbG5nCm5hTXlIZVVpREpLUUoyRitnWWZJ\nZ2VPSFBBK2h1aFZldGRNZm9JVDRvd2cKLS0tIEg5cHgrRE1sS0lFZkxSNHdweXJz\nb3p3Y1lZUjBxYWUxZjNBbDlFUkd6VVUKLaUOi7OESO8yTZyBcgOt0LhjM6QKMwlm\nl6MSVJYBtmcp4wdZdaCsiC28dlHg98qXauLppgnkda8LyhDUliP5lQ==\n-----END AGE ENCRYPTED FILE-----\n" "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSByTnVqbkNSbW1nNUxpNlNQ\nekRWWEJBeHdmMG1jYk0xeGNUMnBlSk5xNVRnCmNhTlZEcGtUbnlNaU12MGpVVDhp\nOHRRbkVBZXg1bVZiNmtXVHlxU0dDL28KLS0tIDRhV2R3akMzZHFZcytwSHJ3QU9I\namhWREJHZGcvMk9DeUdsb3FtOW1wUzQKhRDjz4lv9BPjOl0ZUstNlMVAw5x2Dgql\nQ06h5a1qm6YqktDvJLiKiUA64ZqDFHvB61x5qVn0Wc913vLayoEIwQ==\n-----END AGE ENCRYPTED FILE-----\n"
}, },
{ {
"recipient": "age1ez6j3r5wdp0tjy7n5qzv5vfakdc2nh2zeu388zu7a80l0thv052syxq5e2", "recipient": "age1f9h725ewwwwwkelnrvdvrurg6fcsn3zxrxdt0v6v8ys0nzngcsvqu77nc8",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBkUTJEcFp4dWxzR3BuRnJw\nNy9vcStGT3ZnOHBYczZSUHgvN2Fzc0MzaG1nCjdCNXRnTHN0YUZZKzBPVmI0UTBn\nbnRNZGxyM0tMSW9HVG1yMmxQeUFXUHMKLS0tIEYrOTJMWlZndHNxeHA4U0ZFajJM\nclRPdVAzWThwNGRpSzJkU0pxY2llbnMKpMQuw76xRJ162EJ91ui6jLNeBY6+XCiG\npKJB9YCTnh+JlxFzm9LU5s8bI4XzsywdrdKMYck1G4A8NW4MoQCrig==\n-----END AGE ENCRYPTED FILE-----\n" "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBFdmIzR1pjNnBnckpEMzcx\naXhyZHNXNUZ6SFdnSFRoWWIzUVFFNExvSXdFCkNXQXhDY0h3UURPRlJSeGFrVC9B\nblFrTitZVllxYmVwbEZRSk8xelpYY3cKLS0tIExkeVlMNWtjZ2VkL1AreTRHUXNj\nYS9LNTNLd3BhUlpNdWp3UUdNQmhLYkkKpYAbuft1WSROAm+3iGvU1TRhBP3HgiB8\nuDOdv9NTw5AhhTK2VIixhJHZI3gIAvCwndmQWV41PDgoDqmeVwxrsg==\n-----END AGE ENCRYPTED FILE-----\n"
}, },
{ {
"recipient": "age1tlymdmaukhwupzrhszspp26lgd8s64rw4vu9lwc7gsgrjm78095s9fe9l3", "recipient": "age1tlymdmaukhwupzrhszspp26lgd8s64rw4vu9lwc7gsgrjm78095s9fe9l3",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBMWnYzbWVNM081WnJ2MHh2\nbE56TUZDU1JqdkJHczVaUi9ReTFsNENRdlFnCnArZEYrcmtIMFZEZVhUcG9MdWNO\ndVRNS0w3S2FqMzNkOGVqcTJwTFQyK0EKLS0tIDRnSjBaRC9xd0ZsUy9CWm13MWZu\nTXQ1WEdZR2h0UGM0cmZOMFZwdzdzd28Kv+KkGEfWvQVgOtiJMqEwbIgQcQI9U4fC\ngh+9QrN8blwbt0OVqyu9tPWrP9bPOEhM6U13wj4Q8BqzrNVsLix4Lg==\n-----END AGE ENCRYPTED FILE-----\n" "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSByVDBtZzJibHNpV01WUTF3\nWm1OajJnVjF6TFluK05UWXptdWJFekY2Wng0CnliUGhmTHZZUHNRY0piM0ZXT3J6\nc2puem5VbVczOVlKZ3RqM084N3JCMEEKLS0tIHppbHdvYk1LTXRMeXp1eEZYeGd1\nT0FJZlNJNnAvQWNGcnBCdnBNTTg4UzAKTwMtaAQk5/8qGO4M3kmjfP8yJmrDiWiJ\nsD23c6S7wwAZdltK1spIRMkSS2AhJBN+ePXBiAGWjatSWsBKlDZqww==\n-----END AGE ENCRYPTED FILE-----\n"
} }
], ],
"lastmodified": "2026-03-13T17:29:46Z", "lastmodified": "2026-03-13T17:29:46Z",

View File

@@ -4,23 +4,23 @@
"age": [ "age": [
{ {
"recipient": "age1xate984yhl9qk9d4q99pyxmzz48sq56nfhu8weyzkgum4ed5tc5shjmrs7", "recipient": "age1xate984yhl9qk9d4q99pyxmzz48sq56nfhu8weyzkgum4ed5tc5shjmrs7",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBqMHVFbUFpOGdXc0t4Q2dj\na1lxeWFBR3ROTDN6RVlwYlgxZmdhd1ozNmdvCnVpZFFnaGFqWGs3UmRtZytlUEZK\nK05lM2ttWmNOSGU5ZlBTYWZ6K0dCbzgKLS0tIEpHQXRmaVVhbXJRRUI4REdCZWI5\nTmFDY1gvWVVkUExpeWYxTjlCb3MwUUUKnHzWgaUvDXH1I3TPyY38h9Pbjqk4Whma\n8ctECOpg1obtMr+a9Bdw11IPwAe3R9K0ZfE681HroETCHRxw2+38yw==\n-----END AGE ENCRYPTED FILE-----\n" "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBDMWRWUElvQ3ZRTjFKRVQ3\nYmZINCt2eWJBbWcvLzQvWWlURlpzMldSWkZFCnhidzFMZjliUkt0MUVML0pLQ21L\nQ0pwL2pteEFlaW0wRFJBZk9BSHRXa3cKLS0tIEc1dXlNdDQxUFZrVUc1Z0VBMHQ1\ncXNaYnd5dVVvTU41bTRnRUdGVjEzczAKjfqq9yciDRBW+N2M+5mRRztlW5+JRL0n\nRL5aZlsDVo3SJAxjaaGx4zgwN7dUX6JPrTGW6sYd5LCGuHZWX8phfQ==\n-----END AGE ENCRYPTED FILE-----\n"
}, },
{ {
"recipient": "age1njjegjjdqzfnrr54f536yl4lduqgna3wuv7ef6vtl9jw5cju0grsgy62tm", "recipient": "age1njjegjjdqzfnrr54f536yl4lduqgna3wuv7ef6vtl9jw5cju0grsgy62tm",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBFRU43QXpWb1JDUVJCck9z\nNkFXWXdWY3pJZmx0Mjc5SDgwcVFXWkRua3pVCjAxQkJBUXF5T3hxek00ZzhRcmF2\nb2tWQlYwVXpiM1dMRFdqNXIyQjRhb1UKLS0tIDB2Q003YjlCTXUxa2k5RmMrWVds\neXhLSzViMHl6Smo3dDljRnRsT3lRb1EKKxaXLegA0V6vTnYF7l7MsJD42XWidB/h\nGiOojZ6r3JVV4Wx4Xl/MPk7lYvYN9VH//PUlA7h0Q9iVaQS+HA+aZw==\n-----END AGE ENCRYPTED FILE-----\n" "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBJNXBjRGdmWGFEMmY3T0F3\nS2lFcXRKWnl5T0JnZkRxTzJUK0FlWlIvRVNvClVESkhEdFgzZnhKTTdaZEd3N3Yr\nWWJYTWI2M2Mwd3F3ZWdJVkdLMWwrRlkKLS0tIFFwcEdQUVA1dUFRRUtkUDN5RUZt\nTjlQNEx1ei9UeVpRTjcxL2t5NVBUajAKaZOQ1GQ3vtgZdxxoXQKU5Q3jSUnakW9F\n6bHKbhBGSFwCb3u9+mgVaefBU11vT1ue1kA8sCM03MkYBmM+8cPv7g==\n-----END AGE ENCRYPTED FILE-----\n"
}, },
{ {
"recipient": "age187jl7e4k9n4guygkmpuqzeh0wenefwrfkpvuyhvwjrjwxqpzassqq3x67j", "recipient": "age187jl7e4k9n4guygkmpuqzeh0wenefwrfkpvuyhvwjrjwxqpzassqq3x67j",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBob0MwUkJvRDRnQ05XMVRh\nODY5WE8wVlBkTW8xWUlLWUwxNHV6Y0FJTW5zCkdVV1dJTGlxREdGbWowak1HL3pH\nejNKV0VvejRadTVCeVdxdFkyMk0yOG8KLS0tIEllWldkaXZ1TzYzSmFoQ21nMkV4\nZlFxN0wxWlpvSHdreTdxQ1NZcit0dGsK+23q1QS5L1JzqFcKUygqfmYc64qVOO1R\nSiGa7O7ZoRMnn0Zfjas3K+byJ4PzNitEEU64s1518L/e78oHKQ9vVQ==\n-----END AGE ENCRYPTED FILE-----\n" "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSB2a01mMmNPckZTbmJvQUVC\ncDBrVCtlRmpJMVByeE5zWFMrUCtHSkdDRFNBCmpZazA4Q0duejNYcmoxWDVMR3A5\nclh5OXZNNWd0VjI3cGlWbk5JTUw5NHcKLS0tIEVma1VsQ3cwSzdGZTlpanA2ZWNF\nQ2xLSjNkdjJyby95aTdTYk9HSHFvM1EK29MPNiB5GKqBFPgLcIp1giWzYRpPZ7ur\nDSKqvZ4kbEyxgdLxJ/i2P2iv4/4bb14LWQno9iEXsVeZ/iKf1mDigg==\n-----END AGE ENCRYPTED FILE-----\n"
}, },
{ {
"recipient": "age1ez6j3r5wdp0tjy7n5qzv5vfakdc2nh2zeu388zu7a80l0thv052syxq5e2", "recipient": "age1f9h725ewwwwwkelnrvdvrurg6fcsn3zxrxdt0v6v8ys0nzngcsvqu77nc8",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBaakJHOXRxeGRLZldZVmVi\nNHpFNmtYU1BCUnVKR1I3ZC9CSW5ORXlCbG5JCnpyUFA5NDNzbExob0s2R1EybzVJ\nb2tqSWtmZk4zQ1RZQmhKUmFvTFhTUjgKLS0tIG1DV04zMzVXdk9yQ1FFQzZsakM1\nTXQ0dCsrN0Z5RUVRamJUZEo5UmRiZWsK/av25ulEhPih+AK2rnktJsK6/SYS4IDx\nBuI2hBRZTdw1m4bBcCwMkLn/F1Rxd37xI1NZKKE3+CgnIs5G9XxhAw==\n-----END AGE ENCRYPTED FILE-----\n" "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBteGpDZkpLcFIyVzNMbXFV\nMlY4SExrTThkdEZJczdLdlVwUWpVUWVuTUJvCmFXVG1ESHlIOExSdFJJOXhtb1JZ\nSVBocEtJTjRkckNwYktOdEdBQVQ5TVkKLS0tIE9iZ1pzMGhVc3F4THJwb2pFK2dm\nWnZ0SkhyNFMyQ1VVa0tWbDlXR1ZwK0EKPJq69mAeq3ghUpY+VuS+xCKeJVwmYnGm\nQ8mtmk/5skqbeOSKAbIiEEY5avPRy+Uy3YTD7g6vpSCqnpNSLEc/qg==\n-----END AGE ENCRYPTED FILE-----\n"
}, },
{ {
"recipient": "age1tlymdmaukhwupzrhszspp26lgd8s64rw4vu9lwc7gsgrjm78095s9fe9l3", "recipient": "age1tlymdmaukhwupzrhszspp26lgd8s64rw4vu9lwc7gsgrjm78095s9fe9l3",
"enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBEV1dtckt6Wi9HQ3Fxd0Ev\nRnRzcWs5SWRmNEpYbmZQTnEzbnliVDRkZ3pnCjUyTEFOYjd5SC9STVd2V1o0NGVR\nelphZE1ISGlkemdXSTBlcURDYUh3OGsKLS0tIHUxZFRKbVFsM0FWZ2ZEa0FaYjl5\nTWhTMnptYU4xNmp4aVNYMDRORkk5NVUKiwwbLVe3mtVe9sgeSA/FUhkowfFeirbA\nXXL9ct+lizSNXsFG7w3xpZsEaNGbHF28maZHWOpZotKRbdx4w6UJGQ==\n-----END AGE ENCRYPTED FILE-----\n" "enc": "-----BEGIN AGE ENCRYPTED FILE-----\nYWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBIZENwNHhLdzNYYmEvOHNW\ndEZJbVpUMGJyaVU5WkR0bUc0NzlsMWtmdjN3ClMyM2dGa3BlVTFqeDB6ZHRGSTNu\nREZSN0JXUkU4ektnWis0L3lCOHdpTmMKLS0tIGQxZTFRYjYvMS9uM2lYUThwYmtp\nbkYrTlNQeEd6aVNTY25aMHFVbnJXSWMKulwDyWezqDUIlv/aHMMGzGqOFU3VGaw3\nYvm/e2wFWPenFH0gfALkdC8upRghE8r9jkXcj1pSmDBbfjNghz+2oQ==\n-----END AGE ENCRYPTED FILE-----\n"
} }
], ],
"lastmodified": "2025-12-20T21:46:17Z", "lastmodified": "2025-12-20T21:46:17Z",