diff --git a/bun.lock b/bun.lock
index 8fa8d02544a5..3b475d9e6eea 100644
--- a/bun.lock
+++ b/bun.lock
@@ -415,6 +415,8 @@
"@clack/prompts": "1.0.0-alpha.1",
"@effect/opentelemetry": "catalog:",
"@effect/platform-node": "catalog:",
+ "@ff-labs/fff-bun": "0.8.1",
+ "@ff-labs/fff-node": "0.8.1",
"@gitlab/opencode-gitlab-auth": "1.3.3",
"@lydell/node-pty": "catalog:",
"@modelcontextprotocol/sdk": "1.27.1",
@@ -1228,6 +1230,10 @@
"@fastify/rate-limit": ["@fastify/rate-limit@10.3.0", "", { "dependencies": { "@lukeed/ms": "^2.0.2", "fastify-plugin": "^5.0.0", "toad-cache": "^3.7.0" } }, "sha512-eIGkG9XKQs0nyynatApA3EVrojHOuq4l6fhB4eeCk4PIOeadvOJz9/4w3vGI44Go17uaXOWEcPkaD8kuKm7g6Q=="],
+ "@ff-labs/fff-bun": ["@ff-labs/fff-bun@0.8.1", "", { "optionalDependencies": { "@ff-labs/fff-bin-darwin-arm64": "0.8.1", "@ff-labs/fff-bin-darwin-x64": "0.8.1", "@ff-labs/fff-bin-linux-arm64-gnu": "0.8.1", "@ff-labs/fff-bin-linux-arm64-musl": "0.8.1", "@ff-labs/fff-bin-linux-x64-gnu": "0.8.1", "@ff-labs/fff-bin-linux-x64-musl": "0.8.1", "@ff-labs/fff-bin-win32-arm64": "0.8.1", "@ff-labs/fff-bin-win32-x64": "0.8.1" }, "peerDependencies": { "bun": ">=1.0.0" }, "os": [ "linux", "win32", "darwin", ], "cpu": [ "x64", "arm64", ], "bin": { "fff-demo": "examples/search.ts", "fff-grep": "examples/grep.ts" } }, "sha512-GbGdl1LaeYSfLZSnWIZYn/wqfGFs+XUnxutByJ4ZZ4ZEmw85bwMzTVukRIlujZHxeCIbhFT4OMlp8btp8Jcogg=="],
+
+ "@ff-labs/fff-node": ["@ff-labs/fff-node@0.8.1", "", { "dependencies": { "ffi-rs": "^1.0.0" }, "optionalDependencies": { "@ff-labs/fff-bin-darwin-arm64": "0.8.1", "@ff-labs/fff-bin-darwin-x64": "0.8.1", "@ff-labs/fff-bin-linux-arm64-gnu": "0.8.1", "@ff-labs/fff-bin-linux-arm64-musl": "0.8.1", "@ff-labs/fff-bin-linux-x64-gnu": "0.8.1", "@ff-labs/fff-bin-linux-x64-musl": "0.8.1", "@ff-labs/fff-bin-win32-arm64": "0.8.1", "@ff-labs/fff-bin-win32-x64": "0.8.1" }, "os": [ "linux", "win32", "darwin", ], "cpu": [ "x64", "arm64", ] }, "sha512-8hmJ6j9uPBOQ7Ky/+67ZBAa9JGx+j5utl3qJm6uiv8BG/JMQia832z5LNUDwpR/eeppiortlAULwf8EfElnqOg=="],
+
"@floating-ui/core": ["@floating-ui/core@1.7.5", "", { "dependencies": { "@floating-ui/utils": "^0.2.11" } }, "sha512-1Ih4WTWyw0+lKyFMcBHGbb5U5FtuHJuujoyyr5zTaWS5EYMeT6Jb2AuDeftsCsEuchO+mM2ij5+q9crhydzLhQ=="],
"@floating-ui/dom": ["@floating-ui/dom@1.7.6", "", { "dependencies": { "@floating-ui/core": "^1.7.5", "@floating-ui/utils": "^0.2.11" } }, "sha512-9gZSAI5XM36880PPMm//9dfiEngYoC6Am2izES1FF406YFsjvyBMmeJ2g4SAju3xWwtuynNRFL2s9hgxpLI5SQ=="],
@@ -1618,6 +1624,38 @@
"@oslojs/jwt": ["@oslojs/jwt@0.2.0", "", { "dependencies": { "@oslojs/encoding": "0.4.1" } }, "sha512-bLE7BtHrURedCn4Mco3ma9L4Y1GR2SMBuIvjWr7rmQ4/W/4Jy70TIAgZ+0nIlk0xHz1vNP8x8DCns45Sb2XRbg=="],
+ "@oven/bun-darwin-aarch64": ["@oven/bun-darwin-aarch64@1.3.14", "", { "os": "darwin", "cpu": "arm64" }, "sha512-Omj20SuiHBOUjUBIyqtkNjSUIjOtEOJwmbix/ZyFH4BaQ6OZTaaRWIR4TjHVz0yadHgli6lLTiAh1uarnvD49A=="],
+
+ "@oven/bun-darwin-x64": ["@oven/bun-darwin-x64@1.3.14", "", { "os": "darwin", "cpu": "x64" }, "sha512-FFj3QdU/OhlDyZOJ8CWfN5eWLpRlT4qjZg7lMQi7jA6GuoY5ajlO1zWLP/MuHYRSbXQUvV52RejNi8DVnAp13w=="],
+
+ "@oven/bun-darwin-x64-baseline": ["@oven/bun-darwin-x64-baseline@1.3.14", "", { "os": "darwin", "cpu": "x64" }, "sha512-OSfsTZstc898HHElhU4NccaBGOSSDn5VfahiVTnidZ9B/+wb7WTyfZJaBeJcfjwJ9H2W9uTh2TGtl3UfcXgV9g=="],
+
+ "@oven/bun-freebsd-aarch64": ["@oven/bun-freebsd-aarch64@1.3.14", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-LIKrXaFxAHybVO5Pf+9XP2FHUj/5APvXTUKk9dqHm5iFz4oH+W24cmhjkJirNujh9hKeTyrpWSe3no9JZKowIw=="],
+
+ "@oven/bun-freebsd-x64": ["@oven/bun-freebsd-x64@1.3.14", "", { "os": "freebsd", "cpu": "x64" }, "sha512-uwD+fGUH1ADpIF3B1U2jWzzb20QwRLZfj5QZ28GUCGrAJ/nTmWrD6YYGsblCY1wuhldRez3lU40AyuvSCyLYmw=="],
+
+ "@oven/bun-linux-aarch64": ["@oven/bun-linux-aarch64@1.3.14", "", { "os": "linux", "cpu": "arm64" }, "sha512-X5SsPZHs+iYO8R/efIcRtc7gT2Q2DgPfliCxEkx4cXBumwkw0c/EsHMNwH3EgGpCDaZ7IYVPhpCG/xBOQHEwZw=="],
+
+ "@oven/bun-linux-aarch64-android": ["@oven/bun-linux-aarch64-android@1.3.14", "", { "os": "android", "cpu": "arm64" }, "sha512-y4kq5b85lsrmFb9Xvi4w9mA5IEFJkLMrSmYn06q24KjL9rUWDWO3VFZEtteZxUN5+ec3Zm5S8OnJw1umaCbVjA=="],
+
+ "@oven/bun-linux-aarch64-musl": ["@oven/bun-linux-aarch64-musl@1.3.14", "", { "os": "linux", "cpu": "arm64" }, "sha512-jmqOA92Cd1NL/1XBd4bFkJLxQ86K0RW7ohxS2qzzAvuitO4JiIxjjTeCspoU44zCozH72HpfZfUE2On31OjnWA=="],
+
+ "@oven/bun-linux-x64": ["@oven/bun-linux-x64@1.3.14", "", { "os": "linux", "cpu": "x64" }, "sha512-7OVTAKvwfPmSbIV1HpdOoVVx5VRc427GuPPne93N6vk4eQBPId9nXmZDh9/zGaKPdbVjVtQSZafWQoUjx38Utw=="],
+
+ "@oven/bun-linux-x64-android": ["@oven/bun-linux-x64-android@1.3.14", "", { "os": "android", "cpu": "x64" }, "sha512-qe9e1d+3VAEU7nAA2ol9Jvmy/o99PVMSgZhHn7Q/9O3YcDrfEqyQ8zm4zoe5qTEo8HZH0dN03Le0Ys2eQPs7eg=="],
+
+ "@oven/bun-linux-x64-baseline": ["@oven/bun-linux-x64-baseline@1.3.14", "", { "os": "linux", "cpu": "x64" }, "sha512-q/8EdOC0yUE8FPeoOVq8/Pw5I9/tJaYmUfO/uDUAREx8IUnOJH1RJ5A3BjFqre8pvJoiZA9AovPJq5FnNNjSxA=="],
+
+ "@oven/bun-linux-x64-musl": ["@oven/bun-linux-x64-musl@1.3.14", "", { "os": "linux", "cpu": "x64" }, "sha512-GBCB/k/sIqcr06eTNgg7g46qiUv35Jasx4XiccJ/n7RGqrE4RWUD/XJBbWFprVPjvqd59+QtSnS99XGqvftHfg=="],
+
+ "@oven/bun-linux-x64-musl-baseline": ["@oven/bun-linux-x64-musl-baseline@1.3.14", "", { "os": "linux", "cpu": "x64" }, "sha512-n6iE71G4lQE4XkrZhQQcL5YUlxDbnq6nqV7zeQi33PMsLT/0kYE+RvHOtBWZ3w0wMdXZfINmp63hIb9ijUBGtw=="],
+
+ "@oven/bun-windows-aarch64": ["@oven/bun-windows-aarch64@1.3.14", "", { "os": "win32", "cpu": "arm64" }, "sha512-T7s3x/BsVKQObGU6QDkZeI6wKynzqGbBH1yI77jrrj5siElclxr3DQrDIk8CV4G5/SJq2HHq4kpLyYY2DKCSmA=="],
+
+ "@oven/bun-windows-x64": ["@oven/bun-windows-x64@1.3.14", "", { "os": "win32", "cpu": "x64" }, "sha512-mUFWL3BoYkNpjd8e9PqROiFF/1Xeotq20mABJsiQH62jM1g5zqWh4khw1RZ6bX8Q8fWvlPaxG1PjofkmjUi3vg=="],
+
+ "@oven/bun-windows-x64-baseline": ["@oven/bun-windows-x64-baseline@1.3.14", "", { "os": "win32", "cpu": "x64" }, "sha512-uIjLUC1S9DWgICzuoMba7vurBJnBruE4S5CxnvmZkdqWVXRzx1Rgu636HoH+k0qeaQCFh3jeG3JQ1y6fRHv0sw=="],
+
"@oxc-minify/binding-android-arm64": ["@oxc-minify/binding-android-arm64@0.96.0", "", { "os": "android", "cpu": "arm64" }, "sha512-lzeIEMu/v6Y+La5JSesq4hvyKtKBq84cgQpKYTYM/yGuNk2tfd5Ha31hnC+mTh48lp/5vZH+WBfjVUjjINCfug=="],
"@oxc-minify/binding-darwin-arm64": ["@oxc-minify/binding-darwin-arm64@0.96.0", "", { "os": "darwin", "cpu": "arm64" }, "sha512-i0LkJAUXb4BeBFrJQbMKQPoxf8+cFEffDyLSb7NEzzKuPcH8qrVsnEItoOzeAdYam8Sr6qCHVwmBNEQzl7PWpw=="],
@@ -2480,6 +2518,28 @@
"@xmldom/xmldom": ["@xmldom/xmldom@0.8.12", "", {}, "sha512-9k/gHF6n/pAi/9tqr3m3aqkuiNosYTurLLUtc7xQ9sxB/wm7WPygCv8GYa6mS0fLJEHhqMC1ATYhz++U/lRHqg=="],
+ "@yuuang/ffi-rs-android-arm64": ["@yuuang/ffi-rs-android-arm64@1.3.2", "", { "os": "android", "cpu": "arm64" }, "sha512-eDYLT0kVBkp7e2BwdRDmt6N1rkeDPUHDefk3ZX0/nok+GLsqfy1WBoSL3Yg7HVXN1EyW8OBVc2uK8Zq8HbmaSA=="],
+
+ "@yuuang/ffi-rs-darwin-arm64": ["@yuuang/ffi-rs-darwin-arm64@1.3.2", "", { "os": "darwin", "cpu": "arm64" }, "sha512-kRdgPaOM6TfuC5wHUwstlatk4HNie2lwSLJWQL2LiAUIJ7+96CoiWUNVhwBcFrhdfxhnWenYS6F668CV0vit8Q=="],
+
+ "@yuuang/ffi-rs-darwin-x64": ["@yuuang/ffi-rs-darwin-x64@1.3.2", "", { "os": "darwin", "cpu": "x64" }, "sha512-O3AlVgre8FQcZRJe44Xs7A6iDLumoPXqbw40+eJCa2gyXaXyLPdHoWrS1W9rBCa1QZRRnG7zRulPVFw8C5uo8g=="],
+
+ "@yuuang/ffi-rs-linux-arm-gnueabihf": ["@yuuang/ffi-rs-linux-arm-gnueabihf@1.3.2", "", { "os": "linux", "cpu": "arm" }, "sha512-IXiNdTbIcTCPny5eeElijFWYeKSJjQWSjt9ZyJNdLHYiB1Np+XD6K7wNZS6EOMgMelhW1kQE62T654skGkVDIA=="],
+
+ "@yuuang/ffi-rs-linux-arm64-gnu": ["@yuuang/ffi-rs-linux-arm64-gnu@1.3.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-gWFO6xufUK9lPYUqDvKa6IR243dPqdetgl9Q7HrZWaDu7wLo06QQrosw8QTzndafQnOcBKm6LoLujmGCfTgJOA=="],
+
+ "@yuuang/ffi-rs-linux-arm64-musl": ["@yuuang/ffi-rs-linux-arm64-musl@1.3.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-lejvOSqypPziQH5rzfkDlJ6e92qhWbDutE9ttOO6z5I2k83zoh9iZhZWhaXSU5VqgQpcshRkrbtXb9gy1ft5dA=="],
+
+ "@yuuang/ffi-rs-linux-x64-gnu": ["@yuuang/ffi-rs-linux-x64-gnu@1.3.2", "", { "os": "linux", "cpu": "x64" }, "sha512-s8VCFazaJKmgY2hgMTpWk4TtBY/zy5ovbaGgwyY0FvBD0YvyhcET4IrMsDJpHhFVTPCYfKZ1dN45clD/YiFp6g=="],
+
+ "@yuuang/ffi-rs-linux-x64-musl": ["@yuuang/ffi-rs-linux-x64-musl@1.3.2", "", { "os": "linux", "cpu": "x64" }, "sha512-Ahr5chfKZKWUik20bEZRug+be57LZ2yYrtolyjSRoo7A4ZniBUHBZUNWm6TD6i0CJayqyxWeVk/XiaABD8bY0w=="],
+
+ "@yuuang/ffi-rs-win32-arm64-msvc": ["@yuuang/ffi-rs-win32-arm64-msvc@1.3.2", "", { "os": "win32", "cpu": "arm64" }, "sha512-yhpLcj0qel5VNlpzxPZfNmi7+rEX8444QHjUP6WWLxdRfqPllROu/Cp3OpkBpw3BLdxfcDhWkjWMD5QsJN0Pvg=="],
+
+ "@yuuang/ffi-rs-win32-ia32-msvc": ["@yuuang/ffi-rs-win32-ia32-msvc@1.3.2", "", { "os": "win32", "cpu": [ "x64", "ia32", ] }, "sha512-BFVSbdtg/7mJBw5kQFOPKFiA+SF7z3240HpzHN81Umm4Bp4dWkyx0msYn8+Q7/BBJiLQ4F6bi3Nftk58YA9r9w=="],
+
+ "@yuuang/ffi-rs-win32-x64-msvc": ["@yuuang/ffi-rs-win32-x64-msvc@1.3.2", "", { "os": "win32", "cpu": "x64" }, "sha512-ZL5MJ76n2rjwGo26kCWW7wK6QT/cee00Rx8pfW79pz6vM6jqfhoE7zTnwFiw4aOQUes9+HUc5DeeJ3z+Vb9oLg=="],
+
"@zip.js/zip.js": ["@zip.js/zip.js@2.7.62", "", {}, "sha512-OaLvZ8j4gCkLn048ypkZu29KX30r8/OfFF2w4Jo5WXFr+J04J+lzJ5TKZBVgFXhlvSkqNFQdfnY1Q8TMTCyBVA=="],
"abbrev": ["abbrev@4.0.0", "", {}, "sha512-a1wflyaL0tHtJSmLSOVybYhy22vRih4eduhhrkcjgrWGnRfrZtovJ2FRjxuTtkkj47O/baf0R86QU5OuYpz8fA=="],
@@ -2690,6 +2750,8 @@
"builder-util-runtime": ["builder-util-runtime@9.5.1", "", { "dependencies": { "debug": "^4.3.4", "sax": "^1.2.4" } }, "sha512-qt41tMfgHTllhResqM5DcnHyDIWNgzHvuY2jDcYP9iaGpkWxTUzV6GQjDeLnlR1/DtdlcsWQbA7sByMpmJFTLQ=="],
+ "bun": ["bun@1.3.14", "", { "optionalDependencies": { "@oven/bun-darwin-aarch64": "1.3.14", "@oven/bun-darwin-x64": "1.3.14", "@oven/bun-darwin-x64-baseline": "1.3.14", "@oven/bun-freebsd-aarch64": "1.3.14", "@oven/bun-freebsd-x64": "1.3.14", "@oven/bun-linux-aarch64": "1.3.14", "@oven/bun-linux-aarch64-android": "1.3.14", "@oven/bun-linux-aarch64-musl": "1.3.14", "@oven/bun-linux-x64": "1.3.14", "@oven/bun-linux-x64-android": "1.3.14", "@oven/bun-linux-x64-baseline": "1.3.14", "@oven/bun-linux-x64-musl": "1.3.14", "@oven/bun-linux-x64-musl-baseline": "1.3.14", "@oven/bun-windows-aarch64": "1.3.14", "@oven/bun-windows-x64": "1.3.14", "@oven/bun-windows-x64-baseline": "1.3.14" }, "os": [ "!aix", "!sunos", "!openbsd", ], "cpu": [ "x64", "arm64", ], "bin": { "bun": "bin/bun.exe", "bunx": "bin/bunx.exe" } }, "sha512-aB6GVd42x1Y5ie1K16SF+oLGtgSkwX9hgoDdIW88pjvfTccU8F1vfpoOt34QLv0dZ1v3XimtaxPlZUG81Gx9Zg=="],
+
"bun-ffi-structs": ["bun-ffi-structs@0.2.2", "", { "peerDependencies": { "typescript": "^5" } }, "sha512-N/ZWtyN0piZlrXQT7TO0V+q952orYqkfhXRXM1Hcbb+R3QSiBH4vLnib187Mrs1H7pWIYECAmPeapGYDOMCl+w=="],
"bun-pty": ["bun-pty@0.4.8", "", {}, "sha512-rO70Mrbr13+jxHHHu2YBkk2pNqrJE5cJn29WE++PUr+GFA0hq/VgtQPZANJ8dJo6d7XImvBk37Innt8GM7O28w=="],
@@ -3178,6 +3240,8 @@
"fetch-blob": ["fetch-blob@3.2.0", "", { "dependencies": { "node-domexception": "^1.0.0", "web-streams-polyfill": "^3.0.3" } }, "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ=="],
+ "ffi-rs": ["ffi-rs@1.3.2", "", { "optionalDependencies": { "@yuuang/ffi-rs-android-arm64": "1.3.2", "@yuuang/ffi-rs-darwin-arm64": "1.3.2", "@yuuang/ffi-rs-darwin-x64": "1.3.2", "@yuuang/ffi-rs-linux-arm-gnueabihf": "1.3.2", "@yuuang/ffi-rs-linux-arm64-gnu": "1.3.2", "@yuuang/ffi-rs-linux-arm64-musl": "1.3.2", "@yuuang/ffi-rs-linux-x64-gnu": "1.3.2", "@yuuang/ffi-rs-linux-x64-musl": "1.3.2", "@yuuang/ffi-rs-win32-arm64-msvc": "1.3.2", "@yuuang/ffi-rs-win32-ia32-msvc": "1.3.2", "@yuuang/ffi-rs-win32-x64-msvc": "1.3.2" } }, "sha512-4s8dX9VbBw/jd5NOuE3EJRqXaIVdjMyiumeeDzrOhtjQRwp6Bz2za7iksWXTnvTQKV/tTdm1s1w7mObe92zPjQ=="],
+
"filelist": ["filelist@1.0.6", "", { "dependencies": { "minimatch": "^5.0.1" } }, "sha512-5giy2PkLYY1cP39p17Ech+2xlpTRL9HLspOfEgm0L6CwBXBTgsK5ou0JtzYuepxkaQ/tvhCFIJ5uXo0OrM2DxA=="],
"fill-range": ["fill-range@7.1.1", "", { "dependencies": { "to-regex-range": "^5.0.1" } }, "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg=="],
diff --git a/bunfig.toml b/bunfig.toml
index 47c4ac53965b..e3f06df89848 100644
--- a/bunfig.toml
+++ b/bunfig.toml
@@ -2,7 +2,7 @@
exact = true
# Only install newly resolved package versions published at least 3 days ago.
minimumReleaseAge = 259200
-minimumReleaseAgeExcludes = ["@opentui/core", "@opentui/core-darwin-arm64", "@opentui/core-darwin-x64", "@opentui/core-linux-arm64", "@opentui/core-linux-x64", "@opentui/core-win32-arm64", "@opentui/core-win32-x64", "@opentui/keymap", "@opentui/solid"]
+minimumReleaseAgeExcludes = ["@opentui/core", "@opentui/core-darwin-arm64", "@opentui/core-darwin-x64", "@opentui/core-linux-arm64", "@opentui/core-linux-x64", "@opentui/core-win32-arm64", "@opentui/core-win32-x64", "@opentui/keymap", "@opentui/solid", "@ff-labs/fff-node", "@ff-labs/fff-bun"]
[test]
root = "./do-not-run-tests-from-root"
diff --git a/packages/opencode/package.json b/packages/opencode/package.json
index e3e8c1774a96..d2b421bc56a1 100644
--- a/packages/opencode/package.json
+++ b/packages/opencode/package.json
@@ -28,6 +28,11 @@
"node": "./src/storage/db.node.ts",
"default": "./src/storage/db.bun.ts"
},
+ "#fff": {
+ "bun": "./src/file/fff.bun.ts",
+ "node": "./src/file/fff.node.ts",
+ "default": "./src/file/fff.bun.ts"
+ },
"#pty": {
"bun": "./src/pty/pty.bun.ts",
"node": "./src/pty/pty.node.ts",
@@ -93,6 +98,8 @@
"@clack/prompts": "1.0.0-alpha.1",
"@effect/opentelemetry": "catalog:",
"@effect/platform-node": "catalog:",
+ "@ff-labs/fff-bun": "0.8.1",
+ "@ff-labs/fff-node": "0.8.1",
"@gitlab/opencode-gitlab-auth": "1.3.3",
"@lydell/node-pty": "catalog:",
"@modelcontextprotocol/sdk": "1.27.1",
diff --git a/packages/opencode/script/bench-search.ts b/packages/opencode/script/bench-search.ts
new file mode 100644
index 000000000000..5c8fbf08e449
--- /dev/null
+++ b/packages/opencode/script/bench-search.ts
@@ -0,0 +1,119 @@
+import { Effect } from "effect"
+import { Fff } from "#fff"
+import { AppRuntime } from "@/effect/app-runtime"
+import { Search } from "@/file/search"
+import { InstanceStore } from "@/project/instance-store"
+
+const dir = process.cwd()
+
+const FILE_QUERIES = ["fff", "package.json", "tools/ experiment"]
+const GREP_QUERIES = ["FileFinder", "import", "grep", "autocomplete"]
+const GLOB_QUERIES = ["**/*.test.ts"]
+
+const FILE_LIMIT = 100
+const GREP_LIMIT = 50
+const GLOB_LIMIT = 50
+
+const run = <T>(effect: Effect.Effect<T, unknown, unknown>) =>
+  AppRuntime.runPromise(
+    InstanceStore.Service.use((store) => store.provide({ directory: dir }, effect as never)),
+  ) as Promise<T>
+
+// --- raw Fff picker ---
+const t0 = performance.now()
+const made = Fff.create({ basePath: dir, aiMode: true })
+if (!made.ok) {
+ console.error("Fff.create failed:", made.error)
+ process.exit(1)
+}
+const picker = made.value
+console.log(`picker create: ${(performance.now() - t0).toFixed(1)}ms`)
+
+const tw = performance.now()
+const deadline = tw + 2500
+while (picker.isScanning() && performance.now() < deadline) {
+ await new Promise((resolve) => setTimeout(resolve, 25))
+}
+console.log(`wait for scan (poll): ${(performance.now() - tw).toFixed(1)}ms`)
+
+// warmup grep to let the content index build
+const tWarmup = performance.now()
+picker.grep("_warmup_", { mode: "regex", maxMatchesPerFile: 1, timeBudgetMs: 1_500 })
+console.log(`grep warmup: ${(performance.now() - tWarmup).toFixed(1)}ms`)
+
+console.log()
+console.log("--- raw picker (warm) ---")
+
+for (const q of FILE_QUERIES) {
+ const t = performance.now()
+ const r = picker.fileSearch(q, { pageSize: Math.max(FILE_LIMIT, 100) })
+ const count = r.ok ? r.value.items.length : "err"
+ console.log(`[picker] fileSearch "${q}": ${(performance.now() - t).toFixed(1)}ms (${count} results)`)
+}
+
+for (const q of GREP_QUERIES) {
+ const t = performance.now()
+ const r = picker.grep(q, { mode: "regex", pageSize: GREP_LIMIT, timeBudgetMs: 1_500 })
+ const count = r.ok ? r.value.items.length : "err"
+ console.log(`[picker] grep "${q}": ${(performance.now() - t).toFixed(1)}ms (${count} matches)`)
+}
+
+picker.destroy()
+
+// --- Ripgrep service (via Search with file:["."] to force rg path) ---
+console.log()
+console.log("--- Ripgrep (via Search service) ---")
+
+// warmup
+await run(Search.Service.use((svc) => svc.search({ cwd: dir, pattern: "_warmup_rg_", limit: 1, file: ["."] })))
+
+for (const q of GREP_QUERIES) {
+ const t = performance.now()
+ const r = await run(Search.Service.use((svc) => svc.search({ cwd: dir, pattern: q, limit: GREP_LIMIT, file: ["."] })))
+ console.log(
+ `[ripgrep] grep "${q}": ${(performance.now() - t).toFixed(1)}ms (${r.items.length} total, limit is per-file not total)`,
+ )
+}
+
+// --- Search service: init breakdown ---
+console.log()
+
+// 1) runtime + InstanceState + picker create + scan poll
+const tRuntime = performance.now()
+await run(Search.Service.use((svc) => svc.file({ cwd: dir, query: "_warmup_file_", limit: 1 })))
+console.log(`[Search] init file (runtime + picker + scan): ${(performance.now() - tRuntime).toFixed(1)}ms`)
+
+// 2) grep warmup (content index cold-start inside the Search service picker)
+const tGrepWarmup = performance.now()
+await run(Search.Service.use((svc) => svc.search({ cwd: dir, pattern: "_warmup_grep_", limit: 1 })))
+console.log(`[Search] init grep (content index warmup): ${(performance.now() - tGrepWarmup).toFixed(1)}ms`)
+
+console.log()
+console.log("--- Search service (warm) ---")
+
+for (const q of FILE_QUERIES) {
+ const t = performance.now()
+ const r = await run(Search.Service.use((svc) => svc.file({ cwd: dir, query: q, limit: FILE_LIMIT })))
+ console.log(
+ `[Search.file] "${q}": ${(performance.now() - t).toFixed(1)}ms (${r?.length ?? "undefined (cache fallback)"} results)`,
+ )
+}
+
+for (const q of GREP_QUERIES) {
+ const t = performance.now()
+ const r = await run(Search.Service.use((svc) => svc.search({ cwd: dir, pattern: q, limit: GREP_LIMIT })))
+ console.log(
+ `[Search.search] "${q}": ${(performance.now() - t).toFixed(1)}ms (${r.items.length} matches, engine=${r.engine})`,
+ )
+}
+
+for (const q of GLOB_QUERIES) {
+ const t = performance.now()
+ const r = await run(Search.Service.use((svc) => svc.glob({ cwd: dir, pattern: q, limit: GLOB_LIMIT })))
+ console.log(
+ `[Search.glob] "${q}": ${(performance.now() - t).toFixed(1)}ms (${r.files.length} files, truncated=${r.truncated})`,
+ )
+}
+
+process.exit(0)
+
diff --git a/packages/opencode/src/cli/cmd/debug/file.ts b/packages/opencode/src/cli/cmd/debug/file.ts
index d9bb252ea988..fc665b843b57 100644
--- a/packages/opencode/src/cli/cmd/debug/file.ts
+++ b/packages/opencode/src/cli/cmd/debug/file.ts
@@ -1,7 +1,7 @@
import { EOL } from "os"
import { Effect } from "effect"
import { File } from "../../../file"
-import { Ripgrep } from "@/file/ripgrep"
+import { Search } from "@/file/search"
import { effectCmd } from "../../effect-cmd"
import { cmd } from "../cmd"
@@ -70,7 +70,7 @@ const FileTreeCommand = effectCmd({
default: process.cwd(),
}),
handler: Effect.fn("Cli.debug.file.tree")(function* (args) {
- const tree = yield* Effect.orDie(Ripgrep.Service.use((svc) => svc.tree({ cwd: args.dir, limit: 200 })))
+ const tree = yield* Effect.orDie(Search.Service.use((svc) => svc.tree({ cwd: args.dir, limit: 200 })))
console.log(JSON.stringify(tree, null, 2))
}),
})
diff --git a/packages/opencode/src/cli/cmd/debug/ripgrep.ts b/packages/opencode/src/cli/cmd/debug/ripgrep.ts
index 8d1cbd2b1eae..eb7405ed7839 100644
--- a/packages/opencode/src/cli/cmd/debug/ripgrep.ts
+++ b/packages/opencode/src/cli/cmd/debug/ripgrep.ts
@@ -1,6 +1,6 @@
import { EOL } from "os"
import { Effect, Stream } from "effect"
-import { Ripgrep } from "../../../file/ripgrep"
+import { Search } from "../../../file/search"
import { effectCmd } from "../../effect-cmd"
import { cmd } from "../cmd"
import { InstanceRef } from "@/effect/instance-ref"
@@ -22,7 +22,7 @@ const TreeCommand = effectCmd({
handler: Effect.fn("Cli.debug.rg.tree")(function* (args) {
const ctx = yield* InstanceRef
if (!ctx) return
- const tree = yield* Effect.orDie(Ripgrep.Service.use((svc) => svc.tree({ cwd: ctx.directory, limit: args.limit })))
+ const tree = yield* Effect.orDie(Search.Service.use((svc) => svc.tree({ cwd: ctx.directory, limit: args.limit })))
process.stdout.write(tree + EOL)
}),
})
@@ -47,8 +47,8 @@ const FilesCommand = effectCmd({
handler: Effect.fn("Cli.debug.rg.files")(function* (args) {
const ctx = yield* InstanceRef
if (!ctx) return
- const rg = yield* Ripgrep.Service
- const files = yield* rg
+ const search = yield* Search.Service
+ const files = yield* search
.files({
cwd: ctx.directory,
glob: args.glob ? [args.glob] : undefined,
@@ -85,7 +85,7 @@ const SearchCommand = effectCmd({
const ctx = yield* InstanceRef
if (!ctx) return
const results = yield* Effect.orDie(
- Ripgrep.Service.use((svc) =>
+ Search.Service.use((svc) =>
svc.search({
cwd: ctx.directory,
pattern: args.pattern,
diff --git a/packages/opencode/src/effect/app-runtime.ts b/packages/opencode/src/effect/app-runtime.ts
index 0ce876ddc65a..5b91513f225d 100644
--- a/packages/opencode/src/effect/app-runtime.ts
+++ b/packages/opencode/src/effect/app-runtime.ts
@@ -8,7 +8,7 @@ import { Auth } from "@/auth"
import { Account } from "@/account/account"
import { Config } from "@/config/config"
import { Git } from "@/git"
-import { Ripgrep } from "@/file/ripgrep"
+import { Search } from "@/file/search"
import { File } from "@/file"
import { FileWatcher } from "@/file/watcher"
import { Storage } from "@/storage/storage"
@@ -67,7 +67,7 @@ export const AppLayer = Layer.mergeAll(
Account.defaultLayer,
Config.defaultLayer,
Git.defaultLayer,
- Ripgrep.defaultLayer,
+ Search.defaultLayer,
File.defaultLayer,
FileWatcher.defaultLayer,
Storage.defaultLayer,
diff --git a/packages/opencode/src/file/fff.bun.ts b/packages/opencode/src/file/fff.bun.ts
new file mode 100644
index 000000000000..e1560101d6c1
--- /dev/null
+++ b/packages/opencode/src/file/fff.bun.ts
@@ -0,0 +1,86 @@
+import {
+ FileFinder,
+ type FileItem,
+ type GrepCursor,
+ type GrepMatch,
+ type GrepResult,
+ type InitOptions,
+ type SearchResult,
+} from "@ff-labs/fff-bun"
+
+export type Result<T> = { ok: true; value: T } | { ok: false; error: string }
+
+export type Init = InitOptions
+
+export interface Search {
+ items: FileItem[]
+ scores: SearchResult["scores"]
+ totalMatched: number
+ totalFiles: number
+}
+
+export type File = FileItem
+export type Cursor = GrepCursor | null
+export type Hit = GrepMatch
+
+export interface Grep {
+ items: GrepResult["items"]
+ totalMatched: number
+ totalFilesSearched: number
+ totalFiles: number
+ filteredFileCount: number
+ nextCursor: Cursor
+ regexFallbackError?: string
+}
+
+export interface Picker {
+  destroy(): void
+  isScanning(): boolean
+  refreshGitStatus(): Result<void>
+  fileSearch(
+    query: string,
+    opts?: {
+      currentFile?: string
+      pageIndex?: number
+      pageSize?: number
+    },
+  ): Result<Search>
+  grep(
+    query: string,
+    opts?: {
+      mode?: "plain" | "regex" | "fuzzy"
+      maxMatchesPerFile?: number
+      timeBudgetMs?: number
+      beforeContext?: number
+      afterContext?: number
+      cursor?: Cursor
+      pageSize?: number
+    },
+  ): Result<Grep>
+  trackQuery(query: string, file: string): Result<void>
+  getHistoricalQuery(offset: number): Result<string>
+}
+
+export function available() {
+ return FileFinder.isAvailable()
+}
+
+export function create(opts: Init): Result<Picker> {
+ const made = FileFinder.create(opts)
+ if (!made.ok) return made
+ const pick = made.value
+ return {
+ ok: true,
+ value: {
+ destroy: () => pick.destroy(),
+ isScanning: () => pick.isScanning(),
+ refreshGitStatus: () => pick.refreshGitStatus(),
+ fileSearch: (query, next) => pick.fileSearch(query, next),
+ grep: (query, next) => pick.grep(query, next),
+ trackQuery: (query, file) => pick.trackQuery(query, file),
+ getHistoricalQuery: (offset) => pick.getHistoricalQuery(offset),
+ },
+ }
+}
+
+export * as Fff from "./fff.bun"
diff --git a/packages/opencode/src/file/fff.node.ts b/packages/opencode/src/file/fff.node.ts
new file mode 100644
index 000000000000..3674218b94ae
--- /dev/null
+++ b/packages/opencode/src/file/fff.node.ts
@@ -0,0 +1,86 @@
+import {
+ FileFinder,
+ type FileItem,
+ type GrepCursor,
+ type GrepMatch,
+ type GrepResult,
+ type InitOptions,
+ type SearchResult,
+} from "@ff-labs/fff-node"
+
+export type Result<T> = { ok: true; value: T } | { ok: false; error: string }
+
+export type Init = InitOptions
+
+export interface Search {
+ items: FileItem[]
+ scores: SearchResult["scores"]
+ totalMatched: number
+ totalFiles: number
+}
+
+export type File = FileItem
+export type Cursor = GrepCursor | null
+export type Hit = GrepMatch
+
+export interface Grep {
+ items: GrepResult["items"]
+ totalMatched: number
+ totalFilesSearched: number
+ totalFiles: number
+ filteredFileCount: number
+ nextCursor: Cursor
+ regexFallbackError?: string
+}
+
+export interface Picker {
+  destroy(): void
+  isScanning(): boolean
+  refreshGitStatus(): Result<void>
+  fileSearch(
+    query: string,
+    opts?: {
+      currentFile?: string
+      pageIndex?: number
+      pageSize?: number
+    },
+  ): Result<Search>
+  grep(
+    query: string,
+    opts?: {
+      mode?: "plain" | "regex" | "fuzzy"
+      maxMatchesPerFile?: number
+      timeBudgetMs?: number
+      beforeContext?: number
+      afterContext?: number
+      cursor?: Cursor
+      pageSize?: number
+    },
+  ): Result<Grep>
+  trackQuery(query: string, file: string): Result<void>
+  getHistoricalQuery(offset: number): Result<string>
+}
+
+export function available() {
+ return FileFinder.isAvailable()
+}
+
+export function create(opts: Init): Result<Picker> {
+ const made = FileFinder.create(opts)
+ if (!made.ok) return made
+ const pick = made.value
+ return {
+ ok: true,
+ value: {
+ destroy: () => pick.destroy(),
+ isScanning: () => pick.isScanning(),
+ refreshGitStatus: () => pick.refreshGitStatus(),
+ fileSearch: (query, next) => pick.fileSearch(query, next),
+ grep: (query, next) => pick.grep(query, next),
+ trackQuery: (query, file) => pick.trackQuery(query, file),
+ getHistoricalQuery: (offset) => pick.getHistoricalQuery(offset),
+ },
+ }
+}
+
+export * as Fff from "./fff.node"
diff --git a/packages/opencode/src/file/index.ts b/packages/opencode/src/file/index.ts
index b951a4d7a5f0..4fe4c88e7d7c 100644
--- a/packages/opencode/src/file/index.ts
+++ b/packages/opencode/src/file/index.ts
@@ -13,7 +13,7 @@ import { Global } from "@opencode-ai/core/global"
import { containsPath } from "../project/instance-context"
import * as Log from "@opencode-ai/core/util/log"
import { Protected } from "./protected"
-import { Ripgrep } from "./ripgrep"
+import { Search } from "./search"
import { NonNegativeInt, type DeepMutable } from "@opencode-ai/core/schema"
export const Info = Schema.Struct({
@@ -330,7 +330,7 @@ export const layer = Layer.effect(
Service,
Effect.gen(function* () {
const appFs = yield* AppFileSystem.Service
- const rg = yield* Ripgrep.Service
+ const searchSvc = yield* Search.Service
const git = yield* Git.Service
const scope = yield* Scope.Scope
@@ -372,7 +372,7 @@ export const layer = Layer.effect(
next.dirs = Array.from(dirs).toSorted()
} else {
- const files = yield* rg.files({ cwd: ctx.directory }).pipe(
+ const files = yield* searchSvc.files({ cwd: ctx.directory }).pipe(
Stream.runCollect,
Effect.map((chunk) => [...chunk]),
)
@@ -499,6 +499,7 @@ export const layer = Layer.effect(
using _ = log.time("read", { file })
const ctx = yield* InstanceState.context
const full = path.join(ctx.directory, file)
+ const trackOpen = searchSvc.open({ cwd: ctx.directory, file }).pipe(Effect.ignore)
if (!containsPath(full, ctx)) {
throw new Error("Access denied: path escapes project directory")
@@ -506,21 +507,23 @@ export const layer = Layer.effect(
if (isImageByExtension(file)) {
const exists = yield* appFs.existsSafe(full)
- if (exists) {
- const bytes = yield* appFs.readFile(full).pipe(Effect.catch(() => Effect.succeed(new Uint8Array())))
- return {
- type: "text" as const,
- content: Buffer.from(bytes).toString("base64"),
- mimeType: getImageMimeType(file),
- encoding: "base64" as const,
- }
+ if (!exists) return { type: "text" as const, content: "" }
+ yield* trackOpen
+ const bytes = yield* appFs.readFile(full).pipe(Effect.catch(() => Effect.succeed(new Uint8Array())))
+ return {
+ type: "text" as const,
+ content: Buffer.from(bytes).toString("base64"),
+ mimeType: getImageMimeType(file),
+ encoding: "base64" as const,
}
- return { type: "text" as const, content: "" }
}
const knownText = isTextByExtension(file) || isTextByName(file)
- if (isBinaryByExtension(file) && !knownText) return { type: "binary" as const, content: "" }
+ if (isBinaryByExtension(file) && !knownText) {
+ yield* trackOpen
+ return { type: "binary" as const, content: "" }
+ }
const exists = yield* appFs.existsSafe(full)
if (!exists) return { type: "text" as const, content: "" }
@@ -531,6 +534,7 @@ export const layer = Layer.effect(
if (encode && !isImage(mimeType)) return { type: "binary" as const, content: "", mimeType }
if (encode) {
+ yield* trackOpen
const bytes = yield* appFs.readFile(full).pipe(Effect.catch(() => Effect.succeed(new Uint8Array())))
return {
type: "text" as const,
@@ -551,6 +555,7 @@ export const layer = Layer.effect(
diff = yield* gitText(["-c", "core.fsmonitor=false", "diff", "--staged", "--", file])
}
if (diff.trim()) {
+ yield* trackOpen
const original = yield* git.show(ctx.directory, "HEAD", file)
const patch = structuredPatch(file, file, original, content, "old", "new", {
context: Infinity,
@@ -558,9 +563,11 @@ export const layer = Layer.effect(
})
return { type: "text" as const, content, patch, diff: formatPatch(patch) }
}
+ yield* trackOpen
return { type: "text" as const, content }
}
+ yield* trackOpen
return { type: "text" as const, content }
})
@@ -612,14 +619,29 @@ export const layer = Layer.effect(
dirs?: boolean
type?: "file" | "directory"
}) {
- yield* ensure()
- const { cache } = yield* InstanceState.get(state)
-
const query = input.query.trim()
const limit = input.limit ?? 100
const kind = input.type ?? (input.dirs === false ? "file" : "all")
log.info("search", { query, kind })
+ if (query && kind === "file") {
+ const ctx = yield* InstanceState.context
+ const files = yield* searchSvc.file({
+ cwd: ctx.directory,
+ query,
+ limit,
+ }).pipe(Effect.orDie)
+ if (files === undefined) {
+ log.info("search", { query, kind, mode: "cache" })
+ } else {
+ log.info("search", { query, kind, results: files.length, mode: "fff" })
+ return files
+ }
+ }
+
+ yield* ensure()
+ const { cache } = yield* InstanceState.get(state)
+
const preferHidden = query.startsWith(".") || query.includes("/.")
if (!query) {
@@ -643,7 +665,7 @@ export const layer = Layer.effect(
)
export const defaultLayer = layer.pipe(
- Layer.provide(Ripgrep.defaultLayer),
+ Layer.provide(Search.defaultLayer),
Layer.provide(AppFileSystem.defaultLayer),
Layer.provide(Git.defaultLayer),
)
diff --git a/packages/opencode/src/file/search.ts b/packages/opencode/src/file/search.ts
new file mode 100644
index 000000000000..3782747a420e
--- /dev/null
+++ b/packages/opencode/src/file/search.ts
@@ -0,0 +1,441 @@
+import path from "path"
+import { AppFileSystem } from "@opencode-ai/core/filesystem"
+import { Glob } from "@opencode-ai/core/util/glob"
+import { Clock, Context, Deferred, Effect, Layer, Option } from "effect"
+import * as Stream from "effect/Stream"
+import z from "zod"
+import * as InstanceState from "@/effect/instance-state"
+import { makeRuntime } from "@/effect/run-service"
+import { Global } from "@opencode-ai/core/global"
+import * as Log from "@opencode-ai/core/util/log"
+import { Fff } from "#fff"
+import { Ripgrep } from "./ripgrep"
+
+const log = Log.create({ service: "file.search" })
+const root = path.join(Global.Path.cache, "fff")
+
+export const Match = z.object({
+ path: z.object({
+ text: z.string(),
+ }),
+ lines: z.object({
+ text: z.string(),
+ }),
+ line_number: z.number(),
+ absolute_offset: z.number(),
+ submatches: z.array(
+ z.object({
+ match: z.object({
+ text: z.string(),
+ }),
+ start: z.number(),
+ end: z.number(),
+ }),
+ ),
+})
+
+export type Item = Ripgrep.Item
+
+export interface Result {
+ readonly items: Item[]
+ readonly partial: boolean
+ readonly hasNextPage: boolean
+ readonly engine: "fff" | "ripgrep"
+ readonly regexFallbackError?: string
+}
+
+export interface FileInput {
+ readonly cwd: string
+ readonly query: string
+ readonly limit?: number
+ readonly current?: string
+}
+
+export interface GlobInput {
+ readonly cwd: string
+ readonly pattern: string
+ readonly limit?: number
+ readonly signal?: AbortSignal
+}
+
+interface Query {
+ readonly dir: string
+ readonly text: string
+ readonly files: string[]
+}
+
+interface State {
+ readonly pick: Map<string, Fff.Picker> // TODO(review): generics were garbled in this patch; confirm exported picker type name
+ readonly wait: Map<string, Deferred.Deferred<Fff.Picker, Error>>
+ readonly recent: Query[]
+}
+
+export interface Interface {
+ readonly files: Ripgrep.Interface["files"]
+ readonly tree: Ripgrep.Interface["tree"]
+ readonly search: (input: Ripgrep.SearchInput) => Effect.Effect<Result, unknown>
+ readonly file: (input: FileInput) => Effect.Effect<string[] | undefined, unknown> // undefined => fff unavailable, caller falls back
+ readonly glob: (input: GlobInput) => Effect.Effect<{ files: string[]; truncated: boolean }, unknown>
+ readonly open: (input: { cwd?: string; file: string }) => Effect.Effect<void, unknown>
+}
+
+export class Service extends Context.Service<Interface>()("@opencode/Search") {} // TODO(review): type argument lost in patch transport; confirm
+
+function key(dir: string) {
+ return Buffer.from(dir).toString("base64url")
+}
+
+function normalize(text: string) {
+ return text.replaceAll("\\", "/")
+}
+
+function include(pattern: string) {
+ const value = pattern.trim().replaceAll("\\", "/")
+ if (!value) return "*"
+ const flat = value.replaceAll("**/", "").replaceAll("/**", "/")
+ const idx = flat.lastIndexOf("/")
+ if (idx < 0) return flat
+ const dir = flat.slice(0, idx + 1)
+ const glob = flat.slice(idx + 1)
+ if (!glob) return dir
+ return `${dir} ${glob}`
+}
+
+// fff supports glob narrowing for any search out of the box.
+// Array globs are joined with spaces (fff's separator) and prefixed to the query.
+function fffGlobbedQuery(query: string, glob?: string | string[]) {
+ if (query && glob) {
+ let resolvedGlob = ""
+ if (Array.isArray(glob)) {
+ resolvedGlob = glob.join(" ")
+ } else {
+ resolvedGlob = glob
+ }
+ // was `${glob}`: an array glob would be comma-joined by Array#toString
+ return `${resolvedGlob} ${query}`
+ }
+ return query ?? glob
+}
+
+function remember(state: State, dir: string, text: string, files: string[]) {
+ if (!files.length) return
+ const next = Array.from(new Set(files.map(AppFileSystem.resolve))).slice(0, 64)
+ if (!next.length) return
+ const idx = state.recent.findIndex((item) => item.dir === dir && item.text === text)
+ if (idx >= 0) state.recent.splice(idx, 1)
+ state.recent.unshift({ dir, text, files: next })
+ if (state.recent.length > 32) state.recent.length = 32
+}
+
+function item(hit: Fff.Hit): Item {
+ const line = Buffer.from(hit.lineContent)
+ return {
+ path: { text: normalize(hit.relativePath) },
+ lines: { text: hit.lineContent },
+ line_number: hit.lineNumber,
+ absolute_offset: hit.byteOffset,
+ submatches: hit.matchRanges
+ .map(([start, end]) => {
+ const text = line.subarray(start, end).toString("utf8")
+ if (!text) return undefined
+ return {
+ match: { text },
+ start,
+ end,
+ }
+ })
+ .filter((row): row is Item["submatches"][number] => Boolean(row)),
+ }
+}
+
+export const layer = Layer.effect( // NOTE(review): explicit Layer.Layer<...> annotation was garbled in transit; rely on inference
+ Service,
+ Effect.gen(function* () {
+ const fs = yield* AppFileSystem.Service
+ const rg = yield* Ripgrep.Service
+ const state = yield* InstanceState.make(
+ Effect.fn("Search.state")(() =>
+ Effect.gen(function* () {
+ const next = {
+ pick: new Map<string, Fff.Picker>(),
+ wait: new Map<string, Deferred.Deferred<Fff.Picker, Error>>(),
+ recent: [] as Query[],
+ }
+ yield* Effect.addFinalizer(() =>
+ Effect.sync(() => {
+ for (const pick of next.pick.values()) pick.destroy()
+ }),
+ )
+ return next
+ }),
+ ),
+ )
+
+ const rip = Effect.fn("Search.rip")(function* (input: Ripgrep.SearchInput) {
+ const out = yield* rg.search(input)
+ return {
+ items: out.items,
+ partial: out.partial,
+ hasNextPage: false,
+ engine: "ripgrep" as const,
+ }
+ })
+
+ const picker = Effect.fn("Search.picker")(function* (cwd: string) {
+ if (!Fff.available()) return undefined
+
+ const dir = AppFileSystem.resolve(cwd)
+ const current = yield* InstanceState.get(state)
+ const existing = current.pick.get(dir)
+ if (existing) return existing
+
+ const pending = current.wait.get(dir)
+ if (pending) return yield* Deferred.await(pending)
+
+ const gate = yield* Deferred.make<Fff.Picker, Error>()
+ current.wait.set(dir, gate)
+ try {
+ yield* fs.ensureDir(root)
+ const id = key(dir)
+ const made = yield* Effect.sync(() =>
+ Fff.create({
+ basePath: dir,
+ frecencyDbPath: path.join(root, `${id}.frecency.mdb`),
+ historyDbPath: path.join(root, `${id}.history.mdb`),
+ aiMode: true,
+ }),
+ )
+ if (!made.ok) {
+ log.warn("fff init failed", { dir, error: made.error })
+ const err = new Error(made.error)
+ yield* Deferred.fail(gate, err)
+ return yield* Effect.fail(err)
+ }
+
+ const pick = made.value
+
+ const ready = yield* Effect.gen(function* () {
+ const start = yield* Clock.currentTimeMillis
+ while (true) {
+ if (!pick.isScanning()) return true
+ const now = yield* Clock.currentTimeMillis
+ if (now - start >= 5_000) return false
+ yield* Effect.sleep("25 millis")
+ }
+ })
+
+ if (!ready) {
+ pick.destroy()
+ const err = new Error("fff scan timed out")
+ log.warn("fff scan timed out", { dir })
+ yield* Deferred.fail(gate, err)
+ return yield* Effect.fail(err)
+ }
+
+ const git = yield* Effect.sync(() => pick.refreshGitStatus())
+ if (!git.ok) log.warn("fff git refresh failed", { dir, error: git.error })
+
+ current.pick.set(dir, pick)
+ yield* Deferred.succeed(gate, pick)
+ return pick
+ } finally {
+ if (current.wait.get(dir) === gate) current.wait.delete(dir)
+ }
+ })
+
+ const files: Interface["files"] = (input) => rg.files(input)
+ const tree: Interface["tree"] = (input) => rg.tree(input)
+
+ const file: Interface["file"] = Effect.fn("Search.file")(function* (input) {
+ const query = input.query.trim()
+ if (!query) return []
+
+ const pick = yield* picker(input.cwd).pipe(Effect.catch(() => Effect.succeed(undefined)))
+ if (!pick) return undefined
+
+ const dir = AppFileSystem.resolve(input.cwd)
+ const out = yield* Effect.sync(() =>
+ pick.fileSearch(query, {
+ pageIndex: 0,
+ currentFile: input.current, // supports both relative and absolute (relative preferred)
+ pageSize: Math.max(input.limit ?? 100, 100),
+ }),
+ )
+ if (!out.ok) {
+ log.warn("fff file search failed", { dir, query, error: out.error })
+ return undefined
+ }
+
+ const rows: string[] = Array.from(
+ new Set(
+ out.value.items.flatMap((item, idx): string[] => {
+ const score = out.value.scores[idx]
+ if (!score || score.total <= 0) return []
+ return [normalize(item.relativePath)]
+ }),
+ ),
+ )
+ const current = yield* InstanceState.get(state)
+ remember(
+ current,
+ dir,
+ query,
+ rows.map((row) => path.join(dir, row)),
+ )
+ return rows.slice(0, input.limit ?? 100)
+ })
+
+ const search: Interface["search"] = Effect.fn("Search.search")(function* (input) {
+ input.signal?.throwIfAborted()
+ if (input.file?.length) return yield* rip(input)
+
+ const pick = yield* picker(input.cwd).pipe(Effect.catch(() => Effect.succeed(undefined)))
+ if (!pick) return yield* rip(input)
+
+ const dir = AppFileSystem.resolve(input.cwd)
+ const limit = input.limit ?? 100
+
+ const out = yield* Effect.sync(() =>
+ pick.grep(fffGlobbedQuery(input.pattern, input.glob), {
+ mode: "regex",
+ pageSize: limit,
+ timeBudgetMs: 1_500,
+ }),
+ )
+ if (!out.ok) {
+ log.warn("fff grep failed", { dir, pattern: input.pattern, error: out.error })
+ return yield* rip(input)
+ }
+
+ const rows: Item[] = out.value.items.map(item)
+ const regexFallbackError = out.value.regexFallbackError
+
+ if (!rows.length && input.glob?.length) return yield* rip(input)
+
+ const current = yield* InstanceState.get(state)
+ remember(current, dir, input.pattern, Array.from(new Set(rows.map((row) => path.join(dir, row.path.text)))))
+
+ return {
+ items: rows,
+ partial: false,
+ hasNextPage: !!out.value.nextCursor,
+ engine: "fff" as const,
+ regexFallbackError,
+ }
+ })
+
+ const glob: Interface["glob"] = Effect.fn("Search.glob")(function* (input) {
+ input.signal?.throwIfAborted()
+
+ const dir = AppFileSystem.resolve(input.cwd)
+ const limit = input.limit ?? 100
+ const pick = yield* picker(dir).pipe(Effect.catch(() => Effect.succeed(undefined)))
+
+ if (pick) {
+ const out = yield* Effect.sync(() =>
+ pick.fileSearch(include(input.pattern), {
+ pageIndex: 0,
+ pageSize: Math.max(limit * 4, 200),
+ }),
+ )
+
+ if (out.ok) {
+ const rows: string[] = out.value.items.map((item) => item.relativePath)
+
+ if (rows.length > 0) {
+ const current = yield* InstanceState.get(state)
+ remember(
+ current,
+ dir,
+ input.pattern,
+ rows.map((row) => path.join(dir, row)),
+ )
+
+ return {
+ files: rows.slice(0, limit).map((row) => path.join(dir, row)),
+ truncated: rows.length > limit,
+ }
+ }
+ } else {
+ log.warn("fff glob failed", { dir, pattern: input.pattern, error: out.error })
+ }
+ }
+
+ const rows = yield* rg.files({ cwd: dir, glob: [input.pattern], signal: input.signal }).pipe(
+ Stream.take(limit + 1),
+ Stream.runCollect,
+ Effect.map((chunk) => [...chunk]),
+ )
+ const truncated = rows.length > limit
+ if (truncated) rows.length = limit
+
+ const output = yield* Effect.forEach(
+ rows,
+ Effect.fnUntraced(function* (file) {
+ const full = path.join(dir, file)
+ const info = yield* fs.stat(full).pipe(Effect.catch(() => Effect.succeed(undefined)))
+ const time =
+ info?.mtime.pipe(
+ Option.map((item) => item.getTime()),
+ Option.getOrElse(() => 0),
+ ) ?? 0
+ return { file: full, time }
+ }),
+ { concurrency: 16 },
+ )
+ output.sort((a, b) => b.time - a.time)
+ return {
+ files: output.map((item) => item.file),
+ truncated,
+ }
+ })
+
+ const open: Interface["open"] = Effect.fn("Search.open")(function* (input) {
+ const current = yield* InstanceState.get(state)
+ const file = input.cwd
+ ? AppFileSystem.resolve(path.isAbsolute(input.file) ? input.file : path.join(input.cwd, input.file))
+ : AppFileSystem.resolve(input.file)
+ const idx = current.recent.findIndex((item) => item.files.includes(file))
+ if (idx < 0) return
+
+ const row = current.recent[idx]
+ current.recent.splice(idx, 1)
+ const pick = current.pick.get(row.dir)
+ if (!pick) return
+
+ const out = yield* Effect.sync(() => pick.trackQuery(row.text, file))
+ if (!out.ok) log.warn("fff track query failed", { dir: row.dir, query: row.text, file, error: out.error })
+ })
+
+ return Service.of({ files, tree, search, file, glob, open })
+ }),
+)
+
+export const defaultLayer = layer.pipe( // NOTE(review): explicit Layer.Layer<...> annotation was garbled in transit; rely on inference
+ Layer.provide(Ripgrep.defaultLayer),
+ Layer.provide(AppFileSystem.defaultLayer),
+)
+
+const { runPromise } = makeRuntime(Service, defaultLayer)
+
+export function tree(input: Ripgrep.TreeInput) {
+ return runPromise((svc) => svc.tree(input))
+}
+
+export function search(input: Ripgrep.SearchInput) {
+ return runPromise((svc) => svc.search(input))
+}
+
+export function file(input: FileInput) {
+ return runPromise((svc) => svc.file(input))
+}
+
+export function glob(input: GlobInput) {
+ return runPromise((svc) => svc.glob(input))
+}
+
+export function open(input: { cwd?: string; file: string }) {
+ return runPromise((svc) => svc.open(input))
+}
+
+export * as Search from "./search"
diff --git a/packages/opencode/src/tool/glob.ts b/packages/opencode/src/tool/glob.ts
index ce58331ea328..6601accb4038 100644
--- a/packages/opencode/src/tool/glob.ts
+++ b/packages/opencode/src/tool/glob.ts
@@ -1,9 +1,8 @@
import path from "path"
-import { Effect, Option, Schema } from "effect"
-import * as Stream from "effect/Stream"
+import { Effect, Schema } from "effect"
import { InstanceState } from "@/effect/instance-state"
import { AppFileSystem } from "@opencode-ai/core/filesystem"
-import { Ripgrep } from "../file/ripgrep"
+import { Search } from "../file/search"
import { assertExternalDirectoryEffect } from "./external-directory"
import DESCRIPTION from "./glob.txt"
import * as Tool from "./tool"
@@ -19,9 +18,9 @@ export const Parameters = Schema.Struct({
export const GlobTool = Tool.define(
"glob",
Effect.gen(function* () {
- const rg = yield* Ripgrep.Service
const fs = yield* AppFileSystem.Service
const reference = yield* Reference.Service
+ const searchSvc = yield* Search.Service
return {
description: DESCRIPTION,
@@ -52,36 +51,18 @@ export const GlobTool = Tool.define(
})
const limit = 100
- let truncated = false
- const files = yield* rg.files({ cwd: search, glob: [params.pattern], signal: ctx.abort }).pipe(
- Stream.mapEffect((file) =>
- Effect.gen(function* () {
- const full = path.resolve(search, file)
- const info = yield* fs.stat(full).pipe(Effect.catch(() => Effect.succeed(undefined)))
- const mtime =
- info?.mtime.pipe(
- Option.map((date) => date.getTime()),
- Option.getOrElse(() => 0),
- ) ?? 0
- return { path: full, mtime }
- }),
- ),
- Stream.take(limit + 1),
- Stream.runCollect,
- Effect.map((chunk) => [...chunk]),
- )
-
- if (files.length > limit) {
- truncated = true
- files.length = limit
- }
- files.sort((a, b) => b.mtime - a.mtime)
+ const files = yield* searchSvc.glob({
+ cwd: search,
+ pattern: params.pattern,
+ limit,
+ signal: ctx.abort,
+ })
const output = []
- if (files.length === 0) output.push("No files found")
- if (files.length > 0) {
- output.push(...files.map((file) => file.path))
- if (truncated) {
+ if (files.files.length === 0) output.push("No files found")
+ if (files.files.length > 0) {
+ output.push(...files.files)
+ if (files.truncated) {
output.push("")
output.push(
`(Results are truncated: showing first ${limit} results. Consider using a more specific path or pattern.)`,
@@ -92,8 +73,8 @@ export const GlobTool = Tool.define(
return {
title: path.relative(ins.worktree, search),
metadata: {
- count: files.length,
- truncated,
+ count: files.files.length,
+ truncated: files.truncated,
},
output: output.join("\n"),
}
diff --git a/packages/opencode/src/tool/grep.ts b/packages/opencode/src/tool/grep.ts
index 01aa6a0b72b4..84a08f3c7648 100644
--- a/packages/opencode/src/tool/grep.ts
+++ b/packages/opencode/src/tool/grep.ts
@@ -1,9 +1,8 @@
import path from "path"
-import { Schema } from "effect"
-import { Effect, Option } from "effect"
+import { Effect, Schema } from "effect"
import { InstanceState } from "@/effect/instance-state"
import { AppFileSystem } from "@opencode-ai/core/filesystem"
-import { Ripgrep } from "../file/ripgrep"
+import { Search } from "../file/search"
import { assertExternalDirectoryEffect } from "./external-directory"
import DESCRIPTION from "./grep.txt"
import * as Tool from "./tool"
@@ -25,7 +24,7 @@ export const GrepTool = Tool.define(
"grep",
Effect.gen(function* () {
const fs = yield* AppFileSystem.Service
- const rg = yield* Ripgrep.Service
+ const searchSvc = yield* Search.Service
const reference = yield* Reference.Service
return {
@@ -69,7 +68,7 @@ export const GrepTool = Tool.define(
const cwd = info?.type === "Directory" ? search : path.dirname(search)
const file = info?.type === "Directory" ? undefined : [path.relative(cwd, search)]
- const result = yield* rg.search({
+ const result = yield* searchSvc.search({
cwd,
pattern: params.pattern,
glob: params.include ? [params.include] : undefined,
@@ -85,38 +84,15 @@ export const GrepTool = Tool.define(
line: item.line_number,
text: item.lines.text,
}))
- const times = new Map(
- (yield* Effect.forEach(
- [...new Set(rows.map((row) => row.path))],
- Effect.fnUntraced(function* (file) {
- const info = yield* fs.stat(file).pipe(Effect.catch(() => Effect.succeed(undefined)))
- if (!info || info.type === "Directory") return undefined
- return [
- file,
- info.mtime.pipe(
- Option.map((time) => time.getTime()),
- Option.getOrElse(() => 0),
- ) ?? 0,
- ] as const
- }),
- { concurrency: 16 },
- )).filter((entry): entry is readonly [string, number] => Boolean(entry)),
- )
- const matches = rows.flatMap((row) => {
- const mtime = times.get(row.path)
- if (mtime === undefined) return []
- return [{ ...row, mtime }]
- })
-
- matches.sort((a, b) => b.mtime - a.mtime)
const limit = 100
- const truncated = matches.length > limit
- const final = truncated ? matches.slice(0, limit) : matches
+ const truncated = rows.length > limit
+ const final = truncated ? rows.slice(0, limit) : rows
if (final.length === 0) return empty
- const total = matches.length
- const output = [`Found ${total} matches${truncated ? ` (showing first ${limit})` : ""}`]
+ const total = rows.length
+ const hasMore = truncated || result.hasNextPage
+ const output = [`Found ${total} matches${hasMore ? " (more matches available)" : ""}`]
let current = ""
for (const match of final) {
@@ -137,11 +113,23 @@ export const GrepTool = Tool.define(
)
}
+ if (result.hasNextPage) {
+ output.push("")
+ output.push(
+ `(Results truncated. Consider using a more specific path or pattern.)`,
+ )
+ }
+
if (result.partial) {
output.push("")
output.push("(Some paths were inaccessible and skipped)")
}
+ if (result.regexFallbackError) {
+ output.push("")
+ output.push(`(Regex fallback: ${result.regexFallbackError})`)
+ }
+
return {
title: params.pattern,
metadata: {
diff --git a/packages/opencode/src/tool/read.ts b/packages/opencode/src/tool/read.ts
index 8a1b64fec546..fb71aa98bc49 100644
--- a/packages/opencode/src/tool/read.ts
+++ b/packages/opencode/src/tool/read.ts
@@ -8,6 +8,7 @@ import DESCRIPTION from "./read.txt"
import { InstanceState } from "@/effect/instance-state"
import { assertExternalDirectoryEffect } from "./external-directory"
import { Instruction } from "../session/instruction"
+import { Search } from "../file/search"
import { isPdfAttachment, sniffAttachmentMime } from "@/util/media"
import { Reference } from "@/reference/reference"
@@ -41,6 +42,7 @@ export const ReadTool = Tool.define(
const instruction = yield* Instruction.Service
const lsp = yield* LSP.Service
const reference = yield* Reference.Service
+ const search = yield* Search.Service
const scope = yield* Scope.Scope
const miss = Effect.fn("ReadTool.miss")(function* (filepath: string) {
@@ -85,6 +87,7 @@ export const ReadTool = Tool.define(
})
const warm = Effect.fn("ReadTool.warm")(function* (filepath: string) {
+ yield* search.open({ file: filepath }).pipe(Effect.ignore)
yield* lsp.touchFile(filepath).pipe(Effect.ignore, Effect.forkIn(scope))
})
diff --git a/packages/opencode/src/tool/registry.ts b/packages/opencode/src/tool/registry.ts
index 879855c366c6..27b3c69ca297 100644
--- a/packages/opencode/src/tool/registry.ts
+++ b/packages/opencode/src/tool/registry.ts
@@ -36,7 +36,7 @@ import { Effect, Layer, Context } from "effect"
import { FetchHttpClient, HttpClient } from "effect/unstable/http"
import { ChildProcessSpawner } from "effect/unstable/process/ChildProcessSpawner"
import { CrossSpawnSpawner } from "@opencode-ai/core/cross-spawn-spawner"
-import { Ripgrep } from "../file/ripgrep"
+import { Search } from "../file/search"
import { Format } from "../format"
import { InstanceState } from "@/effect/instance-state"
import { Question } from "../question"
@@ -100,7 +100,7 @@ export const layer: Layer.Layer<
| Bus.Service
| HttpClient.HttpClient
| ChildProcessSpawner
- | Ripgrep.Service
+ | Search.Service
| Format.Service
| Truncate.Service
| RuntimeFlags.Service
@@ -387,7 +387,7 @@ export const defaultLayer = Layer.suspend(() =>
Layer.provide(FetchHttpClient.layer),
Layer.provide(Format.defaultLayer),
Layer.provide(CrossSpawnSpawner.defaultLayer),
- Layer.provide(Ripgrep.defaultLayer),
+ Layer.provide(Search.defaultLayer),
Layer.provide(Truncate.defaultLayer),
)
.pipe(Layer.provide(RuntimeFlags.defaultLayer)),
diff --git a/packages/opencode/src/tool/skill.ts b/packages/opencode/src/tool/skill.ts
index 8c41077be5ec..fe2aaadbb146 100644
--- a/packages/opencode/src/tool/skill.ts
+++ b/packages/opencode/src/tool/skill.ts
@@ -2,7 +2,7 @@ import path from "path"
import { pathToFileURL } from "url"
import { Effect, Schema } from "effect"
import * as Stream from "effect/Stream"
-import { Ripgrep } from "../file/ripgrep"
+import { Search } from "../file/search"
import { Skill } from "../skill"
import * as Tool from "./tool"
import DESCRIPTION from "./skill.txt"
@@ -15,7 +15,7 @@ export const SkillTool = Tool.define(
"skill",
Effect.gen(function* () {
const skill = yield* Skill.Service
- const rg = yield* Ripgrep.Service
+ const searchSvc = yield* Search.Service
return {
description: DESCRIPTION,
@@ -39,7 +39,7 @@ export const SkillTool = Tool.define(
const dir = path.dirname(info.location)
const base = pathToFileURL(dir).href
const limit = 10
- const files = yield* rg.files({ cwd: dir, follow: false, hidden: true, signal: ctx.abort }).pipe(
+ const files = yield* searchSvc.files({ cwd: dir, follow: false, hidden: true, signal: ctx.abort }).pipe(
Stream.filter((file) => !file.includes("SKILL.md")),
Stream.map((file) => path.resolve(dir, file)),
Stream.take(limit),
diff --git a/packages/opencode/test/file/index.test.ts b/packages/opencode/test/file/index.test.ts
index b7d531c63d29..f997fd753ae0 100644
--- a/packages/opencode/test/file/index.test.ts
+++ b/packages/opencode/test/file/index.test.ts
@@ -2,6 +2,7 @@ import { afterEach, describe, expect } from "bun:test"
import { AppFileSystem } from "@opencode-ai/core/filesystem"
import { $ } from "bun"
import { Cause, Effect, Exit, Layer } from "effect"
+import { setTimeout as sleep } from "node:timers/promises"
import path from "path"
import fs from "fs/promises"
import { File } from "../../src/file"
@@ -756,6 +757,8 @@ describe("file/index Filesystem patterns", () => {
expect(yield* search({ query: "fresh", type: "file" })).toEqual([])
yield* Effect.promise(() => fs.writeFile(path.join(test.directory, "fresh.ts"), "fresh", "utf-8"))
+ // fff guarantees eventual search consistency within 100ms after FS change
+ yield* Effect.promise(() => sleep(100))
expect(yield* search({ query: "fresh", type: "file" })).toContain("fresh.ts")
}),
diff --git a/packages/opencode/test/file/search.test.ts b/packages/opencode/test/file/search.test.ts
new file mode 100644
index 000000000000..8a0cfc3b05cc
--- /dev/null
+++ b/packages/opencode/test/file/search.test.ts
@@ -0,0 +1,123 @@
+import { afterEach, describe, expect } from "bun:test"
+import path from "path"
+import { AppFileSystem } from "@opencode-ai/core/filesystem"
+import { Effect, Layer } from "effect"
+import { Fff } from "#fff"
+import { CrossSpawnSpawner } from "@opencode-ai/core/cross-spawn-spawner"
+import { Search } from "../../src/file/search"
+import { Global } from "@opencode-ai/core/global"
+import { disposeAllInstances, provideTmpdirInstance } from "../fixture/fixture"
+import { testEffect } from "../lib/effect"
+
+afterEach(async () => {
+ await disposeAllInstances()
+})
+
+const it = testEffect(Layer.mergeAll(Search.defaultLayer, CrossSpawnSpawner.defaultLayer))
+
+function db(dir: string) {
+ const id = Buffer.from(AppFileSystem.resolve(dir)).toString("base64url")
+ return {
+ frecency: path.join(Global.Path.cache, "fff", `${id}.frecency.mdb`),
+ history: path.join(Global.Path.cache, "fff", `${id}.history.mdb`),
+ }
+}
+
+describe("file.search", () => {
+ it.live("uses fff for Bun-backed grep", () =>
+ provideTmpdirInstance((dir) =>
+ Effect.gen(function* () {
+ expect(Fff.available()).toBe(true)
+ yield* Effect.promise(() => Bun.write(path.join(dir, "src", "match.ts"), "const needle = 1\n"))
+
+ const search = yield* Search.Service
+ const result = yield* search.search({ cwd: dir, pattern: "needle", limit: 10 })
+
+ expect(result.engine).toBe("fff")
+ expect(result.items).toHaveLength(1)
+ expect(result.items[0]?.path.text).toBe("src/match.ts")
+ }),
+ ),
+ )
+
+ it.live("keeps fuzzy file abbreviation matches", () =>
+ provideTmpdirInstance((dir) =>
+ Effect.gen(function* () {
+ expect(Fff.available()).toBe(true)
+ yield* Effect.promise(() => Bun.write(path.join(dir, "README.md"), "hello\n"))
+
+ const search = yield* Search.Service
+ const results = yield* search.file({ cwd: dir, query: "rdme", limit: 10 })
+
+ expect(results).toContain("README.md")
+ }),
+ ),
+ )
+
+ it.live("keeps paging grep results without an explicit limit", () =>
+ provideTmpdirInstance((dir) =>
+ Effect.gen(function* () {
+ expect(Fff.available()).toBe(true)
+ yield* Effect.promise(() =>
+ Bun.write(
+ path.join(dir, "matches.txt"),
+ Array.from({ length: 150 }, (_, idx) => `needle ${idx}\n`).join(""),
+ ),
+ )
+
+ const search = yield* Search.Service
+ const result = yield* search.search({ cwd: dir, pattern: "needle" })
+
+ expect(result.items).toHaveLength(150)
+ }),
+ ),
+ )
+
+ it.live("uses byte ranges for UTF-8 grep submatches", () =>
+ provideTmpdirInstance((dir) =>
+ Effect.gen(function* () {
+ expect(Fff.available()).toBe(true)
+ yield* Effect.promise(() => Bun.write(path.join(dir, "unicode.txt"), "éneedle\n"))
+
+ const search = yield* Search.Service
+ const result = yield* search.search({ cwd: dir, pattern: "needle", limit: 10 })
+
+ expect(result.items[0]?.submatches[0]?.match.text).toBe("needle")
+ }),
+ ),
+ )
+
+ it.live("records query history when a searched file is opened", () =>
+ provideTmpdirInstance((dir) =>
+ Effect.gen(function* () {
+ expect(Fff.available()).toBe(true)
+ yield* Effect.promise(() => Bun.write(path.join(dir, "alpha-target-one.ts"), "export const one = 1\n"))
+ yield* Effect.promise(() => Bun.write(path.join(dir, "alpha-target-two.ts"), "export const two = 2\n"))
+
+ const search = yield* Search.Service
+ const results = yield* search.file({ cwd: dir, query: "alpha target two", limit: 10 })
+
+ expect(results).toContain("alpha-target-two.ts")
+
+ yield* search.open({ cwd: dir, file: "alpha-target-two.ts" })
+ yield* Effect.promise(() => disposeAllInstances())
+
+ const picker = Fff.create({
+ basePath: dir,
+ frecencyDbPath: db(dir).frecency,
+ historyDbPath: db(dir).history,
+ aiMode: true,
+ })
+ expect(picker.ok).toBe(true)
+ if (!picker.ok) return
+
+ const history = picker.value.getHistoricalQuery(0)
+ picker.value.destroy()
+
+ expect(history.ok).toBe(true)
+ if (!history.ok) return
+ expect(history.value).toBe("alpha target two")
+ }),
+ ),
+ )
+})
diff --git a/packages/opencode/test/server/httpapi-file.test.ts b/packages/opencode/test/server/httpapi-file.test.ts
index b2403b9fb2ba..b837ea372be1 100644
--- a/packages/opencode/test/server/httpapi-file.test.ts
+++ b/packages/opencode/test/server/httpapi-file.test.ts
@@ -64,6 +64,7 @@ describe("file HttpApi", () => {
request(FilePaths.findSymbol, tmp.path, { query: "hello" }),
])
+
expect(text.status).toBe(200)
expect(await text.json()).toContainEqual(expect.objectContaining({ line_number: 1 }))
diff --git a/packages/opencode/test/session/prompt.test.ts b/packages/opencode/test/session/prompt.test.ts
index 891efc18721d..e2ee877abe4e 100644
--- a/packages/opencode/test/session/prompt.test.ts
+++ b/packages/opencode/test/session/prompt.test.ts
@@ -45,7 +45,7 @@ import { Truncate } from "@/tool/truncate"
import * as Log from "@opencode-ai/core/util/log"
import { CrossSpawnSpawner } from "@opencode-ai/core/cross-spawn-spawner"
import * as Database from "../../src/storage/db"
-import { Ripgrep } from "../../src/file/ripgrep"
+import { Search } from "../../src/file/search"
import { Format } from "../../src/format"
import { Reference } from "../../src/reference/reference"
import { TestInstance } from "../fixture/fixture"
@@ -191,7 +191,7 @@ function makeHttp(input?: { processor?: "blocking" }) {
Layer.provide(CrossSpawnSpawner.defaultLayer),
Layer.provide(Git.defaultLayer),
Layer.provide(Reference.defaultLayer),
- Layer.provide(Ripgrep.defaultLayer),
+ Layer.provide(Search.defaultLayer),
Layer.provide(Format.defaultLayer),
Layer.provide(RuntimeFlags.layer({ experimentalEventSystem: true })),
Layer.provideMerge(todo),
diff --git a/packages/opencode/test/session/snapshot-tool-race.test.ts b/packages/opencode/test/session/snapshot-tool-race.test.ts
index 664b02a6cc2f..263524bc57e7 100644
--- a/packages/opencode/test/session/snapshot-tool-race.test.ts
+++ b/packages/opencode/test/session/snapshot-tool-race.test.ts
@@ -56,7 +56,7 @@ import { ToolRegistry } from "@/tool/registry"
import { Truncate } from "@/tool/truncate"
import { AppFileSystem } from "@opencode-ai/core/filesystem"
import { CrossSpawnSpawner } from "@opencode-ai/core/cross-spawn-spawner"
-import { Ripgrep } from "../../src/file/ripgrep"
+import { Search } from "../../src/file/search"
import { Format } from "../../src/format"
import { Reference } from "../../src/reference/reference"
import { SyncEvent } from "@/sync"
@@ -140,7 +140,7 @@ function makeHttp() {
Layer.provide(CrossSpawnSpawner.defaultLayer),
Layer.provide(Git.defaultLayer),
Layer.provide(Reference.defaultLayer),
- Layer.provide(Ripgrep.defaultLayer),
+ Layer.provide(Search.defaultLayer),
Layer.provide(Format.defaultLayer),
Layer.provide(RuntimeFlags.layer({ experimentalEventSystem: true })),
Layer.provideMerge(todo),
diff --git a/packages/opencode/test/tool/glob.test.ts b/packages/opencode/test/tool/glob.test.ts
index 45dc0b36a9fb..4de5ca356b60 100644
--- a/packages/opencode/test/tool/glob.test.ts
+++ b/packages/opencode/test/tool/glob.test.ts
@@ -4,7 +4,7 @@ import { Cause, Effect, Exit, Layer } from "effect"
import { GlobTool } from "../../src/tool/glob"
import { SessionID, MessageID } from "../../src/session/schema"
import { CrossSpawnSpawner } from "@opencode-ai/core/cross-spawn-spawner"
-import { Ripgrep } from "../../src/file/ripgrep"
+import { Search } from "../../src/file/search"
import { AppFileSystem } from "@opencode-ai/core/filesystem"
import { Truncate } from "@/tool/truncate"
import { Agent } from "../../src/agent/agent"
@@ -16,7 +16,7 @@ const it = testEffect(
Layer.mergeAll(
CrossSpawnSpawner.defaultLayer,
AppFileSystem.defaultLayer,
- Ripgrep.defaultLayer,
+ Search.defaultLayer,
Truncate.defaultLayer,
Agent.defaultLayer,
Reference.defaultLayer,
diff --git a/packages/opencode/test/tool/grep.test.ts b/packages/opencode/test/tool/grep.test.ts
index 29b5a60db2d7..d1d5e43541c9 100644
--- a/packages/opencode/test/tool/grep.test.ts
+++ b/packages/opencode/test/tool/grep.test.ts
@@ -9,7 +9,7 @@ import { SessionID, MessageID } from "../../src/session/schema"
import { CrossSpawnSpawner } from "@opencode-ai/core/cross-spawn-spawner"
import { Truncate } from "@/tool/truncate"
import { Agent } from "../../src/agent/agent"
-import { Ripgrep } from "../../src/file/ripgrep"
+import { Search } from "../../src/file/search"
import { AppFileSystem } from "@opencode-ai/core/filesystem"
import { testEffect } from "../lib/effect"
import { Reference } from "@/reference/reference"
@@ -20,7 +20,7 @@ const it = testEffect(
Layer.mergeAll(
CrossSpawnSpawner.defaultLayer,
AppFileSystem.defaultLayer,
- Ripgrep.defaultLayer,
+ Search.defaultLayer,
Truncate.defaultLayer,
Agent.defaultLayer,
Reference.defaultLayer,
diff --git a/packages/opencode/test/tool/read.test.ts b/packages/opencode/test/tool/read.test.ts
index bbfc4c4843d5..2013b99b8927 100644
--- a/packages/opencode/test/tool/read.test.ts
+++ b/packages/opencode/test/tool/read.test.ts
@@ -8,6 +8,7 @@ import { Global } from "@opencode-ai/core/global"
import { Config } from "@/config/config"
import { RuntimeFlags } from "@/effect/runtime-flags"
import { Git } from "@/git"
+import { Search } from "../../src/file/search"
import { LSP } from "@/lsp/lsp"
import { Permission } from "../../src/permission"
import { SessionID, MessageID } from "../../src/session/schema"
@@ -53,6 +54,7 @@ const readLayer = (flags: Partial = {}) =>
Instruction.defaultLayer,
LSP.defaultLayer,
referenceLayer(flags),
+ Search.defaultLayer,
Truncate.defaultLayer,
)
diff --git a/packages/opencode/test/tool/registry.test.ts b/packages/opencode/test/tool/registry.test.ts
index c4267042953a..99b0ada5d2f5 100644
--- a/packages/opencode/test/tool/registry.test.ts
+++ b/packages/opencode/test/tool/registry.test.ts
@@ -25,7 +25,7 @@ import { Instruction } from "@/session/instruction"
import { Bus } from "@/bus"
import { FetchHttpClient } from "effect/unstable/http"
import { Format } from "@/format"
-import { Ripgrep } from "@/file/ripgrep"
+import { Search } from "@/file/search"
import * as Truncate from "@/tool/truncate"
import { InstanceState } from "@/effect/instance-state"
import { Reference } from "@/reference/reference"
@@ -60,7 +60,7 @@ const registryLayer = (flags: Partial = {}) =>
Layer.provide(FetchHttpClient.layer),
Layer.provide(Format.defaultLayer),
Layer.provide(node),
- Layer.provide(Ripgrep.defaultLayer),
+ Layer.provide(Search.defaultLayer),
Layer.provide(Truncate.defaultLayer),
)
.pipe(Layer.provide(RuntimeFlags.layer(flags)))