Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 16 additions & 2 deletions crates/fff-nvim/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,9 @@ use fff::frecency::FrecencyTracker;
use fff::path_utils::expand_tilde;
use fff::query_tracker::QueryTracker;
use fff::{
DbHealthChecker, Error, FFFMode, FileSearchConfig, FuzzySearchOptions, PaginationArgs,
QueryParser, Score, SearchResult, SharedFrecency, SharedPicker, SharedQueryTracker,
DbHealthChecker, Error, FFFMode, FileSearchConfig, FuzzySearchOptions, GrepConfig,
PaginationArgs, QueryParser, Score, SearchResult, SharedFrecency, SharedPicker,
SharedQueryTracker,
};
use mimalloc::MiMalloc;
use mlua::prelude::*;
Expand Down Expand Up @@ -582,6 +583,18 @@ pub fn get_historical_grep_query(_: &Lua, offset: usize) -> LuaResult<Option<Str
.into_lua_result()
}

/// Parse a grep query string and return its text portion (with constraints stripped).
///
/// Delegates to the Rust `GrepConfig` parser so that constraint detection
/// lives in exactly one place; Lua callers receive a table with a
/// `grep_text` field and never re-implement token parsing themselves.
pub fn parse_grep_query(lua: &Lua, query: String) -> LuaResult<LuaTable> {
    // Build the result table first; any Lua allocation error propagates via `?`.
    let result = lua.create_table()?;
    let parsed = QueryParser::new(GrepConfig).parse(&query);
    result.set("grep_text", parsed.grep_text())?;
    Ok(result)
}

pub fn wait_for_initial_scan(_: &Lua, timeout_ms: Option<u64>) -> LuaResult<bool> {
// Extract the scan signal Arc WITHOUT holding the read lock, so the
// scan thread can acquire the write lock to store its results.
Expand Down Expand Up @@ -822,6 +835,7 @@ fn create_exports(lua: &Lua) -> LuaResult<LuaTable> {
exports.set("health_check", lua.create_function(health_check)?)?;
exports.set("shorten_path", lua.create_function(shorten_path)?)?;
exports.set("hex_dump", lua.create_function(hex_dump::hex_dump)?)?;
exports.set("parse_grep_query", lua.create_function(parse_grep_query)?)?;

Ok(exports)
}
Expand Down
1 change: 1 addition & 0 deletions lua/fff/fuzzy.lua
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@ M.get_git_root = rust_module.get_git_root

-- Grep functions
M.live_grep = rust_module.live_grep
M.parse_grep_query = rust_module.parse_grep_query

-- Utility functions
M.health_check = rust_module.health_check
Expand Down
17 changes: 7 additions & 10 deletions lua/fff/location_utils.lua
Original file line number Diff line number Diff line change
Expand Up @@ -178,16 +178,13 @@ function M.highlight_grep_matches(bufnr, location, namespace)

local query = location.grep_query

-- Extract the actual search text from the grep query (strip file constraints like *.rs /src/)
-- The query parser uses space-separated tokens; the first non-constraint token is the pattern.
-- Simple heuristic: strip tokens that look like constraints (start with *, /, or !)
local search_text = query
local parts = vim.split(query, '%s+')
local text_parts = {}
for _, part in ipairs(parts) do
if part ~= '' and not part:match('^[%*!/]') and not part:match('^%.') then table.insert(text_parts, part) end
end
if #text_parts > 0 then search_text = text_parts[1] end
-- Use the Rust GrepConfig parser as the single source of truth for
-- stripping constraint tokens. This avoids duplicating constraint
-- detection in Lua, which would break whenever a new token type is added.
local fuzzy = require('fff.fuzzy')
local parsed = fuzzy.parse_grep_query(query)
local search_text = parsed.grep_text
if search_text == '' then search_text = query end

if not search_text or search_text == '' then return nil end

Expand Down
Loading