// src/hooks/fileSuggestions.ts
  1  import { statSync } from 'fs'
  2  import ignore from 'ignore'
  3  import * as path from 'path'
  4  import {
  5    CLAUDE_CONFIG_DIRECTORIES,
  6    loadMarkdownFilesForSubdir,
  7  } from 'src/utils/markdownConfigLoader.js'
  8  import type { SuggestionItem } from '../components/PromptInput/PromptInputFooterSuggestions.js'
  9  import {
 10    CHUNK_MS,
 11    FileIndex,
 12    yieldToEventLoop,
 13  } from '../native-ts/file-index/index.js'
 14  import { logEvent } from '../services/analytics/index.js'
 15  import type { FileSuggestionCommandInput } from '../types/fileSuggestion.js'
 16  import { getGlobalConfig } from '../utils/config.js'
 17  import { getCwd } from '../utils/cwd.js'
 18  import { logForDebugging } from '../utils/debug.js'
 19  import { errorMessage } from '../utils/errors.js'
 20  import { execFileNoThrowWithCwd } from '../utils/execFileNoThrow.js'
 21  import { getFsImplementation } from '../utils/fsOperations.js'
 22  import { findGitRoot, gitExe } from '../utils/git.js'
 23  import {
 24    createBaseHookInput,
 25    executeFileSuggestionCommand,
 26  } from '../utils/hooks.js'
 27  import { logError } from '../utils/log.js'
 28  import { expandPath } from '../utils/path.js'
 29  import { ripGrep } from '../utils/ripgrep.js'
 30  import { getInitialSettings } from '../utils/settings/settings.js'
 31  import { createSignal } from '../utils/signal.js'
 32  
 33  // Lazily constructed singleton
 34  let fileIndex: FileIndex | null = null
 35  
 36  function getFileIndex(): FileIndex {
 37    if (!fileIndex) {
 38      fileIndex = new FileIndex()
 39    }
 40    return fileIndex
 41  }
 42  
// In-flight full refresh; also serves as the re-entrancy guard in
// startBackgroundCacheRefresh (only one refresh runs at a time).
let fileListRefreshPromise: Promise<FileIndex> | null = null
// Signal fired when an in-progress index build completes. Lets the
// typeahead UI re-run its last search so partial results upgrade to full.
const indexBuildComplete = createSignal()
export const onIndexBuildComplete = indexBuildComplete.subscribe
// Monotonic counter bumped by clearFileSuggestionCaches. Async work captures
// it at start and discards its result if the caches were cleared meanwhile.
let cacheGeneration = 0

// Background fetch for untracked files
let untrackedFetchPromise: Promise<void> | null = null

// Store tracked files so we can rebuild index with untracked
let cachedTrackedFiles: string[] = []
// Store config files so mergeUntrackedIntoNormalizedCache preserves them
let cachedConfigFiles: string[] = []
// Store tracked directories so mergeUntrackedIntoNormalizedCache doesn't
// recompute ~270k path.dirname() calls on each merge
let cachedTrackedDirs: string[] = []

// Cache for .ignore/.rgignore patterns (keyed by repoRoot:cwd)
let ignorePatternsCache: ReturnType<typeof ignore> | null = null
let ignorePatternsCacheKey: string | null = null

// Throttle state for background refresh. .git/index mtime triggers an
// immediate refresh when tracked files change (add/checkout/commit/rm).
// The time floor still refreshes every 5s to pick up untracked files,
// which don't bump the index.
let lastRefreshMs = 0
let lastGitIndexMtime: number | null = null

// Signatures of the path lists loaded into the Rust index. Two separate
// signatures because the two loadFromFileList call sites use differently
// structured arrays — a shared signature would ping-pong and never match.
// Skips nucleo.restart() when git ls-files returns an unchanged list
// (e.g. `git add` of an already-tracked file bumps index mtime but not the list).
let loadedTrackedSignature: string | null = null
let loadedMergedSignature: string | null = null
 79  
 80  /**
 81   * Clear all file suggestion caches.
 82   * Call this when resuming a session to ensure fresh file discovery.
 83   */
 84  export function clearFileSuggestionCaches(): void {
 85    fileIndex = null
 86    fileListRefreshPromise = null
 87    cacheGeneration++
 88    untrackedFetchPromise = null
 89    cachedTrackedFiles = []
 90    cachedConfigFiles = []
 91    cachedTrackedDirs = []
 92    indexBuildComplete.clear()
 93    ignorePatternsCache = null
 94    ignorePatternsCacheKey = null
 95    lastRefreshMs = 0
 96    lastGitIndexMtime = null
 97    loadedTrackedSignature = null
 98    loadedMergedSignature = null
 99  }
100  
101  /**
102   * Content hash of a path list. A length|first|last sample misses renames of
103   * middle files (same length, same endpoints → stale entry stuck in nucleo).
104   *
105   * Samples every Nth path (plus length). On a 346k-path list this hashes ~700
106   * paths instead of 14MB — enough to catch git operations (checkout, rebase,
107   * add/rm) while running in <1ms. A single mid-list rename that happens to
108   * fall between samples will miss the rebuild, but the 5s refresh floor picks
109   * it up on the next cycle.
110   */
111  export function pathListSignature(paths: string[]): string {
112    const n = paths.length
113    const stride = Math.max(1, Math.floor(n / 500))
114    let h = 0x811c9dc5 | 0
115    for (let i = 0; i < n; i += stride) {
116      const p = paths[i]!
117      for (let j = 0; j < p.length; j++) {
118        h = ((h ^ p.charCodeAt(j)) * 0x01000193) | 0
119      }
120      h = (h * 0x01000193) | 0
121    }
122    // Stride starts at 0 (first path always hashed); explicitly include last
123    // so single-file add/rm at the tail is caught
124    if (n > 0) {
125      const last = paths[n - 1]!
126      for (let j = 0; j < last.length; j++) {
127        h = ((h ^ last.charCodeAt(j)) * 0x01000193) | 0
128      }
129    }
130    return `${n}:${(h >>> 0).toString(16)}`
131  }
132  
133  /**
134   * Stat .git/index to detect git state changes without spawning git ls-files.
135   * Returns null for worktrees (.git is a file → ENOTDIR), fresh repos with no
136   * index yet (ENOENT), and non-git dirs — caller falls back to time throttle.
137   */
138  function getGitIndexMtime(): number | null {
139    const repoRoot = findGitRoot(getCwd())
140    if (!repoRoot) return null
141    try {
142      // eslint-disable-next-line custom-rules/no-sync-fs -- mtimeMs is the operation here, not a pre-check. findGitRoot above already stat-walks synchronously; one more stat is marginal vs spawning git ls-files on every keystroke. Async would force startBackgroundCacheRefresh to become async, breaking the synchronous fileListRefreshPromise contract at the cold-start await site.
143      return statSync(path.join(repoRoot, '.git', 'index')).mtimeMs
144    } catch {
145      return null
146    }
147  }
148  
149  /**
150   * Normalize git paths relative to originalCwd
151   */
152  function normalizeGitPaths(
153    files: string[],
154    repoRoot: string,
155    originalCwd: string,
156  ): string[] {
157    if (originalCwd === repoRoot) {
158      return files
159    }
160    return files.map(f => {
161      const absolutePath = path.join(repoRoot, f)
162      return path.relative(originalCwd, absolutePath)
163    })
164  }
165  
/**
 * Merge already-normalized untracked files into the cache.
 *
 * Rebuilds the index from tracked files + config files + tracked dirs plus
 * the untracked files and their parent directories. Skipped when the merged
 * list's signature matches the last merged load, so repeated background
 * fetches that find nothing new don't trigger a rebuild.
 *
 * @param normalizedUntracked Untracked file paths already relative to cwd
 */
async function mergeUntrackedIntoNormalizedCache(
  normalizedUntracked: string[],
): Promise<void> {
  if (normalizedUntracked.length === 0) return
  // No index or no tracked baseline yet — nothing sensible to merge into.
  if (!fileIndex || cachedTrackedFiles.length === 0) return

  // Directories for tracked files come from cachedTrackedDirs; only the
  // untracked files' directories need computing here.
  const untrackedDirs = await getDirectoryNamesAsync(normalizedUntracked)
  const allPaths = [
    ...cachedTrackedFiles,
    ...cachedConfigFiles,
    ...cachedTrackedDirs,
    ...normalizedUntracked,
    ...untrackedDirs,
  ]
  const sig = pathListSignature(allPaths)
  if (sig === loadedMergedSignature) {
    logForDebugging(
      `[FileIndex] skipped index rebuild — merged paths unchanged`,
    )
    return
  }
  await fileIndex.loadFromFileListAsync(allPaths).done
  // Record the signature only after the load completes successfully.
  loadedMergedSignature = sig
  logForDebugging(
    `[FileIndex] rebuilt index with ${cachedTrackedFiles.length} tracked + ${normalizedUntracked.length} untracked files`,
  )
}
196  
197  /**
198   * Load ripgrep-specific ignore patterns from .ignore or .rgignore files
199   * Returns an ignore instance if patterns were found, null otherwise
200   * Results are cached per repoRoot:cwd combination
201   */
202  async function loadRipgrepIgnorePatterns(
203    repoRoot: string,
204    cwd: string,
205  ): Promise<ReturnType<typeof ignore> | null> {
206    const cacheKey = `${repoRoot}:${cwd}`
207  
208    // Return cached result if available
209    if (ignorePatternsCacheKey === cacheKey) {
210      return ignorePatternsCache
211    }
212  
213    const fs = getFsImplementation()
214    const ignoreFiles = ['.ignore', '.rgignore']
215    const directories = [...new Set([repoRoot, cwd])]
216  
217    const ig = ignore()
218    let hasPatterns = false
219  
220    const paths = directories.flatMap(dir =>
221      ignoreFiles.map(f => path.join(dir, f)),
222    )
223    const contents = await Promise.all(
224      paths.map(p => fs.readFile(p, { encoding: 'utf8' }).catch(() => null)),
225    )
226    for (const [i, content] of contents.entries()) {
227      if (content === null) continue
228      ig.add(content)
229      hasPatterns = true
230      logForDebugging(`[FileIndex] loaded ignore patterns from ${paths[i]}`)
231    }
232  
233    const result = hasPatterns ? ig : null
234    ignorePatternsCache = result
235    ignorePatternsCacheKey = cacheKey
236  
237    return result
238  }
239  
/**
 * Get files using git ls-files (much faster than ripgrep for git repos)
 * Returns tracked files immediately, fetches untracked in background
 * @param respectGitignore If true, excludes gitignored files from untracked results
 * @returns Normalized tracked-file paths, or null when not a git repo or
 *   git ls-files fails — caller falls back to ripgrep.
 *
 * Note: Unlike ripgrep --follow, git ls-files doesn't follow symlinks.
 * This is intentional as git tracks symlinks as symlinks.
 */
async function getFilesUsingGit(
  abortSignal: AbortSignal,
  respectGitignore: boolean,
): Promise<string[] | null> {
  const startTime = Date.now()
  logForDebugging(`[FileIndex] getFilesUsingGit called`)

  // Check if we're in a git repo. findGitRoot is LRU-memoized per path.
  const repoRoot = findGitRoot(getCwd())
  if (!repoRoot) {
    logForDebugging(`[FileIndex] not a git repo, returning null`)
    return null
  }

  try {
    const cwd = getCwd()

    // Get tracked files (fast - reads from git index)
    // Run from repoRoot so paths are relative to repo root, not CWD.
    // core.quotepath=false keeps non-ASCII filenames unescaped in output.
    const lsFilesStart = Date.now()
    const trackedResult = await execFileNoThrowWithCwd(
      gitExe(),
      ['-c', 'core.quotepath=false', 'ls-files', '--recurse-submodules'],
      { timeout: 5000, abortSignal, cwd: repoRoot },
    )
    logForDebugging(
      `[FileIndex] git ls-files (tracked) took ${Date.now() - lsFilesStart}ms`,
    )

    if (trackedResult.code !== 0) {
      logForDebugging(
        `[FileIndex] git ls-files failed (code=${trackedResult.code}, stderr=${trackedResult.stderr}), falling back to ripgrep`,
      )
      return null
    }

    const trackedFiles = trackedResult.stdout.trim().split('\n').filter(Boolean)

    // Normalize paths relative to the current working directory
    let normalizedTracked = normalizeGitPaths(trackedFiles, repoRoot, cwd)

    // Apply .ignore/.rgignore patterns if present (faster than falling back to ripgrep)
    const ignorePatterns = await loadRipgrepIgnorePatterns(repoRoot, cwd)
    if (ignorePatterns) {
      const beforeCount = normalizedTracked.length
      normalizedTracked = ignorePatterns.filter(normalizedTracked)
      logForDebugging(
        `[FileIndex] applied ignore patterns: ${beforeCount} -> ${normalizedTracked.length} files`,
      )
    }

    // Cache tracked files for later merge with untracked
    cachedTrackedFiles = normalizedTracked

    const duration = Date.now() - startTime
    logForDebugging(
      `[FileIndex] git ls-files: ${normalizedTracked.length} tracked files in ${duration}ms`,
    )

    logEvent('tengu_file_suggestions_git_ls_files', {
      file_count: normalizedTracked.length,
      tracked_count: normalizedTracked.length,
      untracked_count: 0,
      duration_ms: duration,
    })

    // Start background fetch for untracked files (don't await).
    // untrackedFetchPromise doubles as a "fetch in flight" guard so only one
    // background ls-files --others runs at a time; finally() clears it.
    if (!untrackedFetchPromise) {
      const untrackedArgs = respectGitignore
        ? [
            '-c',
            'core.quotepath=false',
            'ls-files',
            '--others',
            '--exclude-standard',
          ]
        : ['-c', 'core.quotepath=false', 'ls-files', '--others']

      // Capture the generation so a cache clear during the fetch makes us
      // drop the (now stale) result instead of merging it.
      const generation = cacheGeneration
      untrackedFetchPromise = execFileNoThrowWithCwd(gitExe(), untrackedArgs, {
        timeout: 10000,
        cwd: repoRoot,
      })
        .then(async untrackedResult => {
          if (generation !== cacheGeneration) {
            return // Cache was cleared; don't merge stale untracked files
          }
          if (untrackedResult.code === 0) {
            const rawUntrackedFiles = untrackedResult.stdout
              .trim()
              .split('\n')
              .filter(Boolean)

            // Normalize paths BEFORE applying ignore patterns (consistent with tracked files)
            let normalizedUntracked = normalizeGitPaths(
              rawUntrackedFiles,
              repoRoot,
              cwd,
            )

            // Apply .ignore/.rgignore patterns to normalized untracked files
            // (usually a cache hit — the tracked pass above primed the cache).
            const ignorePatterns = await loadRipgrepIgnorePatterns(
              repoRoot,
              cwd,
            )
            if (ignorePatterns && normalizedUntracked.length > 0) {
              const beforeCount = normalizedUntracked.length
              normalizedUntracked = ignorePatterns.filter(normalizedUntracked)
              logForDebugging(
                `[FileIndex] applied ignore patterns to untracked: ${beforeCount} -> ${normalizedUntracked.length} files`,
              )
            }

            logForDebugging(
              `[FileIndex] background untracked fetch: ${normalizedUntracked.length} files`,
            )
            // Pass already-normalized files directly to merge function
            void mergeUntrackedIntoNormalizedCache(normalizedUntracked)
          }
        })
        .catch(error => {
          // Best-effort background work: log and move on; tracked results
          // are already served.
          logForDebugging(
            `[FileIndex] background untracked fetch failed: ${error}`,
          )
        })
        .finally(() => {
          untrackedFetchPromise = null
        })
    }

    return normalizedTracked
  } catch (error) {
    logForDebugging(`[FileIndex] git ls-files error: ${errorMessage(error)}`)
    return null
  }
}
384  
385  /**
386   * This function collects all parent directories for each file path
387   * and returns a list of unique directory names with a trailing separator.
388   * For example, if the input is ['src/index.js', 'src/utils/helpers.js'],
389   * the output will be ['src/', 'src/utils/'].
390   * @param files An array of file paths
391   * @returns An array of unique directory names with a trailing separator
392   */
393  export function getDirectoryNames(files: string[]): string[] {
394    const directoryNames = new Set<string>()
395    collectDirectoryNames(files, 0, files.length, directoryNames)
396    return [...directoryNames].map(d => d + path.sep)
397  }
398  
399  /**
400   * Async variant: yields every ~10k files so 270k+ file lists don't block
401   * the main thread for >10ms at a time.
402   */
403  export async function getDirectoryNamesAsync(
404    files: string[],
405  ): Promise<string[]> {
406    const directoryNames = new Set<string>()
407    // Time-based chunking: yield after CHUNK_MS of work so slow machines get
408    // smaller chunks and stay responsive.
409    let chunkStart = performance.now()
410    for (let i = 0; i < files.length; i++) {
411      collectDirectoryNames(files, i, i + 1, directoryNames)
412      if ((i & 0xff) === 0xff && performance.now() - chunkStart > CHUNK_MS) {
413        await yieldToEventLoop()
414        chunkStart = performance.now()
415      }
416    }
417    return [...directoryNames].map(d => d + path.sep)
418  }
419  
420  function collectDirectoryNames(
421    files: string[],
422    start: number,
423    end: number,
424    out: Set<string>,
425  ): void {
426    for (let i = start; i < end; i++) {
427      let currentDir = path.dirname(files[i]!)
428      // Early exit if we've already processed this directory and all its parents.
429      // Root detection: path.dirname returns its input at the root (fixed point),
430      // so we stop when dirname stops changing. Checking this before add() keeps
431      // the root out of the result set (matching the old path.parse().root guard).
432      // This avoids path.parse() which allocates a 5-field object per file.
433      while (currentDir !== '.' && !out.has(currentDir)) {
434        const parent = path.dirname(currentDir)
435        if (parent === currentDir) break
436        out.add(currentDir)
437        currentDir = parent
438      }
439    }
440  }
441  
442  /**
443   * Gets additional files from Claude config directories
444   */
445  async function getClaudeConfigFiles(cwd: string): Promise<string[]> {
446    const markdownFileArrays = await Promise.all(
447      CLAUDE_CONFIG_DIRECTORIES.map(subdir =>
448        loadMarkdownFilesForSubdir(subdir, cwd),
449      ),
450    )
451    return markdownFileArrays.flatMap(markdownFiles =>
452      markdownFiles.map(f => f.filePath),
453    )
454  }
455  
456  /**
457   * Gets project files using git ls-files (fast) or ripgrep (fallback)
458   */
459  async function getProjectFiles(
460    abortSignal: AbortSignal,
461    respectGitignore: boolean,
462  ): Promise<string[]> {
463    logForDebugging(
464      `[FileIndex] getProjectFiles called, respectGitignore=${respectGitignore}`,
465    )
466  
467    // Try git ls-files first (much faster for git repos)
468    const gitFiles = await getFilesUsingGit(abortSignal, respectGitignore)
469    if (gitFiles !== null) {
470      logForDebugging(
471        `[FileIndex] using git ls-files result (${gitFiles.length} files)`,
472      )
473      return gitFiles
474    }
475  
476    // Fall back to ripgrep
477    logForDebugging(
478      `[FileIndex] git ls-files returned null, falling back to ripgrep`,
479    )
480    const startTime = Date.now()
481    const rgArgs = [
482      '--files',
483      '--follow',
484      '--hidden',
485      '--glob',
486      '!.git/',
487      '--glob',
488      '!.svn/',
489      '--glob',
490      '!.hg/',
491      '--glob',
492      '!.bzr/',
493      '--glob',
494      '!.jj/',
495      '--glob',
496      '!.sl/',
497    ]
498    if (!respectGitignore) {
499      rgArgs.push('--no-ignore-vcs')
500    }
501  
502    const files = await ripGrep(rgArgs, '.', abortSignal)
503    const relativePaths = files.map(f => path.relative(getCwd(), f))
504  
505    const duration = Date.now() - startTime
506    logForDebugging(
507      `[FileIndex] ripgrep: ${relativePaths.length} files in ${duration}ms`,
508    )
509  
510    logEvent('tengu_file_suggestions_ripgrep', {
511      file_count: relativePaths.length,
512      duration_ms: duration,
513    })
514  
515    return relativePaths
516  }
517  
/**
 * Gets both files and their directory paths for providing path suggestions
 * Uses git ls-files for git repos (fast) or ripgrep as fallback
 * Returns a FileIndex populated for fast fuzzy search
 *
 * Errors are logged and swallowed: the (possibly stale or empty) singleton
 * index is returned either way so the typeahead never hard-fails.
 */
export async function getPathsForSuggestions(): Promise<FileIndex> {
  // Bound the whole discovery pass; ripgrep/git receive this signal.
  const signal = AbortSignal.timeout(10_000)
  const index = getFileIndex()

  try {
    // Check project settings first, then fall back to global config
    const projectSettings = getInitialSettings()
    const globalConfig = getGlobalConfig()
    const respectGitignore =
      projectSettings.respectGitignore ?? globalConfig.respectGitignore ?? true

    const cwd = getCwd()
    // File discovery and config-file loading are independent — run both.
    const [projectFiles, configFiles] = await Promise.all([
      getProjectFiles(signal, respectGitignore),
      getClaudeConfigFiles(cwd),
    ])

    // Cache for mergeUntrackedIntoNormalizedCache
    cachedConfigFiles = configFiles

    const allFiles = [...projectFiles, ...configFiles]
    const directories = await getDirectoryNamesAsync(allFiles)
    cachedTrackedDirs = directories
    const allPathsList = [...directories, ...allFiles]

    // Skip rebuild when the list is unchanged. This is the common case
    // during a typing session — git ls-files returns the same output.
    const sig = pathListSignature(allPathsList)
    if (sig !== loadedTrackedSignature) {
      // Await the full build so cold-start returns complete results. The
      // build yields every ~4ms so the UI stays responsive — user can keep
      // typing during the ~120ms wait without input lag.
      await index.loadFromFileListAsync(allPathsList).done
      loadedTrackedSignature = sig
      // We just replaced the merged index with tracked-only data. Force
      // the next untracked merge to rebuild even if its own sig matches.
      loadedMergedSignature = null
    } else {
      logForDebugging(
        `[FileIndex] skipped index rebuild — tracked paths unchanged`,
      )
    }
  } catch (error) {
    logError(error)
  }

  return index
}
571  
572  /**
573   * Finds the common prefix between two strings
574   */
575  function findCommonPrefix(a: string, b: string): string {
576    const minLength = Math.min(a.length, b.length)
577    let i = 0
578    while (i < minLength && a[i] === b[i]) {
579      i++
580    }
581    return a.substring(0, i)
582  }
583  
584  /**
585   * Finds the longest common prefix among an array of suggestion items
586   */
587  export function findLongestCommonPrefix(suggestions: SuggestionItem[]): string {
588    if (suggestions.length === 0) return ''
589  
590    const strings = suggestions.map(item => item.displayText)
591    let prefix = strings[0]!
592    for (let i = 1; i < strings.length; i++) {
593      const currentString = strings[i]!
594      prefix = findCommonPrefix(prefix, currentString)
595      if (prefix === '') return ''
596    }
597    return prefix
598  }
599  
600  /**
601   * Creates a file suggestion item
602   */
603  function createFileSuggestionItem(
604    filePath: string,
605    score?: number,
606  ): SuggestionItem {
607    return {
608      id: `file-${filePath}`,
609      displayText: filePath,
610      metadata: score !== undefined ? { score } : undefined,
611    }
612  }
613  
614  /**
615   * Find matching files and folders for a given query using the TS file index
616   */
617  const MAX_SUGGESTIONS = 15
618  function findMatchingFiles(
619    fileIndex: FileIndex,
620    partialPath: string,
621  ): SuggestionItem[] {
622    const results = fileIndex.search(partialPath, MAX_SUGGESTIONS)
623    return results.map(result =>
624      createFileSuggestionItem(result.path, result.score),
625    )
626  }
627  
/**
 * Starts a background refresh of the file index cache if not already in progress.
 *
 * Throttled: when a cache already exists, we skip the refresh unless git state
 * has actually changed. This prevents every keystroke from spawning git ls-files
 * and rebuilding the nucleo index.
 */
const REFRESH_THROTTLE_MS = 5_000
export function startBackgroundCacheRefresh(): void {
  // A refresh is already running — it doubles as the re-entrancy guard.
  if (fileListRefreshPromise) return

  // Throttle only when a cache exists — cold start must always populate.
  // Refresh immediately when .git/index mtime changed (tracked files).
  // Otherwise refresh at most once per 5s — this floor picks up new UNTRACKED
  // files, which don't bump .git/index. The signature checks downstream skip
  // the rebuild when the 5s refresh finds nothing actually changed.
  const indexMtime = getGitIndexMtime()
  if (fileIndex) {
    const gitStateChanged =
      indexMtime !== null && indexMtime !== lastGitIndexMtime
    if (!gitStateChanged && Date.now() - lastRefreshMs < REFRESH_THROTTLE_MS) {
      return
    }
  }

  // Capture the generation so a clearFileSuggestionCaches() during the
  // refresh makes us discard the stale result instead of committing it.
  const generation = cacheGeneration
  const refreshStart = Date.now()
  // Ensure the FileIndex singleton exists — it's progressively queryable
  // via readyCount while the build runs. Callers searching early get partial
  // results; indexBuildComplete fires after .done so they can re-search.
  getFileIndex()
  fileListRefreshPromise = getPathsForSuggestions()
    .then(result => {
      if (generation !== cacheGeneration) {
        return result // Cache was cleared; don't overwrite with stale data
      }
      fileListRefreshPromise = null
      indexBuildComplete.emit()
      // Commit the start-time mtime observation on success. If git state
      // changed mid-refresh, the next call will see the newer mtime and
      // correctly refresh again.
      lastGitIndexMtime = indexMtime
      lastRefreshMs = Date.now()
      logForDebugging(
        `[FileIndex] cache refresh completed in ${Date.now() - refreshStart}ms`,
      )
      return result
    })
    .catch(error => {
      logForDebugging(
        `[FileIndex] Cache refresh failed: ${errorMessage(error)}`,
      )
      logError(error)
      if (generation === cacheGeneration) {
        fileListRefreshPromise = null // Allow retry on next call
      }
      return getFileIndex()
    })
}
687  
688  /**
689   * Gets the top-level files and directories in the current working directory
690   * @returns Array of file/directory paths in the current directory
691   */
692  async function getTopLevelPaths(): Promise<string[]> {
693    const fs = getFsImplementation()
694    const cwd = getCwd()
695  
696    try {
697      const entries = await fs.readdir(cwd)
698      return entries.map(entry => {
699        const fullPath = path.join(cwd, entry.name)
700        const relativePath = path.relative(cwd, fullPath)
701        // Add trailing separator for directories
702        return entry.isDirectory() ? relativePath + path.sep : relativePath
703      })
704    } catch (error) {
705      logError(error as Error)
706      return []
707    }
708  }
709  
/**
 * Generate file suggestions for the current input and cursor position
 * @param partialPath The partial file path to match
 * @param showOnEmpty Whether to show suggestions even if partialPath is empty (used for @ symbol)
 * @returns Ranked suggestion items, capped at MAX_SUGGESTIONS; [] on error
 */
export async function generateFileSuggestions(
  partialPath: string,
  showOnEmpty = false,
): Promise<SuggestionItem[]> {
  // If input is empty and we don't want to show suggestions on empty, return nothing
  if (!partialPath && !showOnEmpty) {
    return []
  }

  // Use custom command directly if configured. We don't mix in our config files
  // because the command returns pre-ranked results using its own search logic.
  if (getInitialSettings().fileSuggestion?.type === 'command') {
    const input: FileSuggestionCommandInput = {
      ...createBaseHookInput(),
      query: partialPath,
    }
    const results = await executeFileSuggestionCommand(input)
    return results.slice(0, MAX_SUGGESTIONS).map(createFileSuggestionItem)
  }

  // If the partial path is empty or just a dot, return current directory suggestions
  if (partialPath === '' || partialPath === '.' || partialPath === './') {
    const topLevelPaths = await getTopLevelPaths()
    // Warm the index in the background so the first real query is fast.
    startBackgroundCacheRefresh()
    return topLevelPaths.slice(0, MAX_SUGGESTIONS).map(createFileSuggestionItem)
  }

  const startTime = Date.now()

  try {
    // Kick a background refresh. The index is progressively queryable —
    // searches during build return partial results from ready chunks, and
    // the typeahead callback (setOnIndexBuildComplete) re-fires the search
    // when the build finishes to upgrade partial → full.
    const wasBuilding = fileListRefreshPromise !== null
    startBackgroundCacheRefresh()

    // Handle both './' and '.\'
    // (substring(2) strips the 2-char prefix '.' + path.sep)
    let normalizedPath = partialPath
    const currentDirPrefix = '.' + path.sep
    if (partialPath.startsWith(currentDirPrefix)) {
      normalizedPath = partialPath.substring(2)
    }

    // Handle tilde expansion for home directory
    if (normalizedPath.startsWith('~')) {
      normalizedPath = expandPath(normalizedPath)
    }

    // fileIndex is null only before the first refresh has created it —
    // return no matches rather than blocking on the build.
    const matches = fileIndex
      ? findMatchingFiles(fileIndex, normalizedPath)
      : []

    const duration = Date.now() - startTime
    logForDebugging(
      `[FileIndex] generateFileSuggestions: ${matches.length} results in ${duration}ms (${wasBuilding ? 'partial' : 'full'} index)`,
    )
    logEvent('tengu_file_suggestions_query', {
      duration_ms: duration,
      cache_hit: !wasBuilding,
      result_count: matches.length,
      query_length: partialPath.length,
    })

    return matches
  } catch (error) {
    logError(error)
    return []
  }
}
785  
786  /**
787   * Apply a file suggestion to the input
788   */
789  export function applyFileSuggestion(
790    suggestion: string | SuggestionItem,
791    input: string,
792    partialPath: string,
793    startPos: number,
794    onInputChange: (value: string) => void,
795    setCursorOffset: (offset: number) => void,
796  ): void {
797    // Extract suggestion text from string or SuggestionItem
798    const suggestionText =
799      typeof suggestion === 'string' ? suggestion : suggestion.displayText
800  
801    // Replace the partial path with the selected file path
802    const newInput =
803      input.substring(0, startPos) +
804      suggestionText +
805      input.substring(startPos + partialPath.length)
806    onInputChange(newInput)
807  
808    // Move cursor to end of the file path
809    const newCursorPos = startPos + suggestionText.length
810    setCursorOffset(newCursorPos)
811  }