cache

package
v0.0.0-...-dbba89f Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Apr 14, 2026 License: MIT Imports: 12 Imported by: 0

Documentation

Overview

Package cache - Caching system for frequently parsed content

Package cache - Cached parser wrapper that integrates caching with existing parsers

Example (BasicCaching)

Example_basicCaching demonstrates basic caching operations

package main

import (
	"context"
	"fmt"
	"time"

	"github.com/NortonBen/ai-memory-go/parser/cache"
	"github.com/NortonBen/ai-memory-go/schema"
)

func main() {
	// Build a cache from the default configuration, shrunk to two
	// entries so size limits are easy to observe.
	cfg := cache.DefaultCacheConfig()
	cfg.MaxSize = 2
	pc := cache.NewInMemoryParsingCache(cfg)
	defer pc.Close()

	ctx := context.Background()

	// A couple of chunks to store under one key.
	sample := []*schema.Chunk{
		{ID: "1", Content: "Hello, world!", Type: schema.ChunkTypeText},
		{ID: "2", Content: "This is cached content.", Type: schema.ChunkTypeText},
	}

	// Write the chunks together with arbitrary caller metadata.
	if err := pc.Set(ctx, "example-key", sample, map[string]interface{}{
		"example":   "metadata",
		"cached_at": time.Now(),
	}); err != nil {
		fmt.Printf("Error setting cache: %v\n", err)
		return
	}

	// Read them back; the bool reports whether the key was present.
	if got, ok := pc.Get(ctx, "example-key"); ok {
		fmt.Printf("Cache hit! Retrieved %d chunks\n", len(got))
		fmt.Printf("First chunk: %s\n", got[0].Content)
	} else {
		fmt.Println("Cache miss")
	}

	// Report the metrics accumulated by the operations above.
	m := pc.GetMetrics()
	fmt.Printf("Cache hits: %d, misses: %d, hit rate: %.2f%%\n",
		m.Hits, m.Misses, m.HitRate*100)

}
Output:
Cache hit! Retrieved 2 chunks
First chunk: Hello, world!
Cache hits: 1, misses: 0, hit rate: 100.00%
Example (BatchCaching)

Example_batchCaching demonstrates batch parsing with caching

package main

import (
	"context"
	"fmt"
	"os"
	"path/filepath"

	"github.com/NortonBen/ai-memory-go/parser/cache"
	"github.com/NortonBen/ai-memory-go/parser/core"
	"github.com/NortonBen/ai-memory-go/schema"
)

func main() {
	// Write three throwaway files that will be batch-parsed.
	tmpDir := os.TempDir()
	paths := make([]string, 3)

	for i := range paths {
		p := filepath.Join(tmpDir, fmt.Sprintf("batch_example_%d.txt", i))
		body := fmt.Sprintf("Batch file %d content for caching example", i)
		if err := os.WriteFile(p, []byte(body), 0644); err != nil {
			fmt.Printf("Error creating file %d: %v\n", i, err)
			return
		}
		paths[i] = p
		defer os.Remove(p)
	}

	// Wrap the unified parser with a default cache.
	chunkCfg := schema.DefaultChunkingConfig()
	cp := core.NewCachedUnifiedParser(chunkCfg, cache.DefaultCacheConfig())
	defer cp.Close()

	ctx := context.Background()

	// Parse the batch twice: the first pass populates the cache,
	// the second is served entirely from it.
	first, err := cp.BatchParseFiles(ctx, paths)
	if err != nil {
		fmt.Printf("Error in first batch parse: %v\n", err)
		return
	}

	second, err := cp.BatchParseFiles(ctx, paths)
	if err != nil {
		fmt.Printf("Error in second batch parse: %v\n", err)
		return
	}

	fmt.Printf("First batch: %d files\n", len(first))
	fmt.Printf("Second batch: %d files\n", len(second))

	// Per-file chunk counts from the cached pass.
	for i, p := range paths {
		fmt.Printf("File %d: %d chunks\n", i, len(second[p]))
	}

	m := cp.GetCacheMetrics()
	fmt.Printf("Cache efficiency: %.2f%% hit rate\n", m.HitRate*100)

}
Output:
First batch: 3 files
Second batch: 3 files
File 0: 1 chunks
File 1: 1 chunks
File 2: 1 chunks
Cache efficiency: 50.00% hit rate
Example (CacheEviction)

Example_cacheEviction demonstrates cache eviction policies

package main

import (
	"context"
	"fmt"

	"github.com/NortonBen/ai-memory-go/parser/cache"
	"github.com/NortonBen/ai-memory-go/schema"
)

func main() {
	// A two-entry LRU cache makes eviction easy to observe.
	cfg := cache.DefaultCacheConfig()
	cfg.MaxSize = 2
	cfg.Policy = cache.PolicyLRU

	pc := cache.NewInMemoryParsingCache(cfg)
	defer pc.Close()

	ctx := context.Background()

	first := []*schema.Chunk{{ID: "1", Content: "First content", Type: schema.ChunkTypeText}}
	second := []*schema.Chunk{{ID: "2", Content: "Second content", Type: schema.ChunkTypeText}}
	third := []*schema.Chunk{{ID: "3", Content: "Third content", Type: schema.ChunkTypeText}}

	// Fill the cache to capacity.
	pc.Set(ctx, "key1", first, nil)
	fmt.Printf("Added key1, cache size: %d\n", pc.GetMetrics().TotalEntries)

	pc.Set(ctx, "key2", second, nil)
	fmt.Printf("Added key2, cache size: %d\n", pc.GetMetrics().TotalEntries)

	// Touch key1 so key2 becomes the least recently used entry.
	pc.Get(ctx, "key1")
	fmt.Println("Accessed key1 (making it recently used)")

	// Inserting a third entry forces the LRU entry (key2) out.
	pc.Set(ctx, "key3", third, nil)
	fmt.Printf("Added key3, cache size: %d\n", pc.GetMetrics().TotalEntries)

	// Probe which keys survived.
	_, found1 := pc.Get(ctx, "key1")
	_, found2 := pc.Get(ctx, "key2")
	_, found3 := pc.Get(ctx, "key3")

	fmt.Printf("key1 in cache: %t\n", found1)
	fmt.Printf("key2 in cache: %t (evicted)\n", found2)
	fmt.Printf("key3 in cache: %t\n", found3)

	fmt.Printf("Total evictions: %d\n", pc.GetMetrics().Evictions)

}
Output:
Added key1, cache size: 1
Added key2, cache size: 2
Accessed key1 (making it recently used)
Added key3, cache size: 2
key1 in cache: true
key2 in cache: false (evicted)
key3 in cache: true
Total evictions: 1
Example (CacheMetrics)

Example_cacheMetrics demonstrates comprehensive cache metrics

package main

import (
	"context"
	"fmt"

	"github.com/NortonBen/ai-memory-go/parser/cache"
	"github.com/NortonBen/ai-memory-go/schema"
)

func main() {
	cfg := cache.DefaultCacheConfig()
	cfg.EnableMetrics = true
	pc := cache.NewInMemoryParsingCache(cfg)
	defer pc.Close()

	ctx := context.Background()
	payload := []*schema.Chunk{{ID: "1", Content: "Metrics test content", Type: schema.ChunkTypeText}}

	// Populate three entries with the same payload.
	for _, k := range []string{"key1", "key2", "key3"} {
		pc.Set(ctx, k, payload, nil)
	}

	// Lookups: key1, key2, key1 hit; key4 and key5 miss.
	for _, k := range []string{"key1", "key2", "key4", "key5", "key1"} {
		pc.Get(ctx, k)
	}

	// Dump the accumulated counters.
	m := pc.GetMetrics()

	fmt.Printf("Cache Metrics:\n")
	fmt.Printf("  Total Entries: %d\n", m.TotalEntries)
	fmt.Printf("  Hits: %d\n", m.Hits)
	fmt.Printf("  Misses: %d\n", m.Misses)
	fmt.Printf("  Evictions: %d\n", m.Evictions)

}
Output:
Cache Metrics:
  Total Entries: 3
  Hits: 3
  Misses: 2
  Evictions: 0
Example (CacheWarmup)

Example_cacheWarmup demonstrates cache warmup for frequently accessed files

package main

import (
	"context"
	"fmt"
	"os"
	"path/filepath"

	"github.com/NortonBen/ai-memory-go/parser/cache"
	"github.com/NortonBen/ai-memory-go/parser/core"
	"github.com/NortonBen/ai-memory-go/schema"
)

func main() {
	// Create a few files that stand in for frequently read content.
	tmpDir := os.TempDir()
	hot := make([]string, 3)

	for i := range hot {
		p := filepath.Join(tmpDir, fmt.Sprintf("frequent_%d.txt", i))
		body := fmt.Sprintf("Frequently accessed file %d", i)
		if err := os.WriteFile(p, []byte(body), 0644); err != nil {
			fmt.Printf("Error creating file: %v\n", err)
			return
		}
		hot[i] = p
		defer os.Remove(p)
	}

	// Wrap the unified parser with a default cache.
	chunkCfg := schema.DefaultChunkingConfig()
	cp := core.NewCachedUnifiedParser(chunkCfg, cache.DefaultCacheConfig())
	defer cp.Close()

	ctx := context.Background()

	// Pre-populate the cache before any real traffic arrives.
	fmt.Println("Warming up cache...")
	if err := cp.WarmupCache(ctx, hot); err != nil {
		fmt.Printf("Error warming up cache: %v\n", err)
		return
	}

	// Every parse below should now be served from the cache.
	fmt.Println("Accessing warmed up files...")
	for i, p := range hot {
		chunks, err := cp.ParseFile(ctx, p)
		if err != nil {
			fmt.Printf("Error parsing file %d: %v\n", i, err)
			continue
		}

		fmt.Printf("File %d: %d chunks (cached)\n", i, len(chunks))
	}

	fmt.Printf("Post-warmup hits: %d\n", cp.GetCacheMetrics().Hits)

}
Output:
Warming up cache...
Accessing warmed up files...
File 0: 1 chunks (cached)
File 1: 1 chunks (cached)
File 2: 1 chunks (cached)
Post-warmup hits: 3
Example (CachedParser)

Example_cachedParser demonstrates using cached parser wrapper

package main

import (
	"context"
	"fmt"
	"time"

	"github.com/NortonBen/ai-memory-go/parser/cache"
	"github.com/NortonBen/ai-memory-go/parser/core"
	"github.com/NortonBen/ai-memory-go/schema"
)

func main() {
	// Cache parse results for an hour.
	chunkCfg := schema.DefaultChunkingConfig()
	cacheCfg := cache.DefaultCacheConfig()
	cacheCfg.TTL = 1 * time.Hour

	cp := core.NewCachedUnifiedParser(chunkCfg, cacheCfg)
	defer cp.Close()

	ctx := context.Background()
	input := "This is test content that will be cached after first parse."

	// The first call goes to the underlying parser; the second call
	// with identical content is served from the cache.
	first, err := cp.ParseText(ctx, input)
	if err != nil {
		fmt.Printf("Error parsing: %v\n", err)
		return
	}

	second, err := cp.ParseText(ctx, input)
	if err != nil {
		fmt.Printf("Error parsing: %v\n", err)
		return
	}

	fmt.Printf("First parse: %d chunks\n", len(first))
	fmt.Printf("Second parse: %d chunks\n", len(second))

	// One miss + one hit gives a 50% hit rate.
	fmt.Printf("Cache hit rate: %.2f%%\n", cp.GetCacheMetrics().HitRate*100)

}
Output:
First parse: 1 chunks
Second parse: 1 chunks
Cache hit rate: 50.00%
Example (FileCaching)

Example_fileCaching demonstrates file-based caching with modification time checking

package main

import (
	"context"
	"fmt"
	"os"
	"path/filepath"
	"time"

	"github.com/NortonBen/ai-memory-go/parser/cache"
	"github.com/NortonBen/ai-memory-go/parser/core"
	"github.com/NortonBen/ai-memory-go/schema"
)

func main() {
	// Create a scratch file with known content.
	testFile := filepath.Join(os.TempDir(), "cache_example.txt")

	if err := os.WriteFile(testFile, []byte("Initial file content"), 0644); err != nil {
		fmt.Printf("Error creating file: %v\n", err)
		return
	}
	defer os.Remove(testFile)

	// Enable modification-time checking so on-disk edits invalidate
	// cached entries for the file.
	chunkCfg := schema.DefaultChunkingConfig()
	cacheCfg := cache.DefaultCacheConfig()
	cacheCfg.CheckFileModTime = true
	cp := core.NewCachedUnifiedParser(chunkCfg, cacheCfg)
	defer cp.Close()

	ctx := context.Background()

	// Miss: nothing cached yet.
	first, err := cp.ParseFile(ctx, testFile)
	if err != nil {
		fmt.Printf("Error parsing file: %v\n", err)
		return
	}
	fmt.Printf("First parse: %d chunks, content: %s\n", len(first), first[0].Content)

	// Hit: same file, unchanged on disk.
	second, err := cp.ParseFile(ctx, testFile)
	if err != nil {
		fmt.Printf("Error parsing file: %v\n", err)
		return
	}
	fmt.Printf("Second parse: %d chunks (cached)\n", len(second))

	// Rewrite the file; the sleep guarantees a newer mod time.
	time.Sleep(10 * time.Millisecond)
	if err := os.WriteFile(testFile, []byte("Modified file content"), 0644); err != nil {
		fmt.Printf("Error modifying file: %v\n", err)
		return
	}

	// Miss again: the stale entry is invalidated by the mod time.
	third, err := cp.ParseFile(ctx, testFile)
	if err != nil {
		fmt.Printf("Error parsing modified file: %v\n", err)
		return
	}
	fmt.Printf("Third parse: %d chunks, content: %s\n", len(third), third[0].Content)

	// One hit (second parse), two misses (initial + post-modification).
	m := cp.GetCacheMetrics()
	fmt.Printf("Final cache stats - Hits: %d, Misses: %d\n", m.Hits, m.Misses)

}
Output:
First parse: 1 chunks, content: Initial file content
Second parse: 1 chunks (cached)
Third parse: 1 chunks, content: Modified file content
Final cache stats - Hits: 1, Misses: 2

Index

Examples

Constants

View Source
const (
	// Eviction policies re-exported from the schema package so callers
	// of this package need not import schema directly.
	PolicyLRU  = schema.PolicyLRU
	PolicyLFU  = schema.PolicyLFU
	PolicyTTL  = schema.PolicyTTL
	PolicyFIFO = schema.PolicyFIFO
)

Variables

This section is empty.

Functions

func DefaultCacheConfig

func DefaultCacheConfig() *schema.CacheConfig

DefaultCacheConfig returns sensible defaults for cache configuration

Types

type CacheConfig

type CacheConfig = schema.CacheConfig

CacheConfig is an alias for schema.CacheConfig, kept for backward compatibility; new code should use schema.CacheConfig directly.

type CacheEntry

// CacheEntry is a single cached parsing result together with the
// bookkeeping the cache needs for eviction, expiry, and persistence.
type CacheEntry struct {
	Key            string                 `json:"key"`
	Chunks         []*schema.Chunk        `json:"chunks"`                  // parsed result; nil when IsCompressed holds the payload — TODO confirm
	Metadata       map[string]interface{} `json:"metadata"`                // arbitrary caller-supplied metadata
	CreatedAt      time.Time              `json:"created_at"`
	LastAccessed   time.Time              `json:"last_accessed"`           // drives LRU eviction
	AccessCount    int64                  `json:"access_count"`            // drives LFU eviction
	ExpiresAt      time.Time              `json:"expires_at"`              // TTL cutoff
	FileModTime    time.Time              `json:"file_mod_time,omitempty"` // set for file-backed entries; used to detect stale content
	FilePath       string                 `json:"file_path,omitempty"`
	ContentHash    string                 `json:"content_hash"`
	EstimatedSize  int64                  `json:"estimated_size"` // approximate in-memory footprint in bytes
	IsCompressed   bool                   `json:"is_compressed"`
	CompressedData []byte                 `json:"compressed_data,omitempty"`
	Priority       int                    `json:"priority"` // Higher priority = less likely to be evicted
	Tags           []string               `json:"tags"`     // For categorization and bulk operations
}

CacheEntry represents a cached parsing result

type CacheEntryOptions

// CacheEntryOptions carries per-entry overrides for SetWithOptions.
type CacheEntryOptions struct {
	TTL      *time.Duration `json:"ttl,omitempty"` // nil means use the cache-wide TTL
	Priority int            `json:"priority"`      // higher = less likely to be evicted
	Tags     []string       `json:"tags"`          // labels usable with DeleteByTag
	Compress bool           `json:"compress"`      // request compression of the stored chunks
}

CacheEntryOptions provides advanced options for cache entries

type CacheMetrics

type CacheMetrics = schema.CacheMetrics

CacheMetrics is an alias for schema.CacheMetrics

type CachePolicy

type CachePolicy = schema.CachePolicy

CachePolicy defines different cache eviction policies

type CachedParser

type CachedParser struct {
	// contains filtered or unexported fields
}

CachedParser wraps any Parser implementation with intelligent caching

func NewCachedParser

func NewCachedParser(p schema.Parser, cache ParsingCache) *CachedParser

NewCachedParser creates a new cached parser wrapper

func NewCachedParserWithConfig

func NewCachedParserWithConfig(p schema.Parser, config *schema.CacheConfig) *CachedParser

NewCachedParserWithConfig creates a cached parser with custom cache configuration

func (*CachedParser) CleanupCache

func (cp *CachedParser) CleanupCache(ctx context.Context) error

CleanupCache removes expired and invalid entries

func (*CachedParser) Close

func (cp *CachedParser) Close() error

Close gracefully shuts down the cached parser

func (*CachedParser) DetectContentType

func (cp *CachedParser) DetectContentType(content string) schema.ChunkType

DetectContentType delegates to the underlying parser

func (*CachedParser) GetCache

func (cp *CachedParser) GetCache() ParsingCache

GetCache returns the underlying cache for direct access

func (*CachedParser) GetCacheMetrics

func (cp *CachedParser) GetCacheMetrics() *schema.CacheMetrics

GetCacheMetrics returns current cache performance metrics

func (*CachedParser) InvalidateCache

func (cp *CachedParser) InvalidateCache(ctx context.Context) error

InvalidateCache removes all cached entries

func (*CachedParser) InvalidateFile

func (cp *CachedParser) InvalidateFile(ctx context.Context, filePath string) error

InvalidateFile removes cached entries for a specific file

func (*CachedParser) ParseFile

func (cp *CachedParser) ParseFile(ctx context.Context, filePath string) ([]*schema.Chunk, error)

ParseFile parses a file with caching support

func (*CachedParser) ParseMarkdown

func (cp *CachedParser) ParseMarkdown(ctx context.Context, content string) ([]*schema.Chunk, error)

ParseMarkdown parses markdown content with caching support

func (*CachedParser) ParsePDF

func (cp *CachedParser) ParsePDF(ctx context.Context, filePath string) ([]*schema.Chunk, error)

ParsePDF parses PDF files with caching support

func (*CachedParser) ParseText

func (cp *CachedParser) ParseText(ctx context.Context, content string) ([]*schema.Chunk, error)

ParseText parses text content with caching support

func (*CachedParser) WarmupCache

func (cp *CachedParser) WarmupCache(ctx context.Context, filePaths []string) error

WarmupCache pre-populates the cache with frequently accessed files

type Compressor

type Compressor interface {
	Compress(data []byte) ([]byte, error)
	Decompress(data []byte) ([]byte, error)
}

Compressor interface for content compression

type GzipCompressor

type GzipCompressor struct{}

GzipCompressor implements compression using gzip

func (*GzipCompressor) Compress

func (gc *GzipCompressor) Compress(data []byte) ([]byte, error)

func (*GzipCompressor) Decompress

func (gc *GzipCompressor) Decompress(data []byte) ([]byte, error)

type InMemoryParsingCache

type InMemoryParsingCache struct {
	Config *schema.CacheConfig
	// contains filtered or unexported fields
}

InMemoryParsingCache implements ParsingCache using in-memory storage

Example (EnhancedFeatures)

ExampleInMemoryParsingCache_enhancedFeatures demonstrates the enhanced caching features

package main

import (
	"context"
	"fmt"
	"time"

	"github.com/NortonBen/ai-memory-go/parser/cache"
	"github.com/NortonBen/ai-memory-go/schema"
)

func main() {
	// Create a production-ready cache configuration: size and memory
	// bounds, TTL, LRU eviction, persistence, compression for payloads
	// over CompressionThreshold bytes, and bounded concurrency.
	config := &cache.CacheConfig{
		MaxSize:                 1000,
		MaxMemoryMB:             50,
		TTL:                     1 * time.Hour,
		Policy:                  cache.PolicyLRU,
		EnablePersistence:       true,
		PersistencePath:         "/tmp/parser_cache.json",
		CheckFileModTime:        true,
		EnableMetrics:           true,
		CleanupInterval:         5 * time.Minute,
		EnableCompression:       true,
		CompressionThreshold:    1024,
		EnableAsyncPersistence:  true,
		PersistenceInterval:     10 * time.Minute,
		MaxConcurrentOperations: 50,
	}

	// Create cache with enhanced features
	pc := cache.NewInMemoryParsingCache(config)
	defer pc.Close()

	ctx := context.Background()

	// Example 1: caching with compression. The content is 2000 bytes,
	// above the 1024-byte compression threshold configured above.
	largeContent := make([]byte, 2000)
	for i := range largeContent {
		largeContent[i] = 'A'
	}

	chunks := []*schema.Chunk{
		{ID: "1", Content: string(largeContent), Type: schema.ChunkTypeText},
	}

	// Set with compression, tags, and an elevated eviction priority.
	options := &cache.CacheEntryOptions{
		Compress: true,
		Tags:     []string{"large-content", "example"},
		Priority: 5,
	}

	err := pc.SetWithOptions(ctx, "large-key", chunks, nil, options)
	if err != nil {
		fmt.Printf("Error setting cache: %v\n", err)
		return
	}

	// Retrieve the (transparently decompressed) content.
	retrievedChunks, found := pc.Get(ctx, "large-key")
	if found {
		fmt.Printf("Retrieved %d chunks from cache\n", len(retrievedChunks))
	}

	// Example 2: tag-based operations.
	// Add more tagged content.
	smallChunks := []*schema.Chunk{
		{ID: "2", Content: "Small content", Type: schema.ChunkTypeText},
	}

	tagOptions := &cache.CacheEntryOptions{
		Tags: []string{"small-content", "example"},
	}

	// Errors were previously ignored here; handle them explicitly.
	if err := pc.SetWithOptions(ctx, "small-key", smallChunks, nil, tagOptions); err != nil {
		fmt.Printf("Error setting cache: %v\n", err)
		return
	}

	// Delete all content with the "example" tag.
	if err := pc.DeleteByTag(ctx, "example"); err != nil {
		fmt.Printf("Error deleting by tag: %v\n", err)
		return
	}

	// Example 3: metrics monitoring (values intentionally unused here).
	_ = pc.GetMetrics()

}
Output:
Retrieved 1 chunks from cache
Example (Persistence)

ExampleInMemoryParsingCache_persistence demonstrates persistence and warmup features

package main

import (
	"context"
	"fmt"
	"os"
	"path/filepath"

	"github.com/NortonBen/ai-memory-go/parser/cache"
	"github.com/NortonBen/ai-memory-go/schema"
)

func main() {
	tmpDir := os.TempDir()
	cacheFile := filepath.Join(tmpDir, "example_enhanced_cache.json")
	defer os.Remove(cacheFile)

	// Create cache with persistence enabled.
	config := cache.DefaultCacheConfig()
	config.EnablePersistence = true
	config.PersistencePath = cacheFile

	pc := cache.NewInMemoryParsingCache(config)
	ctx := context.Background()

	// Add some content to cache.
	chunks := []*schema.Chunk{
		{ID: "1", Content: "Persistent content example", Type: schema.ChunkTypeText},
	}

	err := pc.Set(ctx, "persistent-key", chunks, nil)
	if err != nil {
		// Previously pc was leaked on this path; close it before returning.
		pc.Close()
		fmt.Printf("Error setting cache: %v\n", err)
		return
	}

	// Manually persist to disk.
	err = pc.Persist(ctx)
	if err != nil {
		pc.Close()
		fmt.Printf("Error persisting cache: %v\n", err)
		return
	}

	// Close the first instance before loading into a fresh one
	// (deliberately not deferred: it must happen mid-function).
	pc.Close()

	// Create new cache instance and load from disk.
	newCache := cache.NewInMemoryParsingCache(config)
	defer newCache.Close()

	err = newCache.Load(ctx)
	if err != nil {
		fmt.Printf("Error loading cache: %v\n", err)
		return
	}

	// Verify data survived the round trip.
	retrievedChunks, found := newCache.Get(ctx, "persistent-key")
	if found {
		fmt.Printf("Successfully loaded %d chunks from persistent storage\n", len(retrievedChunks))
		fmt.Printf("Content: %s\n", retrievedChunks[0].Content)
	} else {
		fmt.Printf("Failed to load data from persistent storage\n")
	}

	// Show persistence metrics (counters are per-instance, so the
	// fresh cache reports zero persistence operations).
	metrics := newCache.GetMetrics()
	fmt.Printf("Persistence Operations: %d\n", metrics.PersistenceOperations)

}
Output:
Successfully loaded 1 chunks from persistent storage
Content: Persistent content example
Persistence Operations: 0

func NewInMemoryParsingCache

func NewInMemoryParsingCache(config *schema.CacheConfig) *InMemoryParsingCache

NewInMemoryParsingCache creates a new in-memory parsing cache

func (*InMemoryParsingCache) Cleanup

func (c *InMemoryParsingCache) Cleanup(ctx context.Context) error

Cleanup removes expired and evicted entries

func (*InMemoryParsingCache) Clear

func (c *InMemoryParsingCache) Clear(ctx context.Context) error

Clear removes all entries from cache

func (*InMemoryParsingCache) Close

func (c *InMemoryParsingCache) Close() error

Close gracefully shuts down the cache

func (*InMemoryParsingCache) Delete

func (c *InMemoryParsingCache) Delete(ctx context.Context, key string) error

Delete removes an entry from cache

func (*InMemoryParsingCache) DeleteByTag

func (c *InMemoryParsingCache) DeleteByTag(ctx context.Context, tag string) error

DeleteByTag removes all entries with a specific tag

func (*InMemoryParsingCache) GenerateCacheKey

func (c *InMemoryParsingCache) GenerateCacheKey(input string) string

GenerateCacheKey creates a cache key from content or file path

func (*InMemoryParsingCache) GenerateFileKey

func (c *InMemoryParsingCache) GenerateFileKey(filePath string) string

GenerateFileKey creates a cache key specifically for file paths

func (*InMemoryParsingCache) Get

func (c *InMemoryParsingCache) Get(ctx context.Context, key string) ([]*schema.Chunk, bool)

Get retrieves cached parsing results

func (*InMemoryParsingCache) GetByFile

func (c *InMemoryParsingCache) GetByFile(ctx context.Context, filePath string) ([]*schema.Chunk, bool)

GetByFile retrieves cached results for a specific file

func (*InMemoryParsingCache) GetKeys

func (c *InMemoryParsingCache) GetKeys() []string

GetKeys returns all cache keys (for debugging/monitoring)

func (*InMemoryParsingCache) GetMetrics

func (c *InMemoryParsingCache) GetMetrics() *schema.CacheMetrics

GetMetrics returns current cache performance metrics

func (*InMemoryParsingCache) GetSize

func (c *InMemoryParsingCache) GetSize() (entries int, memoryBytes int64)

GetSize returns current cache size information

func (*InMemoryParsingCache) IsValid

func (c *InMemoryParsingCache) IsValid(ctx context.Context, key string) bool

IsValid checks if a cache entry is still valid

func (*InMemoryParsingCache) Load

Load restores cache from persistent storage

func (*InMemoryParsingCache) Persist

func (c *InMemoryParsingCache) Persist(ctx context.Context) error

Persist saves cache to persistent storage

func (*InMemoryParsingCache) Set

func (c *InMemoryParsingCache) Set(ctx context.Context, key string, chunks []*schema.Chunk, metadata map[string]interface{}) error

Set stores parsing results in cache

func (*InMemoryParsingCache) SetByFile

func (c *InMemoryParsingCache) SetByFile(ctx context.Context, filePath string, chunks []*schema.Chunk, metadata map[string]interface{}) error

SetByFile stores parsing results for a specific file

func (*InMemoryParsingCache) SetWithOptions

func (c *InMemoryParsingCache) SetWithOptions(ctx context.Context, key string, chunks []*schema.Chunk, metadata map[string]interface{}, options *CacheEntryOptions) error

SetWithOptions stores parsing results with advanced options

func (*InMemoryParsingCache) Warmup

func (c *InMemoryParsingCache) Warmup(ctx context.Context, filePaths []string) error

Warmup preloads frequently accessed files

type ParsingCache

// ParsingCache is the contract for caches of parsing results, keyed
// either by arbitrary string keys or by file path.
type ParsingCache interface {
	// Get retrieves cached parsing results; the bool reports whether a
	// valid entry was found for key.
	Get(ctx context.Context, key string) ([]*schema.Chunk, bool)

	// GenerateCacheKey creates a cache key from content or file path
	GenerateCacheKey(input string) string

	// GenerateFileKey creates a cache key specifically for file paths
	GenerateFileKey(filePath string) string

	// Set stores parsing results in cache
	Set(ctx context.Context, key string, chunks []*schema.Chunk, metadata map[string]interface{}) error

	// SetWithOptions stores parsing results with advanced options
	// (per-entry TTL, priority, tags, compression).
	SetWithOptions(ctx context.Context, key string, chunks []*schema.Chunk, metadata map[string]interface{}, options *CacheEntryOptions) error

	// GetByFile retrieves cached results for a specific file; the bool
	// reports whether a valid entry was found.
	GetByFile(ctx context.Context, filePath string) ([]*schema.Chunk, bool)

	// SetByFile stores parsing results for a specific file
	SetByFile(ctx context.Context, filePath string, chunks []*schema.Chunk, metadata map[string]interface{}) error

	// Delete removes an entry from cache
	Delete(ctx context.Context, key string) error

	// DeleteByTag removes all entries with a specific tag
	DeleteByTag(ctx context.Context, tag string) error

	// Clear removes all entries from cache
	Clear(ctx context.Context) error

	// IsValid checks if a cache entry is still valid (file mod time, TTL)
	IsValid(ctx context.Context, key string) bool

	// GetMetrics returns current cache performance metrics
	GetMetrics() *schema.CacheMetrics

	// Cleanup removes expired and evicted entries
	Cleanup(ctx context.Context) error

	// Persist saves cache to persistent storage
	Persist(ctx context.Context) error

	// Load restores cache from persistent storage
	Load(ctx context.Context) error

	// Warmup preloads frequently accessed files
	Warmup(ctx context.Context, filePaths []string) error

	// GetKeys returns all cache keys (for debugging/monitoring)
	GetKeys() []string

	// GetSize returns current cache size information
	GetSize() (entries int, memoryBytes int64)

	// Close gracefully shuts down the cache
	Close() error
}

ParsingCache defines the interface for parsing result caching

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL