Interface CognitiveMemoryConfig

/**
 * Configuration for the cognitive memory system: required storage/embedding
 * backends plus optional tuning for encoding, decay, budgets, and mechanisms.
 */
interface CognitiveMemoryConfig {
    workingMemory: IWorkingMemory;
    knowledgeGraph: IKnowledgeGraph;
    vectorStore: IVectorStore;
    embeddingManager: IEmbeddingManager;
    agentId: string;
    traits: HexacoTraits;
    /** Callback to get current mood from MoodEngine or similar. */
    moodProvider: () => PADState;
    /** @defaultValue 'keyword' */
    featureDetectionStrategy: "hybrid" | "llm" | "keyword";
    /** Required when strategy is 'llm' or 'hybrid'. */
    featureDetectionLlmInvoker?: (systemPrompt: string, userPrompt: string) => Promise<string>;
    encoding?: Partial<EncodingConfig>;
    decay?: Partial<DecayConfig>;
    /** @defaultValue 7 (Miller's number) */
    workingMemoryCapacity?: number;
    tokenBudget?: Partial<MemoryBudgetAllocation>;
    observer?: Partial<ObserverConfig>;
    reflector?: Partial<ReflectorConfig>;
    graph?: Partial<MemoryGraphConfig>;
    consolidation?: Partial<ConsolidationConfig>;
    /** Optional per-mechanism cognitive science extensions (reconsolidation, RIF, FOK, etc.). */
    cognitiveMechanisms?: CognitiveMechanismsConfig;
    /** Infinite context window config. Enables transparent compaction for forever conversations. */
    infiniteContext?: Partial<InfiniteContextConfig>;
    /** Max context window size in tokens (required for infinite context). */
    maxContextTokens?: number;
    /** @defaultValue 'cogmem' */
    collectionPrefix?: string;
}

Properties

workingMemory: IWorkingMemory
knowledgeGraph: IKnowledgeGraph
vectorStore: IVectorStore
embeddingManager: IEmbeddingManager
agentId: string
traits: HexacoTraits
moodProvider: (() => PADState)

Callback to get current mood from MoodEngine or similar.

Type declaration

featureDetectionStrategy: "hybrid" | "llm" | "keyword"

Default

'keyword'
featureDetectionLlmInvoker?: ((systemPrompt: string, userPrompt: string) => Promise<string>)

Required when strategy is 'llm' or 'hybrid'.

Type declaration

    • (systemPrompt, userPrompt): Promise<string>
    • Parameters

      • systemPrompt: string
      • userPrompt: string

      Returns Promise<string>

encoding?: Partial<EncodingConfig>
decay?: Partial<DecayConfig>
workingMemoryCapacity?: number

Default

7 (Miller's number)
tokenBudget?: Partial<MemoryBudgetAllocation>
observer?: Partial<ObserverConfig>
reflector?: Partial<ReflectorConfig>
graph?: Partial<MemoryGraphConfig>
consolidation?: Partial<ConsolidationConfig>
cognitiveMechanisms?: CognitiveMechanismsConfig

Optional per-mechanism cognitive science extensions (reconsolidation, RIF, FOK, etc.).

infiniteContext?: Partial<InfiniteContextConfig>

Infinite context window config. Enables transparent compaction for forever conversations.

maxContextTokens?: number

Max context window size in tokens (required for infinite context).

collectionPrefix?: string

Default

'cogmem'