diff --git a/CHANGELOG.md b/CHANGELOG.md index e4447a5..8474e43 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,4 @@ +- [2024-10-09] [Extract memory to class](https://github.com/RubricLab/memory/commit/5e165608ffad822c5b77ee03f1dfc308dcb1787a) - [2024-10-09] [Fix precision calc](https://github.com/RubricLab/memory/commit/52fc41e151c47e276c37a24b3489ba414d032a0b) - [2024-10-09] [Generalize eval arch](https://github.com/RubricLab/memory/commit/bf80487850e840525a1521925a439d7d9fc8d638) - [2024-10-09] [Add help cmd](https://github.com/RubricLab/memory/commit/9f35d0016dcd5d0f909cb77c2ea33ef70da60fb1) diff --git a/evals/index.ts b/evals/index.ts index 3ceb7ed..97b1b21 100644 --- a/evals/index.ts +++ b/evals/index.ts @@ -29,6 +29,8 @@ if (import.meta.path === Bun.main) { process.exit(0) } - await runOneShotExamples({ fast: args.values.fast }) - await runMultiTurnExamples({ fast: args.values.fast }) + const model = args.values.fast ? 'gpt-4o-mini' : 'gpt-4o-2024-08-06' + + await runOneShotExamples({ model }) + await runMultiTurnExamples({ model }) } diff --git a/evals/multi-turn/index.ts b/evals/multi-turn/index.ts index 3a02127..f5d9cda 100644 --- a/evals/multi-turn/index.ts +++ b/evals/multi-turn/index.ts @@ -1,10 +1,10 @@ +import type { openai } from '@ai-sdk/openai' +import { Memory } from '../..' 
import { EXAMPLES } from './examples' -export const runMultiTurnExamples = async ({ - fast -}: { - fast?: boolean -}) => { +export const runMultiTurnExamples = async ({ model }: { model: Parameters<typeof openai>[0] }) => { + const memory = new Memory({ model }) + for await (const eg of EXAMPLES) { console.log(eg) } diff --git a/evals/one-shot/index.ts b/evals/one-shot/index.ts index 7f7f871..b596b3e 100644 --- a/evals/one-shot/index.ts +++ b/evals/one-shot/index.ts @@ -1,54 +1,23 @@ -import { openai } from '@ai-sdk/openai' -import { generateObject } from 'ai' +import type { openai } from '@ai-sdk/openai' import chalk from 'chalk' -import { z } from 'zod' -import { clean, format } from '../../utils/string.ts' +import { Memory } from '../../index.ts' +import { format } from '../../utils/string.ts' import { EXAMPLES } from './examples.ts' -export const runOneShotExamples = async ({ fast }: { fast?: boolean }) => { +export const runOneShotExamples = async ({ model }: { model: Parameters<typeof openai>[0] }) => { let totalFacts = 0 let totalRecall = 0 let totalAttempts = 0 + const memory = new Memory({ model }) + for await (const eg of EXAMPLES) { totalFacts += eg.facts.length console.log(chalk.yellow(`\n\n"${eg.content}"`)) - const { - object: { facts: attempts } - } = await generateObject({ - model: openai(fast ? 'gpt-4o-mini' : 'gpt-4o-2024-08-06'), - schema: z.object({ - facts: z.array( - z.object({ - subject: z.string(), - relation: z.string().describe('a verb phrase'), - object: z.string(), - data: z.record(z.string(), z.string()).optional().describe('to capture any additional info') - }) - ) - }), - prompt: clean`Please extract all probable and implicit facts from the following passage. - Portray the first-person as "user". - Capture new relationships. - Try to capture the most up-to-date state of affairs in present tense. 
- Passage: - "${eg.content}" - ` - // messages: [ - // { - // role: 'system', - // content: clean`Please extract all probable and implicit facts from the following passage. - // Portray the first-person as "user". - // Capture new relationships. - // Try to capture the most up-to-date state of affairs in present tense.` - // }, - // { - // role: 'user', - // content: eg.content - // } - // ] + const { facts: attempts } = await memory.extract({ + content: eg.content }) const omitted: number[] = [] diff --git a/index.ts b/index.ts index 4dc39eb..9c7b2ea 100644 --- a/index.ts +++ b/index.ts @@ -1 +1,54 @@ -export const memory = () => 'memory' +import { openai } from '@ai-sdk/openai' +import { generateObject } from 'ai' +import { z } from 'zod' +import { clean } from './utils/string' + +export class Memory { + model: Parameters<typeof openai>[0] + constructor({ + model + }: { + model: Parameters<typeof openai>[0] + }) { + this.model = model + } + + async extract({ content }: { content: string }) { + const { + object: { facts } + } = await generateObject({ + model: openai(this.model), + schema: z.object({ + facts: z.array( + z.object({ + subject: z.string(), + relation: z.string().describe('a verb phrase'), + object: z.string(), + data: z.record(z.string(), z.string()).optional().describe('to capture any additional info') + }) + ) + }), + prompt: clean`Please extract all probable and implicit facts from the following passage. + Portray the first-person as "user". + Capture new relationships. + Try to capture the most up-to-date state of affairs in present tense. + Passage: + "${content}"` + // messages: [ + // { + // role: 'system', + // content: clean`Please extract all probable and implicit facts from the following passage. + // Portray the first-person as "user". + // Capture new relationships. 
+ // Try to capture the most up-to-date state of affairs in present tense.` + // }, + // { + // role: 'user', + // content: eg.content + // } + // ] + }) + + return { facts } + } +} diff --git a/package.json b/package.json index afb01fd..c1fc0b0 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "@rubriclab/memory", "module": "index.ts", - "version": "0.0.7", + "version": "0.0.8", "private": false, "type": "module", "devDependencies": {