Extract memory to class
tedspare committed Oct 9, 2024
1 parent 050d270 commit 78834be
Showing 6 changed files with 73 additions and 48 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -1,3 +1,4 @@
+- [2024-10-09] [Extract memory to class](https://github.com/RubricLab/memory/commit/5e165608ffad822c5b77ee03f1dfc308dcb1787a)
 - [2024-10-09] [Fix precision calc](https://github.com/RubricLab/memory/commit/52fc41e151c47e276c37a24b3489ba414d032a0b)
 - [2024-10-09] [Generalize eval arch](https://github.com/RubricLab/memory/commit/bf80487850e840525a1521925a439d7d9fc8d638)
 - [2024-10-09] [Add help cmd](https://github.com/RubricLab/memory/commit/9f35d0016dcd5d0f909cb77c2ea33ef70da60fb1)
6 changes: 4 additions & 2 deletions evals/index.ts
@@ -29,6 +29,8 @@ if (import.meta.path === Bun.main) {
 		process.exit(0)
 	}
 
-	await runOneShotExamples({ fast: args.values.fast })
-	await runMultiTurnExamples({ fast: args.values.fast })
+	const model = args.values.fast ? 'gpt-4o-mini' : 'gpt-4o-2024-08-06'
+
+	await runOneShotExamples({ model })
+	await runMultiTurnExamples({ model })
 }
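
The hunk references `args.values.fast`, whose parsing sits above this excerpt and is not part of the diff. A hypothetical sketch of how such a boolean `--fast` flag could be wired up with `node:util`'s `parseArgs` to feed the model choice above (the option setup is an assumption, not taken from the repo):

import { parseArgs } from 'node:util'

// Hypothetical sketch only: the real option setup in evals/index.ts is not shown in this commit.
const args = parseArgs({
	options: {
		fast: { type: 'boolean' } // `--fast` selects the cheaper model
	},
	allowPositionals: true
})

// Mirrors the hunk: resolve the model id once, then hand it to both eval runners.
const model = args.values.fast ? 'gpt-4o-mini' : 'gpt-4o-2024-08-06'
console.log(`Running evals against ${model}`)
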
10 changes: 5 additions & 5 deletions evals/multi-turn/index.ts
@@ -1,10 +1,10 @@
+import type { openai } from '@ai-sdk/openai'
+import { Memory } from '../..'
 import { EXAMPLES } from './examples'
 
-export const runMultiTurnExamples = async ({
-	fast
-}: {
-	fast?: boolean
-}) => {
+export const runMultiTurnExamples = async ({ model }: { model: Parameters<typeof openai>[0] }) => {
+	const memory = new Memory({ model })
+
 	for await (const eg of EXAMPLES) {
 		console.log(eg)
 	}
47 changes: 8 additions & 39 deletions evals/one-shot/index.ts
@@ -1,54 +1,23 @@
-import { openai } from '@ai-sdk/openai'
-import { generateObject } from 'ai'
+import type { openai } from '@ai-sdk/openai'
 import chalk from 'chalk'
-import { z } from 'zod'
-import { clean, format } from '../../utils/string.ts'
+import { Memory } from '../../index.ts'
+import { format } from '../../utils/string.ts'
 import { EXAMPLES } from './examples.ts'
 
-export const runOneShotExamples = async ({ fast }: { fast?: boolean }) => {
+export const runOneShotExamples = async ({ model }: { model: Parameters<typeof openai>[0] }) => {
 	let totalFacts = 0
 	let totalRecall = 0
 	let totalAttempts = 0
 
+	const memory = new Memory({ model })
+
 	for await (const eg of EXAMPLES) {
 		totalFacts += eg.facts.length
 
 		console.log(chalk.yellow(`\n\n"${eg.content}"`))
 
-		const {
-			object: { facts: attempts }
-		} = await generateObject({
-			model: openai(fast ? 'gpt-4o-mini' : 'gpt-4o-2024-08-06'),
-			schema: z.object({
-				facts: z.array(
-					z.object({
-						subject: z.string(),
-						relation: z.string().describe('a verb phrase'),
-						object: z.string(),
-						data: z.record(z.string(), z.string()).optional().describe('to capture any additional info')
-					})
-				)
-			}),
-			prompt: clean`Please extract all probable and implicit facts from the following passage.
-				Portray the first-person as "user".
-				Capture new relationships.
-				Try to capture the most up-to-date state of affairs in present tense.
-				Passage:
-				"${eg.content}"
-			`
-			// messages: [
-			// 	{
-			// 		role: 'system',
-			// 		content: clean`Please extract all probable and implicit facts from the following passage.
-			// 		Portray the first-person as "user".
-			// 		Capture new relationships.
-			// 		Try to capture the most up-to-date state of affairs in present tense.`
-			// 	},
-			// 	{
-			// 		role: 'user',
-			// 		content: eg.content
-			// 	}
-			// ]
+		const { facts: attempts } = await memory.extract({
+			content: eg.content
+		})
 
 		const omitted: number[] = []
55 changes: 54 additions & 1 deletion index.ts
@@ -1 +1,54 @@
-export const memory = () => 'memory'
+import { openai } from '@ai-sdk/openai'
+import { generateObject } from 'ai'
+import { z } from 'zod'
+import { clean } from './utils/string'
+
+export class Memory {
+	model: Parameters<typeof openai>[0]
+	constructor({
+		model
+	}: {
+		model: Parameters<typeof openai>[0]
+	}) {
+		this.model = model
+	}
+
+	async extract({ content }: { content: string }) {
+		const {
+			object: { facts }
+		} = await generateObject({
+			model: openai(this.model),
+			schema: z.object({
+				facts: z.array(
+					z.object({
+						subject: z.string(),
+						relation: z.string().describe('a verb phrase'),
+						object: z.string(),
+						data: z.record(z.string(), z.string()).optional().describe('to capture any additional info')
+					})
+				)
+			}),
+			prompt: clean`Please extract all probable and implicit facts from the following passage.
+				Portray the first-person as "user".
+				Capture new relationships.
+				Try to capture the most up-to-date state of affairs in present tense.
+				Passage:
+				"${content}"`
+			// messages: [
+			// 	{
+			// 		role: 'system',
+			// 		content: clean`Please extract all probable and implicit facts from the following passage.
+			// 		Portray the first-person as "user".
+			// 		Capture new relationships.
+			// 		Try to capture the most up-to-date state of affairs in present tense.`
+			// 	},
+			// 	{
+			// 		role: 'user',
+			// 		content: eg.content
+			// 	}
+			// ]
+		})
+
+		return { facts }
+	}
+}
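
The new `Memory` class is now the single entry point for fact extraction. A minimal usage sketch, separate from this commit (the package-name import path and the example input are assumptions, and an OpenAI API key must be available to `@ai-sdk/openai`):

import { Memory } from '@rubriclab/memory'

// Any OpenAI model id accepted by @ai-sdk/openai works here.
const memory = new Memory({ model: 'gpt-4o-mini' })

// extract() returns { facts }: subject/relation/object triples plus an optional `data` record.
const { facts } = await memory.extract({
	content: 'I moved to Toronto last month and started a job at a small startup.'
})

for (const fact of facts) {
	console.log(`${fact.subject} ${fact.relation} ${fact.object}`)
}
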
2 changes: 1 addition & 1 deletion package.json
@@ -1,7 +1,7 @@
 {
 	"name": "@rubriclab/memory",
 	"module": "index.ts",
-	"version": "0.0.7",
+	"version": "0.0.8",
 	"private": false,
 	"type": "module",
 	"devDependencies": {
