From 0116ed14651c3d1f12e169fdb5a792c690018fa5 Mon Sep 17 00:00:00 2001
From: Javier Bullrich
Date: Mon, 13 May 2024 20:07:15 +0200
Subject: [PATCH] updated project to use GPT-4o (#9)

Updated the project to allow setting a custom model and to use GPT-4o
by default.
---
 eslint.config.js |  8 ++++----
 src/story.ts     | 12 +++++++-----
 2 files changed, 11 insertions(+), 9 deletions(-)

diff --git a/eslint.config.js b/eslint.config.js
index 22aa85a..7bb1417 100644
--- a/eslint.config.js
+++ b/eslint.config.js
@@ -1,9 +1,9 @@
-import globals from "globals";
-import pluginJs from "@eslint/js";
-import tseslint from "typescript-eslint";
+const globals = require("globals");
+const pluginJs = require("@eslint/js");
+const tseslint = require("typescript-eslint");

-export default [
+module.exports = [
   { languageOptions: { globals: globals.browser } },
   pluginJs.configs.recommended,
   ...tseslint.configs.recommended,
diff --git a/src/story.ts b/src/story.ts
index eaa73f2..ddb7b75 100644
--- a/src/story.ts
+++ b/src/story.ts
@@ -5,6 +5,8 @@ import { ChatAssistant } from "./chat";

 type StoryParams = { prompt: string, story: string, temperature: number };

+const DEFAULT_MODEL = "gpt-4o";
+
 /** Prompt used to generate the story */
 export const systemInfo = `You are Story Bot, a language model that helps users create stories, scripts and more.
 Follow the user's instructions carefully and generate the content they requested.
@@ -21,17 +23,17 @@ export class Story {
   public readonly content: string;
   public readonly temperature: number;

-  constructor(private readonly openai: OpenAI, storyParams: StoryParams, private readonly logger: ILogger = console) {
+  constructor(private readonly openai: OpenAI, storyParams: StoryParams, chatModel: string = DEFAULT_MODEL, private readonly logger: ILogger = console) {
     this.prompt = storyParams.prompt;
     this.content = storyParams.story;
     this.temperature = storyParams.temperature;
     this.creationPrompt = [{ role: "system", content: systemInfo }, { role: "user", content: storyParams.prompt }, { role: "assistant", content: storyParams.story }];
-    this.chat = new ChatAssistant(this.openai, storyParams.temperature);
+    this.chat = new ChatAssistant(this.openai, storyParams.temperature, chatModel);
   }

   /** Utility method which allows a Story object to be generated from a prompt with a story */
-  static async generateStory(prompt: string, openai: OpenAI, logger: ILogger = console): Promise<Story> {
-    const chat = new ChatAssistant(openai, Math.round(Math.random() * 100) / 100);
+  static async generateStory(prompt: string, openai: OpenAI, chatModel: string = DEFAULT_MODEL, logger: ILogger = console): Promise<Story> {
+    const chat = new ChatAssistant(openai, Math.round(Math.random() * 100) / 100, chatModel);
     logger.log("Generating story for prompt", prompt);
     const story = await chat.chat({ role: "system", content: systemInfo }, { role: "user", content: prompt });

@@ -40,7 +42,7 @@ export class Story {
     }

     logger.log("Got the story!", `It is ${story.answer.content.split(" ").length} words long!`);
-    return new Story(openai, { prompt, story: story.answer.content, temperature: chat.temperature });
+    return new Story(openai, { prompt, story: story.answer.content, temperature: chat.temperature }, chatModel);
   }

   async generateTitle(): Promise<string> {
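
For reference, a minimal usage sketch (not part of the patch) of the new optional chatModel parameter on Story.generateStory. It assumes the Story class as modified above; the prompt text and the "gpt-3.5-turbo" override are illustrative assumptions only.

import OpenAI from "openai";
import { Story } from "./story";

async function demo(): Promise<void> {
  // The OpenAI client picks up OPENAI_API_KEY from the environment.
  const openai = new OpenAI();

  // No model argument: generateStory falls back to DEFAULT_MODEL ("gpt-4o").
  const defaultStory = await Story.generateStory("A robot learns to paint.", openai);

  // Passing a model explicitly overrides the default.
  const customStory = await Story.generateStory("A robot learns to paint.", openai, "gpt-3.5-turbo");

  console.log(defaultStory.content, customStory.content);
}

demo().catch(console.error);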