Skip to content

Commit

Permalink
updated project to use GPT-4o (#9)
Browse files Browse the repository at this point in the history
Updated the project so callers can configure the chat model, defaulting to
GPT-4o.
  • Loading branch information
Bullrich authored May 13, 2024
1 parent c1fbd4f commit 0116ed1
Show file tree
Hide file tree
Showing 2 changed files with 11 additions and 9 deletions.
8 changes: 4 additions & 4 deletions eslint.config.js
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
import globals from "globals";
import pluginJs from "@eslint/js";
import tseslint from "typescript-eslint";
const globals = require("globals");
const pluginJs = require("@eslint/js");
const tseslint = require("typescript-eslint");


export default [
module.exports = [
{ languageOptions: { globals: globals.browser } },
pluginJs.configs.recommended,
...tseslint.configs.recommended,
Expand Down
12 changes: 7 additions & 5 deletions src/story.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@ import { ChatAssistant } from "./chat";

/** Data required to construct a Story: the user prompt, the generated story text, and the temperature passed on to the chat assistant. */
type StoryParams = { prompt: string, story: string, temperature: number };

/** Chat model used when the caller does not supply one explicitly. */
const DEFAULT_MODEL = "gpt-4o";

/** Prompt used to generate the story */
export const systemInfo = `You are Story Bot, a language model that helps users create stories, scripts and more.
Follow the user's instructions carefully and generate the content they requested.
Expand All @@ -21,17 +23,17 @@ export class Story {
public readonly content: string;
public readonly temperature: number;

constructor(private readonly openai: OpenAI, storyParams: StoryParams, private readonly logger: ILogger = console) {
/**
 * Builds a Story from previously generated content.
 *
 * @param openai - OpenAI client handed through to the chat assistant.
 * @param storyParams - Prompt, story text and temperature that produced this story.
 * @param chatModel - Chat model to use for follow-up requests; defaults to {@link DEFAULT_MODEL}.
 * @param logger - Logging sink; defaults to the console.
 */
constructor(private readonly openai: OpenAI, storyParams: StoryParams, chatModel: string = DEFAULT_MODEL, private readonly logger: ILogger = console) {
  const { prompt, story, temperature } = storyParams;
  this.prompt = prompt;
  this.content = story;
  this.temperature = temperature;
  // Replay of the exchange that produced the story: system prompt, user request, assistant answer.
  this.creationPrompt = [
    { role: "system", content: systemInfo },
    { role: "user", content: prompt },
    { role: "assistant", content: story },
  ];
  this.chat = new ChatAssistant(this.openai, temperature, chatModel);
}

/** Utility method which allows a Story object to be generated from a prompt with a story */
static async generateStory(prompt: string, openai: OpenAI, logger: ILogger = console): Promise<Story> {
const chat = new ChatAssistant(openai, Math.round(Math.random() * 100) / 100);
static async generateStory(prompt: string, openai: OpenAI, chatModel: string = DEFAULT_MODEL, logger: ILogger = console): Promise<Story> {
const chat = new ChatAssistant(openai, Math.round(Math.random() * 100) / 100, chatModel);
logger.log("Generating story for prompt", prompt);
const story = await chat.chat({ role: "system", content: systemInfo }, { role: "user", content: prompt });

Expand All @@ -40,7 +42,7 @@ export class Story {
}

logger.log("Got the story!", `It is ${story.answer.content.split(" ").length} words long!`);
return new Story(openai, { prompt, story: story.answer.content, temperature: chat.temperature });
return new Story(openai, { prompt, story: story.answer.content, temperature: chat.temperature }, chatModel);
}

async generateTitle(): Promise<string> {
Expand Down

0 comments on commit 0116ed1

Please sign in to comment.