Rework algorithm
lib/llm.ts (14 lines changed)
@@ -3,6 +3,7 @@ import { fileURLToPath } from "node:url";
 import {
   type ChatHistoryItem,
+  type LLamaChatPromptOptions,
   LlamaChatSession,
   type LlamaModel,
   createModelDownloader,
@@ -29,7 +30,11 @@ export type Message = {
   text: string;
 };
 
-export async function complete(model: LlamaModel, messages: Message[]) {
+export async function complete(
+  model: LlamaModel,
+  messages: Message[],
+  options: LLamaChatPromptOptions = {},
+) {
   if (messages.length < 1) throw new Error("messages are empty");
   const init = messages.slice(0, -1);
   const last = messages.at(-1) as Message;
@@ -61,14 +66,11 @@ export async function complete(model: LlamaModel, messages: Message[]) {
   );
 
   const res = await session.prompt(last.text, {
-    temperature: 1.0,
-    repeatPenalty: {
-      frequencyPenalty: 1,
-    },
     trimWhitespaceSuffix: true,
     onResponseChunk(chunk) {
       process.stderr.write(chunk.text);
     },
     maxTokens: 200,
+    ...options,
   });
   session.dispose();
   await context.dispose();
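
For orientation (not part of the commit), here is a minimal sketch of how the reworked signature might be called from another module. The helper name completeShort and the import path ./lib/llm.js are assumptions; complete and Message are the exports touched above, and maxTokens / temperature are standard node-llama-cpp prompt options. Because complete() spreads options after its remaining hard-coded defaults, per-call values take precedence.

import type { LlamaModel } from "node-llama-cpp";
import { complete, type Message } from "./lib/llm.js";

// Hypothetical helper: request a short completion by overriding the
// defaults that complete() still hard-codes. The options object is
// spread last inside complete(), so these values win.
export async function completeShort(model: LlamaModel, messages: Message[]) {
  return complete(model, messages, {
    maxTokens: 64,    // instead of the in-function default of 200
    temperature: 0.2, // sampling temperature for this call only
  });
}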