Rework algorithm
lib/llm.ts (16 lines changed)
@@ -3,6 +3,7 @@ import { fileURLToPath } from "node:url";
 import {
   type ChatHistoryItem,
+  type LLamaChatPromptOptions,
   LlamaChatSession,
   type LlamaModel,
   createModelDownloader,
@@ -19,7 +20,7 @@ export async function getModel(model: string) {
   });
   const modelPath = await downloader.download();
   const llama = await getLlama({
-    maxThreads: 2,
+    maxThreads: 6,
   });
   return await llama.loadModel({ modelPath });
 }
@@ -29,7 +30,11 @@ export type Message = {
   text: string;
 };

-export async function complete(model: LlamaModel, messages: Message[]) {
+export async function complete(
+  model: LlamaModel,
+  messages: Message[],
+  options: LLamaChatPromptOptions = {},
+) {
   if (messages.length < 1) throw new Error("messages are empty");
   const init = messages.slice(0, -1);
   const last = messages.at(-1) as Message;
@@ -61,14 +66,11 @@ export async function complete(model: LlamaModel, messages: Message[]) {
   );

   const res = await session.prompt(last.text, {
-    temperature: 1.0,
-    repeatPenalty: {
-      frequencyPenalty: 1,
-    },
-    trimWhitespaceSuffix: true,
     onResponseChunk(chunk) {
       process.stderr.write(chunk.text);
     },
-    maxTokens: 200,
+    ...options,
   });
   session.dispose();
   await context.dispose();
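For context, a minimal usage sketch of the reworked complete() signature. It assumes complete() returns the prompt result and that Message carries a "user" | "model" type field (as the noteToMessage shape suggests); the model identifier and option values below are placeholders, not part of this commit.

import { getModel, complete, type Message } from "./llm";

// placeholder model identifier; pass whatever string getModel() expects to download
const model = await getModel("<model-uri>");

const messages: Message[] = [
  { type: "user", text: "Hello there!" },
];

// Caller-supplied options are spread last inside session.prompt(), so they
// take precedence over anything complete() still sets itself.
const reply = await complete(model, messages, {
  temperature: 0.8,
  maxTokens: 200,
});
console.log(reply);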
@@ -1,7 +1,7 @@
 import { api } from "misskey-js";
 import type { Note } from "misskey-js/entities.js";
-import { sample } from "./util";
 import type { Message } from "./llm";
+import { sample } from "./util";

 export const misskey = new api.APIClient({
   origin: Bun.env["MISSKEY_ORIGIN"] || "https://misskey.cannorin.net",
@@ -19,7 +19,7 @@ export const isSuitableAsInput = (n: Note) =>
   n.text.length > 0;

 /** randomly sample some notes from the timeline */
-export async function getNotes() {
+export async function getNotes(localNotesCount = 5, globalNotesCount = 10) {
   // randomly sample N local notes
   const localNotes = (count: number) =>
     misskey
@@ -34,7 +34,10 @@ export async function getNotes() {
     .then((xs) => xs.filter(isSuitableAsInput))
     .then((xs) => sample(xs, count));

-  const notes = await Promise.all([localNotes(5), globalNotes(10)]);
+  const notes = await Promise.all([
+    localNotes(localNotesCount),
+    globalNotes(globalNotesCount),
+  ]);
   return sample(notes.flat());
 }
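Likewise, a small sketch of the parameterized getNotes(); the import path is a guess (this file's name is not visible in this view) and the counts are illustrative.

// import path is a guess; adjust to wherever getNotes() actually lives
import { getNotes } from "./misskey";

// defaults remain getNotes(5, 10); callers can now tune the local/global mix
const notes = await getNotes(3, 20);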
@@ -48,9 +51,3 @@ export async function expandReplyTree(
   const reply = await misskey.request("notes/show", { noteId: note.reply.id });
   return await expandReplyTree(reply, [...acc, note], cutoff - 1);
 }
-
-/** convert a note to a chat message */
-export const noteToMessage = (note: Note): Message => ({
-  type: note.userId === me.id ? ("model" as const) : ("user" as const),
-  text: note.text?.replaceAll(`@${me.username}`, "") || "",
-});