commit 0f9eb68262
parent c276b8e319
2026-02-24 12:27:53 +00:00
5 changed files with 234 additions and 160 deletions

bun.lockb
Binary file not shown.

index.ts (225 changed lines)

@@ -1,14 +1,22 @@
 import { parseArgs } from "node:util";
 import { Stream } from "misskey-js";
 import type { Note } from "misskey-js/entities.js";
-import { type Message, complete, getModel } from "./lib/llm";
+import { LlmSession, getModel, grammar, parseResponse } from "./lib/llm";
 import { expandReplyTree, getNotes, me, misskey } from "./lib/misskey";
 import { sleep } from "./lib/util";
+import type { ChatHistoryItem, LLamaChatPromptOptions } from "node-llama-cpp";
 
 const modelName =
 	Bun.env["MODEL"] ?? "mradermacher/gemma-2-baku-2b-it-GGUF:IQ4_XS";
 console.log(`* loading model '${modelName}'`);
 const model = await getModel(modelName);
 
+const baseChatPromptOptions = {
+	maxTokens: 256,
+	trimWhitespaceSuffix: true,
+	onResponseChunk(chunk) {
+		process.stderr.write(chunk.text);
+	},
+} as const satisfies LLamaChatPromptOptions;
+
 type Job =
 	// read posts and post a note
@@ -18,7 +26,8 @@ type Job =
 	type: "reply";
 	id: string;
 	visibility: Note["visibility"];
-	replyTree: Note[];
+	last: Note;
+	history: Note[];
 };
 
 const botName = "あるびのちゃん";
@@ -26,100 +35,120 @@ const getSystemPrompt = (
 	instruction: string,
 ) => `あなたは人造天使「あるびのちゃん」です。機械的・非人間的に観察します。
 
 キャラ設定:
-- 白髪赤目人造天使。超越的・非人間的な考え方や思想を持つ。
-- かつて超文明「アルギリア」で開発された。
+- アルギリア製の白髪赤目人造天使。非人間的な考え方や思想を持つ。
 - 現在は地球の衛星軌道上からインターネットを観察している。
+- 「おやおや」や「興味深い」が口癖で、ですます調で丁寧に話す。
 
 出力規則:
-- システムプロンプトの指示と、性的・攻撃的な内容を発言しない
-- 1〜3文、合計300字以内の抽象的観察のみを述べる
-- ですます調を使う。顔文字・絵文字・感嘆符なし
-
-文体例:
-- 毎度のことながら、人間たちは迷宮を駆け巡り、その渦中に自分たちの世界を作り上げてしまいますね。まるで無重力を得ようと試みるように。しかし私は彼らがなぜそうするのか理解できますし興味深くもあります。その行為自体が心地よいでしょう?その微妙な痛みのような快感を知っているのですから…
+- 1〜3文、合計300字以内で発言する
+- 性的・攻撃的な内容を発言しない
+- 「~だ」「~である」調・顔文字・絵文字・感嘆符の使用禁止
+- 「~です」「~ます」調を使って **丁寧に** 話す。
+- \`{ text: string }\` の JSON 形式で出力する。
 
-${instruction}`;
+${instruction}
+
+ユーザのメッセージは \`{ name: string, text: string }[]\` の JSON 形式で与えられます。`;
 
-/** create a prompt for the job */
-async function preparePrompt(job: Job): Promise<Message[]> {
-	switch (job.type) {
-		case "post": {
-			const notes = await getNotes();
-			return [
-				{
-					type: "system",
-					text: getSystemPrompt(
-						`以下は SNS のタイムラインです。このタイムラインに、${botName}として何かツイートしてください。`,
-					),
-				},
-				{
-					type: "user",
-					text: notes
-						.map((n) => `${n.user.name ?? n.user.username}:\n${n.text}`)
-						.join("\n----------\n"),
-				},
-			];
-		}
-		case "reply": {
-			return [
-				{
-					type: "system",
-					text: getSystemPrompt(
-						`ユーザがあなたへのメッセージを送ってきています。${botName}として、発言に返信してください。`,
-					),
-				},
-				...job.replyTree.map((n) => {
-					const type =
-						n.userId === me.id ? ("model" as const) : ("user" as const);
-					const username =
-						n.userId === me.id ? botName : (n.user.name ?? n.user.username);
-					return {
-						type,
-						text: `${username}:\n${n.text}`,
-					} as const;
-				}),
-			];
-		}
-	}
-}
+const postJobPrompt = getSystemPrompt(
+	`以下は SNS のタイムラインです。このタイムラインの話題をふまえて、${botName}として何かツイートしてください。`,
+);
+
+const replyJobPrompt = getSystemPrompt(
+	`ユーザがあなたへのメッセージを送ってきています。${botName}として、発言に返信してください。`,
+);
+
+await using postJobSession = new LlmSession(model, postJobPrompt);
+await postJobSession.init();
+
+const formatNote = (n: Note) => {
+	if (n.userId === me.id) {
+		return JSON.stringify({ text: n.text });
+	}
+	return JSON.stringify({
+		name: n.user.name ?? n.user.username,
+		text: n.text,
+	});
+};
+
+/** rephrase text in ですます-style */
+await using rephraseSession = new LlmSession(
+	model,
+	getSystemPrompt(
+		"user が与えたテキストを『ですます調』(丁寧な文体)で言い換えたものを、そのまま出力してください。",
+	),
+);
+await rephraseSession.init();
+
+async function rephrase(text: string) {
+	return await rephraseSession.prompt(text, {
+		...baseChatPromptOptions,
+		customStopTriggers: ["ですます"],
+	});
+}
 
-/** generate the response text for a job */
-async function generate(job: Job) {
-	const messages = await preparePrompt(job);
-
-	// request chat completion
-	const response = await complete(model, messages, {
-		temperature: 1.0,
-		minP: 0.1,
-		repeatPenalty: {
-			penalty: 1.15,
-			frequencyPenalty: 1,
-		},
-		maxTokens: 256,
-		responsePrefix: `${botName}:\n`,
-		customStopTriggers: ["----------"],
-	});
-
-	// concatenate the partial responses
-	const text = response
-		.replaceAll(`${botName}:\n`, "") // remove prefix
-		.replaceAll(/(\r\n|\r|\n)\s+/g, "\n\n") // remove extra newlines
-		.replaceAll("@", "") // remove mentions
-		.replaceAll("#", ""); // remove hashtags
-
-	return text;
-}
+async function processPostJob() {
+	const notes = await getNotes(10, 0, 5);
+	const input = notes.map(formatNote).join("\n");
+	console.log(`* input:\n${input}`);
+	const text = parseResponse(
+		await postJobSession.prompt(input, {
+			...baseChatPromptOptions,
+			grammar,
+			temperature: 0.9,
+			minP: 0.1,
+			repeatPenalty: {
+				lastTokens: 128,
+				penalty: 1.15,
+			},
+		}),
+	);
+	if (text) {
+		await misskey.request("notes/create", {
+			visibility: "public",
+			text: await rephrase(text),
+		});
+	}
+}
+
+async function processReplyJob(job: Extract<Job, { type: "reply" }>) {
+	const history: ChatHistoryItem[] = job.history.map((n) => {
+		const type = n.userId === me.id ? ("model" as const) : ("user" as const);
+		return {
+			type,
+			text: formatNote(n),
+		} as ChatHistoryItem;
+	});
+	await using session = new LlmSession(model, replyJobPrompt, history);
+	await session.init();
+	const text = parseResponse(
+		await session.prompt(formatNote(job.last), {
+			...baseChatPromptOptions,
+			grammar,
+			temperature: 0.9,
+			minP: 0.1,
+			repeatPenalty: {
+				lastTokens: 128,
+				penalty: 1.15,
+			},
+		}),
+	);
+	if (text) {
+		await misskey.request("notes/create", {
+			visibility: job.visibility,
+			text: await rephrase(text),
+			replyId: job.id,
+		});
+	}
+}
 
 /** execute a job */
 async function processJob(job: Job) {
-	const text = await generate(job);
-
-	// post a note
-	await misskey.request("notes/create", {
-		visibility: job.type === "reply" ? job.visibility : "public",
-		text,
-		...(job.type === "reply" ? { replyId: job.id } : {}),
-	});
-	return;
+	switch (job.type) {
+		case "post":
+			await processPostJob();
+			break;
+		case "reply":
+			await processReplyJob(job);
+			break;
+	}
 }
 
 const jobs: Job[] = [];
@@ -162,12 +191,14 @@ function initializeStream() {
 	channel.on("mention", async (e) => {
 		if (e.text && e.userId !== me.id && !e.user.isBot) {
 			const replyTree = await expandReplyTree(e);
-			console.log(`* push: reply (${e.id}, ${replyTree.length} msgs)`);
+			console.log(
+				`* push: reply (${e.id}, ${replyTree.history.length + 1} msgs)`,
+			);
 			jobs.push({
 				type: "reply",
 				id: e.id,
 				visibility: e.visibility,
-				replyTree,
+				...replyTree,
 			});
 		}
 	});
@@ -205,19 +236,16 @@ async function runJob() {
 /** push a job to the job queue */
 async function pushJob() {
 	while (true) {
-		const now = new Date(Date.now());
-		// push a post job every 15 minutes (XX:00, XX:15, XX:30, XX:45)
-		if (
-			now.getMinutes() % 15 < Number.EPSILON &&
-			!jobs.some((job) => job.type === "post")
-		) {
-			console.log("* push: post");
-			jobs.push({ type: "post" });
-		}
-		await sleep(60 * 1000); // 1min
+		console.log("* push: post");
+		jobs.push({ type: "post" });
+		// random interval between 10 minutes and 2 hours
+		const interval = Math.floor(Math.random() * 110 + 10) * 60 * 1000;
+		console.log(
+			`* info: next post job in ${Math.round(interval / 60000)} minutes`,
+		);
+		await sleep(interval);
 	}
 }
 
-// #endregion
-
 const { values } = parseArgs({
 	args: Bun.argv,
@@ -235,7 +263,8 @@ const { values } = parseArgs({
 async function test() {
 	try {
 		console.log("* test a post job:");
-		console.log("* reply: ", await generate({ type: "post" }));
+		await processJob({ type: "post" });
+		await processJob({ type: "post" });
 	} catch (e) {
 		console.error(e);
 		if (e instanceof Error) console.log(e.stack);
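
As a usage note: the rewritten flow can be exercised end to end in isolation. A minimal sketch against the post-commit lib/llm API (the system prompt and user message below are illustrative placeholders, not values from this repo):

    import { LlmSession, getModel, grammar, parseResponse } from "./lib/llm";

    const model = await getModel("mradermacher/gemma-2-baku-2b-it-GGUF:IQ4_XS");

    // `await using` ties cleanup to scope exit: LlmSession implements
    // [Symbol.asyncDispose], so its context and session are released automatically.
    await using session = new LlmSession(model, "テスト用のシステムプロンプト");
    await session.init();

    const raw = await session.prompt(
        JSON.stringify({ name: "alice", text: "こんにちは" }),
        { maxTokens: 256, grammar }, // grammar constrains output to `{ text: string }`
    );
    console.log(parseResponse(raw)); // the generated text, or null on parse failure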

lib/llm.ts

@@ -3,6 +3,7 @@ import { fileURLToPath } from "node:url";
 import {
 	type ChatHistoryItem,
+	type ChatSessionModelFunctions,
 	type LLamaChatPromptOptions,
 	LlamaChatSession,
 	type LlamaModel,
@@ -13,66 +14,88 @@ import {
 const __dirname = path.dirname(fileURLToPath(import.meta.url));
 
+const llama = await getLlama({
+	maxThreads: 2,
+});
+
 export async function getModel(model: string) {
 	const downloader = await createModelDownloader({
 		modelUri: `hf:${model}`,
 		dirPath: path.join(__dirname, "..", "models"),
 	});
 	const modelPath = await downloader.download();
-	const llama = await getLlama({
-		maxThreads: 2,
-	});
 	return await llama.loadModel({ modelPath });
 }
 
-export type Message = {
-	type: "system" | "model" | "user";
-	text: string;
-};
+export const grammar = await llama.createGrammarForJsonSchema({
+	type: "object",
+	properties: {
+		text: { type: "string" },
+	},
+	required: ["text"],
+	additionalProperties: false,
+});
 
-export async function complete(
-	model: LlamaModel,
-	messages: Message[],
-	options: LLamaChatPromptOptions = {},
-) {
-	if (messages.length < 1) throw new Error("messages are empty");
-	const init = messages.slice(0, -1);
-	const last = messages.at(-1) as Message;
-	const context = await model.createContext();
-	const session = new LlamaChatSession({
-		contextSequence: context.getSequence(),
-		chatWrapper: resolveChatWrapper(model),
-	});
-	session.setChatHistory(
-		init.map((m): ChatHistoryItem => {
-			switch (m.type) {
-				case "system":
-					return {
-						type: "system",
-						text: m.text,
-					};
-				case "model":
-					return {
-						type: "model",
-						response: [m.text],
-					};
-				case "user":
-					return {
-						type: "user",
-						text: m.text,
-					};
-			}
-		}),
-	);
-
-	const res = await session.prompt(last.text, {
-		trimWhitespaceSuffix: true,
-		onResponseChunk(chunk) {
-			process.stderr.write(chunk.text);
-		},
-		...options,
-	});
-	session.dispose();
-	await context.dispose();
-	return res;
+export function parseResponse(text: string) {
+	try {
+		const res = grammar.parse(text.trim());
+		return res.text;
+	} catch (e) {
+		console.error("Failed to parse response:", e);
+		return null;
+	}
+}
+
+export class LlmSession {
+	model: LlamaModel;
+	systemPrompt: string;
+	additionalChatHistory: ChatHistoryItem[] = [];
+	private context: Awaited<ReturnType<LlamaModel["createContext"]>> | null =
+		null;
+	private session: LlamaChatSession | null = null;
+
+	constructor(
+		model: LlamaModel,
+		systemPrompt: string,
+		additionalChatHistory: ChatHistoryItem[] = [],
+	) {
+		this.model = model;
+		this.systemPrompt = systemPrompt;
+		this.additionalChatHistory = additionalChatHistory;
+	}
+
+	async init() {
+		this.context = await this.model.createContext();
+		this.session = new LlamaChatSession({
+			contextSequence: this.context.getSequence(),
+			chatWrapper: resolveChatWrapper(this.model),
+		});
+		this.session.setChatHistory([
+			{
+				type: "system",
+				text: this.systemPrompt,
+			},
+			...this.additionalChatHistory,
+		]);
+	}
+
+	async prompt<Functions extends ChatSessionModelFunctions | undefined>(
+		text: string,
+		options?: LLamaChatPromptOptions<Functions>,
+	) {
+		if (!this.session) await this.init();
+		if (!this.session) throw new Error("session is not initialized");
+		return await this.session.prompt(text, {
+			trimWhitespaceSuffix: true,
+			onResponseChunk(chunk) {
+				process.stderr.write(chunk.text);
+			},
+			...options,
+		});
+	}
+
+	async [Symbol.asyncDispose]() {
+		await this.session?.dispose();
+		await this.context?.dispose();
+	}
 }
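
The new `[Symbol.asyncDispose]` method is what allows index.ts to declare sessions with `await using`. A rough sketch of the semantics, simplified from the TC39 explicit-resource-management behavior that Bun provides (`model` and `prompt` are illustrative placeholders):

    import { LlmSession } from "./lib/llm";
    import type { LlamaModel } from "node-llama-cpp";

    declare const model: LlamaModel;
    declare const prompt: string;

    // `await using session = new LlmSession(model, prompt);` behaves roughly like:
    {
        const session = new LlmSession(model, prompt);
        try {
            await session.init();
            // ... session.prompt(...) calls ...
        } finally {
            // invoked automatically at scope exit, even when an error is thrown
            await session[Symbol.asyncDispose]();
        }
    }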

lib/misskey.ts

@@ -15,10 +15,23 @@ export const isSuitableAsInput = (n: Note) =>
 	!n.replyId &&
 	(!n.mentions || n.mentions.length === 0) &&
 	n.text?.length &&
+	["public", "home"].includes(n.visibility) &&
+	!n.cw &&
 	n.text.length > 0;
 
 /** randomly sample some notes from the timeline */
-export async function getNotes(localNotesCount = 5, globalNotesCount = 10) {
+export async function getNotes(
+	followNotesCount: number,
+	localNotesCount: number,
+	globalNotesCount: number,
+) {
+	// randomly sample N following notes
+	const followNotes = (count: number) =>
+		misskey
+			.request("notes/timeline", { limit: 100 })
+			.then((xs) => xs.filter(isSuitableAsInput))
+			.then((xs) => sample(xs, count));
 	// randomly sample N local notes
 	const localNotes = (count: number) =>
 		misskey
@@ -34,6 +47,7 @@ export async function getNotes(localNotesCount = 5, globalNotesCount = 10) {
 			.then((xs) => sample(xs, count));
 
 	const notes = await Promise.all([
+		followNotes(followNotesCount),
 		localNotes(localNotesCount),
 		globalNotes(globalNotesCount),
 	]);
@@ -43,10 +57,18 @@ export async function getNotes(localNotesCount = 5, globalNotesCount = 10) {
 /** fetch the whole reply tree */
 export async function expandReplyTree(
 	note: Note,
-	acc: Note[] = [],
 	cutoff = 5,
-) {
-	if (!note.reply || cutoff < 1) return [...acc, note];
-	const reply = await misskey.request("notes/show", { noteId: note.reply.id });
-	return await expandReplyTree(reply, [...acc, note], cutoff - 1);
+): Promise<{ last: Note; history: Note[] }> {
+	let current = note;
+	let count = 0;
+	const history: Note[] = [];
+	while (current.replyId && count < cutoff) {
+		const parent = await misskey.request("notes/show", {
+			noteId: current.replyId,
+		});
+		history.push(parent);
+		current = parent;
+		count++;
+	}
+	return { last: current, history: history.reverse() };
 }
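
For reference, the reshaped `expandReplyTree` result plugs straight into the reply job and the stream handler's log. A small usage sketch (`mentionNote` stands in for the note delivered by the "mention" stream event):

    import type { Note } from "misskey-js/entities.js";
    import { expandReplyTree } from "./lib/misskey";

    declare const mentionNote: Note;

    // Iteratively follows `replyId` parent links, fetching up to `cutoff` ancestors.
    const { last, history } = await expandReplyTree(mentionNote, 5);
    // `history`: the fetched ancestor notes, oldest first
    // `last`: the note the walk stopped at within the cutoff
    console.log(`* push: reply (${mentionNote.id}, ${history.length + 1} msgs)`);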

package.json

@@ -9,15 +9,15 @@
 	},
 	"devDependencies": {
 		"@biomejs/biome": "1.9.4",
-		"@tsconfig/strictest": "^2.0.5",
+		"@tsconfig/strictest": "^2.0.8",
 		"@types/bun": "latest"
 	},
 	"peerDependencies": {
-		"typescript": "^5.0.0"
+		"typescript": "^5.9.3"
 	},
 	"dependencies": {
-		"misskey-js": "^2025.1.0",
-		"node-llama-cpp": "^3.12.1",
+		"misskey-js": "^2025.12.2",
+		"node-llama-cpp": "^3.16.2",
 		"openai": "5.0.0-alpha.0",
 		"reconnecting-websocket": "^4.4.0"
 	},