// src/modelCache.ts
import fs from "node:fs";
import os from "node:os";
import path from "node:path";
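
// On-disk location: ~/.lmstudio/openai-compat-endpoint/models-cache.json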
const cacheDir = path.join(os.homedir(), ".lmstudio", "openai-compat-endpoint");
const cachePath = path.join(cacheDir, "models-cache.json");
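
/** Shape of the persisted JSON. Both fields are optional so partial or stale cache files still parse. */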
type ModelCache = {
  models?: string[];
  lastSelected?: string;
};
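
/**
 * Reads the cached model IDs and the last-selected model from disk.
 * Never throws: a missing, unreadable, or malformed cache file yields an
 * empty model list and no `lastSelected`.
 */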
export function readCachedState(): { models: string[]; lastSelected?: string } {
  try {
    const raw = fs.readFileSync(cachePath, "utf8");
    const parsed = JSON.parse(raw) as ModelCache;
    // Validate defensively: the file may have been hand-edited or written by
    // an older version, so keep only non-empty string entries.
    const models = Array.isArray(parsed?.models)
      ? parsed.models.filter((id): id is string => typeof id === "string" && id.length > 0)
      : [];
    const lastSelected =
      typeof parsed?.lastSelected === "string" && parsed.lastSelected.length > 0
        ? parsed.lastSelected
        : undefined;
    return { models, lastSelected };
  } catch {
    // Missing file, unreadable file, or invalid JSON: start with an empty cache.
    return { models: [] };
  }
}
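
/**
 * Persists the model IDs and last-selected model, creating the cache
 * directory on first use.
 */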
export async function writeCachedState(
  models: string[],
  lastSelected?: string,
): Promise<void> {
  // Create the cache directory if it does not exist yet (first run, or after
  // the user cleared ~/.lmstudio).
  await fs.promises.mkdir(cacheDir, { recursive: true });
  const payload: ModelCache = { models, lastSelected };
  await fs.promises.writeFile(cachePath, JSON.stringify(payload, null, 2), "utf8");
}
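
// Minimal usage sketch (hypothetical caller; `fetchModelIds` is an assumed
// helper standing in for whatever populates the list, e.g. a request to the
// endpoint's GET /v1/models route):
//
//   const { models: cached, lastSelected } = readCachedState();
//   const models = await fetchModelIds().catch(() => cached); // fall back to cache
//   await writeCachedState(models, lastSelected ?? models[0]);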