improve site caching

This commit is contained in:
hzrd149 2024-09-07 16:40:16 -05:00
parent a262539366
commit 34d373e532
9 changed files with 1199 additions and 341 deletions

4
.env
View File

@ -1,2 +1,4 @@
NOSTR_RELAYS=wss://nostrue.com
NOSTR_RELAYS=wss://nostrue.com,wss://nos.lol,wss://relay.damus.io
BLOSSOM_SERVERS=https://cdn.hzrd149.com
MAX_FILE_SIZE='2 MB'

8
.env.example Normal file
View File

@ -0,0 +1,8 @@
# A list of nostr relays to search
NOSTR_RELAYS=wss://nostrue.com,wss://nos.lol,wss://relay.damus.io
# A list of fallback blossom servers
BLOSSOM_SERVERS=https://cdn.satellite.earth
# The max file size to serve
MAX_FILE_SIZE='2 MB'

1
.gitignore vendored
View File

@ -1,3 +1,4 @@
node_modules
build
.env
data

View File

@ -21,6 +21,7 @@
"admin/dist"
],
"dependencies": {
"@keyv/sqlite": "^4.0.1",
"@koa/cors": "^5.0.0",
"@koa/router": "^12.0.1",
"@nostr-dev-kit/ndk": "^2.10.0",
@ -29,14 +30,17 @@
"dotenv": "^16.4.5",
"follow-redirects": "^1.15.6",
"http-errors": "1",
"keyv": "^5.0.1",
"koa": "^2.15.3",
"koa-mount": "^4.0.0",
"koa-send": "^5.0.1",
"koa-static": "^5.0.0",
"mime": "^4.0.4",
"nostr-tools": "^2.7.2",
"socks-proxy-agent": "^8.0.4",
"websocket-polyfill": "^1.0.0",
"ws": "^8.18.0"
"ws": "^8.18.0",
"xbytes": "^1.9.1"
},
"devDependencies": {
"@changesets/cli": "^2.27.1",
@ -49,6 +53,7 @@
"@types/koa": "^2.14.0",
"@types/koa-basic-auth": "^2.0.6",
"@types/koa-mount": "^4.0.5",
"@types/koa-send": "^4.1.6",
"@types/koa-static": "^4.0.4",
"@types/koa__cors": "^5.0.0",
"@types/koa__router": "^12.0.4",

16
src/cache.ts Normal file
View File

@ -0,0 +1,16 @@
import Keyv from "keyv";
import KeyvSqlite from "@keyv/sqlite";
import pfs from "fs/promises";
// Ensure the data directory exists before opening the sqlite database.
// { recursive: true } makes mkdir a no-op when the directory already exists,
// so only genuine failures (e.g. permissions) reach the catch below.
try {
  await pfs.mkdir("data", { recursive: true });
} catch (error) {
  console.log("Failed to create data directory", error);
}

// Single sqlite-backed store shared by both caches below.
const keyvSqlite = new KeyvSqlite({ dialect: "sqlite", uri: "./data/cache.db" });
keyvSqlite.on("error", (err) => {
  console.log("Connection Error", err);
  process.exit(1);
});

// <pubkey>/<path> -> sha256 of the blob currently on disk for that path (24h TTL)
export const files = new Keyv({ store: keyvSqlite, ttl: 1000 * 60 * 60 * 24, namespace: "files" });
// pubkey -> true once a site has been fully synced (30s TTL throttles re-crawls)
export const downloaded = new Keyv({ store: keyvSqlite, ttl: 1000 * 30, namespace: "downloaded" });

79
src/downloader.ts Normal file
View File

@ -0,0 +1,79 @@
import fs from "fs";
import pfs from "fs/promises";
import { NSITE_KIND } from "./const.js";
import ndk from "./ndk.js";
import { BLOSSOM_SERVERS, MAX_FILE_SIZE } from "./env.js";
import { makeRequestWithAbort } from "./helpers/http.js";
import { dirname, join } from "path";
import { downloaded, files } from "./cache.js";
import { getServersFromServerListEvent, USER_BLOSSOM_SERVER_LIST_KIND } from "blossom-client-sdk";
// TODO: download the file to /tmp and verify it
/**
 * Try each blossom server in order until one returns a 2xx response for the blob.
 * Rejects blobs whose advertised Content-Length exceeds MAX_FILE_SIZE.
 * NOTE(review): if a server omits Content-Length the size check is skipped — confirm acceptable.
 *
 * @param sha256 hash of the blob to fetch (used as the URL path on each server)
 * @param servers list of blossom server base URLs to try, in order
 * @returns the readable HTTP response stream from the first successful server
 * @throws when no server produced an acceptable 2xx response
 */
async function downloadFile(sha256: string, servers = BLOSSOM_SERVERS) {
  for (const server of servers) {
    try {
      const { response } = await makeRequestWithAbort(new URL(sha256, server));

      if (!response.statusCode) throw new Error("Missing headers or status code");

      const size = response.headers["content-length"];
      if (size && parseInt(size) > MAX_FILE_SIZE) {
        // release the socket before skipping this server; without this the
        // unread response body keeps the connection alive (resource leak)
        response.destroy();
        throw new Error("File too large");
      }

      if (response.statusCode >= 200 && response.statusCode < 300) {
        return response;
      } else {
        // Consume response data to free up memory
        response.resume();
      }
    } catch (error) {
      // ignore error, try next server
    }
  }

  throw new Error("No server found");
}
/**
 * Download every file of a pubkey's nsite into data/sites/<pubkey>/.
 * Prefers the user's own blossom server list event when one exists, with the
 * configured BLOSSOM_SERVERS appended as fallbacks. On completion the pubkey
 * is marked in the `downloaded` cache (short TTL) so repeated requests don't
 * immediately re-trigger a full crawl.
 *
 * @param pubkey hex pubkey of the site owner
 */
export async function downloadSite(pubkey: string) {
  // skip if this site was synced recently
  if (await downloaded.get(pubkey)) return;

  // prefer the user's own blossom servers, then fall back to the configured ones
  const blossomServers = await ndk.fetchEvent([{ kinds: [USER_BLOSSOM_SERVER_LIST_KIND], authors: [pubkey] }]);
  const servers = blossomServers ? getServersFromServerListEvent(blossomServers).map((u) => u.toString()) : [];
  servers.push(...BLOSSOM_SERVERS);

  const nsiteEvents = await ndk.fetchEvents([{ kinds: [NSITE_KIND], authors: [pubkey] }]);
  console.log(`Found ${nsiteEvents.size} events for ${pubkey}`);

  for (const event of nsiteEvents) {
    const path = event.dTag;
    const sha256 = event.tagValue("x") || event.tagValue("sha256");
    if (!path || !sha256) continue;

    // skip files already on disk with the same hash
    const current = await files.get(join(pubkey, path));
    if (sha256 === current) continue;

    try {
      await pfs.mkdir(dirname(join("data/sites", pubkey, path)), { recursive: true });
    } catch (error) {}

    try {
      const res = await downloadFile(sha256, servers);
      console.log(`Downloading ${pubkey}${path}`);

      // Wait for the file to be fully written before recording its hash;
      // previously the hash was cached immediately after starting the pipe,
      // so a failed or partial write would be treated as a completed download.
      await new Promise<void>((resolve, reject) => {
        const ws = fs.createWriteStream(join("data/sites", pubkey, path));
        ws.on("finish", resolve);
        ws.on("error", reject);
        res.on("error", reject);
        res.pipe(ws);
      });

      await files.set(join(pubkey, path), sha256);
    } catch (error) {
      console.log(`Failed to download ${join(pubkey, path)}`, error);
    }
  }

  console.log(`Finished downloading ${pubkey}`);
  await downloaded.set(pubkey, true);
}

View File

@ -1,9 +1,11 @@
import "dotenv/config";
import xbytes from "xbytes";
const NOSTR_RELAYS = process.env.NOSTR_RELAYS?.split(",") ?? [];
const BLOSSOM_SERVERS = process.env.BLOSSOM_SERVERS?.split(",") ?? [];
if (NOSTR_RELAYS.length === 0) throw new Error("Requires at least one relay in NOSTR_RELAYS");
if (BLOSSOM_SERVERS.length === 0) throw new Error("Requires at least one server in BLOSSOM_SERVERS");
const MAX_FILE_SIZE = process.env.MAX_FILE_SIZE ? xbytes.parseSize(process.env.MAX_FILE_SIZE) : Infinity;
export { NOSTR_RELAYS, BLOSSOM_SERVERS };
if (NOSTR_RELAYS.length === 0) throw new Error("Requires at least one relay in NOSTR_RELAYS");
export { NOSTR_RELAYS, BLOSSOM_SERVERS, MAX_FILE_SIZE };

View File

@ -2,19 +2,17 @@
import "./polyfill.js";
import Koa from "koa";
import serve from "koa-static";
import path from "node:path";
import path, { join } from "node:path";
import cors from "@koa/cors";
import fs from "node:fs";
import { fileURLToPath } from "node:url";
import HttpErrors from "http-errors";
import send from "koa-send";
import logger from "./logger.js";
import { isHttpError } from "./helpers/error.js";
import { resolveNpubFromHostname } from "./helpers/dns.js";
import ndk from "./ndk.js";
import { NSITE_KIND } from "./const.js";
import { BLOSSOM_SERVERS } from "./env.js";
import { makeRequestWithAbort } from "./helpers/http.js";
import { downloadSite } from "./downloader.js";
import { downloaded } from "./cache.js";
const __dirname = path.dirname(fileURLToPath(import.meta.url));
@ -47,46 +45,22 @@ app.use(async (ctx, next) => {
}
});
// serve nsite files
// map pubkeys to folders in sites dir
app.use(async (ctx, next) => {
const pubkey = (ctx.state.pubkey = await resolveNpubFromHostname(ctx.hostname));
if (pubkey) {
const event = await ndk.fetchEvent([
{ kinds: [NSITE_KIND], "#d": [ctx.path, ctx.path.replace(/^\//, "")], authors: [pubkey] },
]);
if (!event) throw new HttpErrors.NotFound("Failed to find event for path");
const sha256 = event.tags.find((t) => t[0] === "x" || t[0] === "sha256")?.[1];
if (!sha256) throw new HttpErrors.BadGateway("Failed to find file for path");
for (const server of BLOSSOM_SERVERS) {
try {
const { response } = await makeRequestWithAbort(new URL(sha256, server));
const { headers, statusCode } = response;
if (!headers || !statusCode) throw new Error("Missing headers or status code");
if (statusCode >= 200 && statusCode < 300) {
ctx.status = statusCode;
// @ts-expect-error
ctx.set(headers);
ctx.response.body = response;
} else {
// Consume response data to free up memory
response.resume();
}
} catch (error) {
// ignore error, try next server
}
if (!(await downloaded.get(pubkey))) {
await downloadSite(pubkey);
}
// throw new HttpErrors.NotFound(`Unable to find ${sha256} on blossom servers`);
await send(ctx, join(pubkey, ctx.path), { root: "data/sites", index: "index.html" });
} else await next();
});
// serve static sites
app.use(serve("sites"));
// serve static files from public
try {
const www = path.resolve(process.cwd(), "public");

1369
yarn.lock

File diff suppressed because it is too large Load Diff