Add support for resolving NIP-05 names on set domains

hzrd149 2025-04-05 16:19:51 +01:00
parent b37664bc5b
commit 9a04f63712
9 changed files with 147 additions and 37 deletions

View File

@ -0,0 +1,5 @@
---
"nsite-gateway": minor
---
Add support for resolving NIP-05 names on set domains
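
In practice, the first DNS label of an incoming hostname is looked up as a NIP-05 name on each configured domain. A minimal sketch of that lookup, assuming NIP05_NAME_DOMAINS resolves to ["example.com", "nostr.other.site"]; resolveNameOnDomains is a hypothetical helper name, not the gateway's API:

import { nip05 } from "nostr-tools";

async function resolveNameOnDomains(hostname: string, domains: string[]): Promise<string | undefined> {
  // Only the first DNS label is used as the NIP-05 local part ("alice" in alice.my-gateway.com)
  const [name] = hostname.split(".");
  for (const domain of domains) {
    try {
      const profile = await nip05.queryProfile(`${name}@${domain}`);
      if (profile) return profile.pubkey; // hex pubkey published by the domain
    } catch {
      // ignore lookup errors and try the next configured domain
    }
  }
  return undefined;
}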

View File

@ -24,6 +24,12 @@ NSITE_HOMEPAGE=""
# a local directory to download the homepage to
NSITE_HOMEPAGE_DIR="public"
# The public domain of the gateway (optional, used to detect when to show the nsite homepage)
PUBLIC_DOMAIN="nsite.gateway.com"
# The NIP-05 domains to use for name resolution (comma separated)
# NIP05_NAME_DOMAINS="example.com,nostr.other.site"
# If this is set, nsite will return the 'Onion-Location' header in responses
# ONION_HOST=https://<hostname>.onion
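
Each domain listed in NIP05_NAME_DOMAINS is expected to serve a standard NIP-05 document, so with the commented-out example above a request for alice.<gateway host> would be checked against https://example.com/.well-known/nostr.json?name=alice and then https://nostr.other.site/.well-known/nostr.json?name=alice. A sketch of the expected response shape (the name and key are placeholders):

// Served by the NIP-05 domain at /.well-known/nostr.json?name=alice
const exampleNostrJson = {
  names: {
    alice: "<64-character hex pubkey>",
  },
  // an optional "relays" map is also allowed by NIP-05, but is not used by this lookup
};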

public/404.html Normal file (+51 lines)
View File

@ -0,0 +1,51 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>404 - Page Not Found</title>
<style>
body {
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Helvetica, Arial, sans-serif;
line-height: 1.6;
color: #333;
max-width: 800px;
margin: 40px auto;
padding: 0 20px;
background-color: #f5f5f5;
}
.container {
background-color: white;
padding: 30px;
border-radius: 8px;
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
}
h1 {
color: #2c3e50;
margin-bottom: 20px;
}
.info {
background-color: #f8f9fa;
border-left: 4px solid #dc3545;
padding: 15px;
margin: 20px 0;
}
</style>
</head>
<body>
<div class="container">
<h1>404 - Page Not Found</h1>
<div class="info">
<p>We couldn't find an nsite for this domain.</p>
<p>This could mean:</p>
<ul>
<li>The domain is not configured to point to an nsite</li>
</ul>
</div>
<p>
For more information about setting up an nsite, please refer to the
<a href="https://github.com/hzrd149/nsite-gateway">documentation</a>
</p>
</div>
</body>
</html>

View File

@ -1,6 +1,7 @@
import Keyv from "keyv";
import Keyv, { KeyvOptions } from "keyv";
import { CACHE_PATH, CACHE_TIME } from "./env.js";
import logger from "./logger.js";
import { ParsedEvent } from "./events.js";
const log = logger.extend("cache");
@ -24,11 +25,13 @@ store?.on("error", (err) => {
process.exit(1);
});
const opts = store ? { store } : {};
const json: KeyvOptions = { serialize: JSON.stringify, deserialize: JSON.parse };
const opts: KeyvOptions = store ? { store } : {};
/** A cache that maps a domain to a pubkey ( domain -> pubkey ) */
export const pubkeyDomains = new Keyv<string | undefined>({
...opts,
...json,
namespace: "domains",
ttl: CACHE_TIME * 1000,
});
@ -36,6 +39,7 @@ export const pubkeyDomains = new Keyv<string | undefined>({
/** A cache that maps a pubkey to a set of blossom servers ( pubkey -> servers ) */
export const pubkeyServers = new Keyv<string[] | undefined>({
...opts,
...json,
namespace: "servers",
ttl: CACHE_TIME * 1000,
});
@ -43,13 +47,15 @@ export const pubkeyServers = new Keyv<string[] | undefined>({
/** A cache that maps a pubkey to a set of relays ( pubkey -> relays ) */
export const pubkeyRelays = new Keyv<string[] | undefined>({
...opts,
...json,
namespace: "relays",
ttl: CACHE_TIME * 1000,
});
/** A cache that maps a pubkey + path to the parsed nsite event for that path ( pubkey/path -> event ) */
export const pathBlobs = new Keyv<string | undefined>({
export const pathBlobs = new Keyv<ParsedEvent | undefined>({
...opts,
...json,
namespace: "paths",
ttl: CACHE_TIME * 1000,
});
@ -57,6 +63,7 @@ export const pathBlobs = new Keyv<string | undefined>({
/** A cache that maps a sha256 hash to a set of URLs that had the blob ( sha256 -> URLs ) */
export const blobURLs = new Keyv<string[] | undefined>({
...opts,
...json,
namespace: "blobs",
ttl: CACHE_TIME * 1000,
});
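
One note on the serializer change, as a rough illustration rather than part of the commit: the explicit JSON.stringify/JSON.parse pair stores values as plain JSON, so structured values such as ParsedEvent and the string[] relay/server lists come back with the same shape from whichever store is configured. A sketch of the round trip, using the pubkey + path key convention used elsewhere in this commit:

import { pathBlobs } from "./cache.js";
import type { ParsedEvent } from "./events.js";

const entry: ParsedEvent = {
  pubkey: "<hex pubkey>",
  path: "/index.html",
  sha256: "<sha256 of the blob>",
  created_at: 1743861591,
};

// Stored as JSON, so the object deserializes with the same shape on the next read
await pathBlobs.set(entry.pubkey + entry.path, entry);
const cached = await pathBlobs.get(entry.pubkey + entry.path); // ParsedEvent | undefined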

View File

@ -1,7 +1,8 @@
import dns from "node:dns";
import { nip19 } from "nostr-tools";
import { nip05, nip19 } from "nostr-tools";
import { pubkeyDomains as pubkeyDomains } from "./cache.js";
import logger from "./logger.js";
import { NIP05_NAME_DOMAINS } from "./env.js";
export function getCnameRecords(hostname: string): Promise<string[]> {
return new Promise<string[]>((res, rej) => {
@ -42,8 +43,8 @@ export async function resolvePubkeyFromHostname(hostname: string): Promise<strin
// check if domain contains an npub
let pubkey = extractPubkeyFromHostname(hostname);
// try to get npub from CNAME or TXT records
if (!pubkey) {
// try to get npub from CNAME
try {
const cnameRecords = await getCnameRecords(hostname);
for (const cname of cnameRecords) {
@ -57,6 +58,7 @@ export async function resolvePubkeyFromHostname(hostname: string): Promise<strin
}
if (!pubkey) {
// Try to get npub from TXT records
try {
const txtRecords = await getTxtRecords(hostname);
@ -72,6 +74,20 @@ export async function resolvePubkeyFromHostname(hostname: string): Promise<strin
} catch (error) {}
}
// Try to get npub from NIP-05
if (!pubkey && NIP05_NAME_DOMAINS) {
for (const domain of NIP05_NAME_DOMAINS) {
try {
const [name] = hostname.split(".");
const result = await nip05.queryProfile(name + "@" + domain);
if (result) {
pubkey = result.pubkey;
break;
}
} catch (err) {}
}
}
log(`Resolved ${hostname} to ${pubkey}`);
await pubkeyDomains.set(hostname, pubkey);
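
Taken together with the branches above, the resolution order for a hostname is: an npub embedded in the hostname, then CNAME records, then TXT records, then a NIP-05 lookup on each domain in NIP05_NAME_DOMAINS, with the result cached per hostname. An illustrative call (the hostname is a placeholder):

// With NIP05_NAME_DOMAINS="example.com,nostr.other.site" this ends up trying
// alice@example.com and then alice@nostr.other.site if no npub, CNAME, or TXT record matched.
const pubkey = await resolvePubkeyFromHostname("alice.my-gateway.com");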

View File

@ -19,6 +19,9 @@ const MAX_FILE_SIZE = process.env.MAX_FILE_SIZE ? xbytes.parseSize(process.env.M
const CACHE_PATH = process.env.CACHE_PATH;
const CACHE_TIME = process.env.CACHE_TIME ? parseInt(process.env.CACHE_TIME) : 60 * 60;
const NIP05_NAME_DOMAINS = process.env.NIP05_NAME_DOMAINS?.split(",").map((d) => d.trim());
const PUBLIC_DOMAIN = process.env.PUBLIC_DOMAIN;
const PAC_PROXY = process.env.PAC_PROXY;
const TOR_PROXY = process.env.TOR_PROXY;
const I2P_PROXY = process.env.I2P_PROXY;
@ -45,4 +48,6 @@ export {
HOST,
ONION_HOST,
CACHE_TIME,
NIP05_NAME_DOMAINS,
PUBLIC_DOMAIN,
};

View File

@ -1,6 +1,14 @@
import { extname, join } from "path";
import { NSITE_KIND } from "./const.js";
import { requestEvents } from "./nostr.js";
import { pathBlobs } from "./cache.js";
export type ParsedEvent = {
pubkey: string;
path: string;
sha256: string;
created_at: number;
};
/** Returns all the `d` tags that should be searched for a given path */
export function getSearchPaths(path: string) {
@ -26,20 +34,24 @@ export function parseNsiteEvent(event: { pubkey: string; tags: string[][]; creat
}
/** Returns the first blob found for a given path */
export async function getNsiteBlob(
pubkey: string,
path: string,
relays: string[],
): Promise<{ sha256: string; path: string; created_at: number } | undefined> {
export async function getNsiteBlob(pubkey: string, path: string, relays: string[]): Promise<ParsedEvent | undefined> {
const key = pubkey + path;
const cached = await pathBlobs.get(key);
if (cached) return cached;
// NOTE: hack, remove "/" paths since they break some relays
const paths = getSearchPaths(path).filter((p) => p !== "/");
const events = await requestEvents(relays, { kinds: [NSITE_KIND], "#d": paths, authors: [pubkey] });
// Sort the found blobs by the order of the paths array
const blobs = Array.from(events)
const options = Array.from(events)
.map(parseNsiteEvent)
.filter((e) => !!e)
.sort((a, b) => paths.indexOf(a.path) - paths.indexOf(b.path));
return blobs[0];
// Remember the blob for this path
if (options.length > 0) await pathBlobs.set(key, options[0]);
return options[0];
}
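
For callers, the practical change is that repeated requests for the same pubkey and path are answered from the pathBlobs cache instead of re-querying the relays. A hypothetical usage (the pubkey and relay URL are placeholders):

const blob = await getNsiteBlob("<hex pubkey>", "/index.html", ["wss://relay.example.com"]);
if (blob) console.log(blob.sha256, blob.created_at); // cached until the TTL expires or the entry is invalidated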

View File

@ -22,6 +22,7 @@ import {
NSITE_HOST,
NSITE_PORT,
ONION_HOST,
PUBLIC_DOMAIN,
SUBSCRIPTION_RELAYS,
} from "./env.js";
import pool, { getUserBlossomServers, getUserOutboxes } from "./nostr.js";
@ -59,18 +60,21 @@ app.use(async (ctx, next) => {
});
// handle nsite requests
app.use(async (ctx, next) => {
app.use(async (ctx) => {
let pubkey = await resolvePubkeyFromHostname(ctx.hostname);
if (!pubkey) {
if (NSITE_HOMEPAGE) {
if (!pubkey && NSITE_HOMEPAGE && (!PUBLIC_DOMAIN || ctx.hostname === PUBLIC_DOMAIN)) {
const parsed = nip19.decode(NSITE_HOMEPAGE);
// TODO: use the relays in the nprofile
if (parsed.type === "nprofile") pubkey = parsed.data.pubkey;
else if (parsed.type === "npub") pubkey = parsed.data;
else return await next();
} else return await next();
}
if (!pubkey) {
ctx.status = 404;
ctx.body = fs.readFileSync(path.resolve(__dirname, "../public/404.html"), "utf-8");
return;
}
// fetch relays
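
Read together with the lines above, the middleware now falls through three cases; this is a summary of the logic, not a separate implementation:

// 1. The hostname resolves to a pubkey (npub / CNAME / TXT / NIP-05)      -> serve that nsite
// 2. No pubkey, but NSITE_HOMEPAGE is set and the request is for
//    PUBLIC_DOMAIN (or PUBLIC_DOMAIN is unset)                            -> serve the homepage nsite
// 3. Otherwise                                                            -> respond 404 with public/404.html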

View File

@ -1,22 +1,27 @@
import { nip19 } from "nostr-tools";
import { npubEncode } from "nostr-tools/nip19";
import { SUBSCRIPTION_RELAYS } from "./env.js";
import { parseNsiteEvent } from "./events.js";
import pool from "./nostr.js";
import { NSITE_KIND } from "./const.js";
import logger from "./logger.js";
import { pathBlobs } from "./cache.js";
const log = logger.extend("invalidation");
export function watchInvalidation() {
// invalidate nginx cache on new events
if (SUBSCRIPTION_RELAYS.length > 0) {
if (SUBSCRIPTION_RELAYS.length === 0) return;
logger(`Listening for new nsite events on: ${SUBSCRIPTION_RELAYS.join(", ")}`);
pool.subscribeMany(SUBSCRIPTION_RELAYS, [{ kinds: [NSITE_KIND], since: Math.round(Date.now() / 1000) - 60 * 60 }], {
onevent: async (event) => {
try {
const nsite = parseNsiteEvent(event);
if (nsite) {
const log = logger.extend(nip19.npubEncode(nsite.pubkey));
const parsed = parseNsiteEvent(event);
if (parsed) {
pathBlobs.delete(parsed.pubkey + parsed.path);
log(`Invalidated ${npubEncode(parsed.pubkey) + parsed.path}`);
}
} catch (error) {
console.log(`Failed to invalidate ${event.id}`);
@ -24,4 +29,3 @@ export function watchInvalidation() {
},
});
}
}