Compare commits

...

68 Commits

Author SHA1 Message Date
hzrd149
364e947245
Merge pull request #13 from hzrd149/changeset-release/master
Version Packages
2025-06-09 18:18:08 -05:00
github-actions[bot]
53a4b9f548 Version Packages 2025-06-09 23:17:59 +00:00
hzrd149
1473eee4fd Fix returning setup page when event can't be found for pubkey 2025-06-09 18:17:32 -05:00
hzrd149
7fa6a79d3b
Merge pull request #12 from hzrd149/changeset-release/master
Version Packages
2025-04-24 11:15:22 -05:00
github-actions[bot]
808ffa77be Version Packages 2025-04-05 16:07:17 +00:00
hzrd149
243fe2cd5a add caddy example 2025-04-05 17:06:53 +01:00
hzrd149
14d767114a fallback to public folder when using nsite homepage 2025-04-05 16:41:07 +01:00
hzrd149
9a04f63712 Add support for resolving NIP-05 names on set domains 2025-04-05 16:19:51 +01:00
hzrd149
b37664bc5b Cleanup DNS pubkey resolution 2025-04-05 15:31:28 +01:00
hzrd149
ef5262f73c Remove nginx cache invalidations
Remove screenshots
Fix race condition bug
2025-04-05 15:13:05 +01:00
hzrd149
c3778507d4 remove node sea
add CACHE_TIME variable
2025-03-25 08:26:53 +00:00
hzrd149
80aab93bb7 remove publish next 2025-03-24 21:46:36 +00:00
hzrd149
225b616a3c fix node sea 2025-03-24 21:46:03 +00:00
hzrd149
638f798df1 add node SEA
change build folder
2025-03-24 21:44:40 +00:00
hzrd149
d87497e6c0 cleanup
bump dependencies
2025-03-17 17:35:11 +00:00
hzrd149
b2b8e0108e Make blossom requests in parallel 2025-03-16 22:18:44 +00:00
hzrd149
2fc6fbc2f1
Merge pull request #11 from hzrd149/changeset-release/master
Version Packages
2025-03-07 17:47:48 +00:00
github-actions[bot]
8fee897834 Version Packages 2025-03-07 17:44:26 +00:00
hzrd149
023e03ec49 rename package to nsite-gateway 2025-03-07 17:40:30 +00:00
hzrd149
13f5b2ce20
Merge pull request #10 from hzrd149/changeset-release/master
Version Packages
2025-03-04 04:29:25 -06:00
github-actions[bot]
3f218e9765 Version Packages 2025-03-04 10:28:05 +00:00
hzrd149
c4cfa61c76
Update LICENSE year 2025-03-04 04:27:47 -06:00
hzrd149
3747037f89 add license file 2025-03-04 04:26:47 -06:00
hzrd149
b781b7dfe4
Merge pull request #8 from hzrd149/changeset-release/master
Version Packages
2025-01-22 12:27:11 -06:00
github-actions[bot]
373e8fb1cd Version Packages 2025-01-22 18:13:58 +00:00
hzrd149
7b6e5560e6 small fix for logging 2025-01-22 12:13:38 -06:00
hzrd149
c84396ed62 Add option to download another nsite as a homepage
Replace homepage with simple welcome page
2025-01-22 12:09:06 -06:00
hzrd149
c4eae33451 login to npn first 2025-01-22 10:52:04 -06:00
hzrd149
e845d43a7a small fix 2025-01-22 10:47:15 -06:00
hzrd149
e34b52f0dc add publish next action 2025-01-22 10:39:28 -06:00
hzrd149
2ac847f3b1 Add colors to logging 2025-01-22 10:32:12 -06:00
hzrd149
5be0822410 Fix serving hidden files in .well-known 2025-01-22 08:51:46 -06:00
hzrd149
b02f15242c
Merge pull request #7 from hzrd149/changeset-release/master
Version Packages
2025-01-06 12:44:39 -06:00
github-actions[bot]
07869fcb4a Version Packages 2025-01-06 18:43:26 +00:00
hzrd149
6704516336 Fix package missing build folder 2025-01-06 12:43:02 -06:00
hzrd149
a2644629ba
Merge pull request #6 from hzrd149/changeset-release/master
Version Packages
2024-12-18 10:32:24 -06:00
github-actions[bot]
f42546677a Version Packages 2024-12-18 15:53:57 +00:00
hzrd149
ba71f35593 update readme 2024-12-18 09:53:29 -06:00
hzrd149
3853ab4f96
Merge pull request #5 from hzrd149/changeset-release/master
Version Packages
2024-10-18 11:31:23 +01:00
github-actions[bot]
21744469cd Version Packages 2024-10-18 10:30:02 +00:00
hzrd149
db172d4d0a Add support for custom 404.html pages 2024-10-18 11:29:29 +01:00
hzrd149
cb3b694074
Merge pull request #4 from hzrd149/changeset-release/master
Version Packages
2024-10-07 11:19:00 -05:00
github-actions[bot]
fbf1f1a28a Version Packages 2024-10-07 16:18:49 +00:00
hzrd149
06a407d28a add proxy env variables to example 2024-10-07 11:18:23 -05:00
hzrd149
7c3c9c0d6c Add ONION_HOST env variable 2024-10-06 10:01:15 -05:00
hzrd149
7dfcff462b
Merge pull request #3 from hzrd149/changeset-release/master
Version Packages
2024-10-04 13:04:43 -05:00
github-actions[bot]
381524f5e9 Version Packages 2024-10-04 17:31:46 +00:00
hzrd149
68734ed478 fix small issue with requests 2024-10-04 12:31:19 -05:00
hzrd149
78836cb752 small fixes 2024-10-04 12:17:30 -05:00
hzrd149
048696f155 small fixes 2024-10-04 12:05:47 -05:00
hzrd149
fb3541259c hide thumbnail if not enabled 2024-10-04 11:59:50 -05:00
hzrd149
20de824ce8 fix chrome install on docker
use --no-sandbox for google chrome
2024-10-04 11:46:17 -05:00
hzrd149
f25e2409e9 Add screenshots for nsites 2024-10-04 10:32:57 -05:00
hzrd149
1d3c9e1f6a add manual tor and i2p proxy env variables 2024-10-01 12:56:54 -05:00
hzrd149
d1d20558ff fix docker build 2024-10-01 12:30:43 -05:00
hzrd149
145f89d3d1 Add support for ALL_PROXY env variable 2024-10-01 12:28:34 -05:00
hzrd149
87fecfc2e3 Update landing page 2024-09-26 16:03:28 -05:00
hzrd149
a03b2719c1
Delete .env 2024-09-26 13:14:03 -05:00
hzrd149
f512280de4
Merge pull request #2 from hzrd149/changeset-release/master
Version Packages
2024-09-26 13:02:40 -05:00
github-actions[bot]
ba9b7c71aa Version Packages 2024-09-26 17:48:38 +00:00
hzrd149
b7b43cff10 add nginx cache invalidation
bundle nginx in docker image
switch from ndk to nostr-tools
2024-09-26 12:48:13 -05:00
hzrd149
88a9229633 fix logging around relays 2024-09-26 09:06:08 -05:00
hzrd149
a367dee259 only use $uri as cache key 2024-09-26 08:59:09 -05:00
hzrd149
48ae3882a3 Use users relays when searching for blossom servers 2024-09-26 08:51:32 -05:00
hzrd149
b53e9c4dc6 fix upload page 2024-09-25 16:38:38 -05:00
hzrd149
4a8145aaa8 add readme 2024-09-25 16:18:50 -05:00
hzrd149
b554843d92 small fixes 2024-09-25 16:06:51 -05:00
hzrd149
a62ccea79c remove pages pipeline 2024-09-25 15:37:23 -05:00
39 changed files with 3186 additions and 1192 deletions

2
.env
View File

@ -1,2 +0,0 @@
CACHE_PATH="in-memory"
LOOKUP_RELAYS=wss://user.kindpag.es,wss://purplepag.es

View File

@ -2,14 +2,40 @@
# can be in-memory, redis:// or sqlite://
CACHE_PATH="in-memory"
# A list of nostr relays to search
NOSTR_RELAYS=wss://nos.lol,wss://relay.damus.io
# How long to keep cached data (in seconds)
CACHE_TIME=3600
# A list of relays to find users relay lists (10002) and blossom servers (10063)
LOOKUP_RELAYS=wss://user.kindpag.es,wss://purplepag.es
# A list of nostr relays to listen to for new nsite events
SUBSCRIPTION_RELAYS=wss://nos.lol,wss://relay.damus.io
# A list of fallback blossom servers
BLOSSOM_SERVERS=https://cdn.satellite.earth
BLOSSOM_SERVERS="https://nostr.download,https://cdn.satellite.earth"
# The max file size to serve
MAX_FILE_SIZE='2 MB'
MAX_FILE_SIZE="2 MB"
# the hostname or ip of the upstream nginx proxy cache
NGINX_HOST='nginx'
# A nprofile pointer for an nsite to use as the default homepage
# Setting this will override anything in the ./public folder
NSITE_HOMEPAGE=""
# a local directory to download the homepage to
NSITE_HOMEPAGE_DIR="public"
# The public domain of the gateway (optional) (used to detect when to show the nsite homepage)
PUBLIC_DOMAIN="nsite.gateway.com"
# The nip-05 domain to use for name resolution
# NIP05_NAME_DOMAINS="example.com,nostr.other.site"
# If this is set, nsite will return the 'Onion-Location' header in responses
# ONION_HOST=https://<hostname>.onion
# Use a proxy auto config
# PAC_PROXY="file:///path/to/proxy.pac"
# Or set tor and i2p proxies separately
# I2P_PROXY="127.0.0.1:4447"
# TOR_PROXY="127.0.0.1:9050"

View File

@ -1,56 +0,0 @@
name: GitHub Pages
on:
# Runs on pushes targeting the default branch
push:
tags:
- v*.*.*
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
permissions:
contents: read
pages: write
id-token: write
# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
concurrency:
group: "pages"
cancel-in-progress: false
jobs:
# Single deploy job since we're just deploying
deploy:
environment:
name: github-pages
url: ${{ steps.deployment.outputs.page_url }}
runs-on: ubuntu-latest
steps:
- name: Checkout Repo
uses: actions/checkout@v4
- uses: pnpm/action-setup@v4
- name: Setup Node.js 20
uses: actions/setup-node@v4
with:
node-version: 20
cache: "pnpm"
- name: Build
run: pnpm run docs
- name: Setup Pages
uses: actions/configure-pages@v5
- name: Upload artifact
uses: actions/upload-pages-artifact@v3
with:
path: "./docs"
- name: Deploy to GitHub Pages
id: deployment
uses: actions/deploy-pages@v4

View File

@ -26,10 +26,10 @@ jobs:
- uses: pnpm/action-setup@v4
- name: Setup Node.js 20
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: 20
node-version-file: .nvmrc
cache: "pnpm"
- name: Install Dependencies

1
.gitignore vendored
View File

@ -3,3 +3,4 @@ build
.env
data
.netrc

1
.nvmrc Normal file
View File

@ -0,0 +1 @@
22

14
.vscode/launch.json vendored
View File

@ -14,6 +14,20 @@
"env": {
"DEBUG": "nsite,nsite:*"
}
},
{
"name": "dev-proxy",
"type": "node",
"request": "launch",
"args": ["./src/index.ts"],
"runtimeArgs": ["--loader", "@swc-node/register/esm"],
"cwd": "${workspaceRoot}",
"protocol": "inspector",
"internalConsoleOptions": "openOnSessionStart",
"outputCapture": "std",
"env": {
"DEBUG": "nsite,nsite:*"
}
}
]
}

View File

@ -1,4 +1,98 @@
# nsite-ts
# nsite-gateway
## 1.0.1
### Patch Changes
- 1473eee: Fix returning setup page when event can't be found for pubkey
## 1.0.0
### Major Changes
- ef5262f: Remove screenshots feature
- ef5262f: Remove nginx cache invalidations
### Minor Changes
- b37664b: Cleanup DNS pubkey resolution
- 9a04f63: Add support for resolving NIP-05 names on set domains
- b2b8e01: Make blossom requests in parallel
### Patch Changes
- ef5262f: Fix race condition when streaming blob
## 0.7.0
### Minor Changes
- 023e03e: Rename package to nsite-gateway
## 0.6.1
### Patch Changes
- 3747037: Add license file
## 0.6.0
### Minor Changes
- c84396e: Replace homepage with simple welcome page
- c84396e: Add option to download another nsite as a homepage
- 2ac847f: Add colors to logging
### Patch Changes
- 5be0822: Fix serving hidden files in .well-known
## 0.5.2
### Patch Changes
- 6704516: Fix package missing build folder
## 0.5.1
### Patch Changes
- ba71f35: bump dependencies
## 0.5.0
### Minor Changes
- db172d4: Add support for custom 404.html pages
## 0.4.0
### Minor Changes
- 7c3c9c0: Add ONION_HOST env variable
## 0.3.0
### Minor Changes
- 145f89d: Add support for ALL_PROXY env variable
- f25e240: Add screenshots for nsites
### Patch Changes
- 87fecfc: Update landing page
## 0.2.0
### Minor Changes
- b7b43cf: Bundle nginx in docker image
- b7b43cf: Add NGINX_CACHE_DIR for invalidating nginx cache
- b7b43cf: Add SUBSCRIPTION_RELAYS for listening for new events
### Patch Changes
- 48ae388: Use users relays when searching for blossom servers
## 0.1.0

8
Caddyfile Normal file
View File

@ -0,0 +1,8 @@
#{
# email your-email@example.com
#}
# This will match example.com and all its subdomains (*.example.com)
example.com, *.example.com {
reverse_proxy nsite:3000
}

View File

@ -1,5 +1,4 @@
# syntax=docker/dockerfile:1
FROM node:20-alpine AS base
FROM node:22-alpine AS base
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
@ -19,12 +18,18 @@ COPY src ./src
RUN pnpm build
FROM base AS main
# Setup user
RUN addgroup -S nsite && adduser -S nsite -G nsite
RUN chown -R nsite:nsite /app
# setup nsite
COPY --from=prod-deps /app/node_modules /app/node_modules
COPY --from=build ./app/build ./build
COPY ./public ./public
EXPOSE 80
ENV PORT="80"
EXPOSE 3000
ENV NSITE_PORT="3000"
ENTRYPOINT [ "node", "." ]
CMD ["node", "."]

21
LICENSE Normal file
View File

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2025 hzrd149
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

77
README.md Normal file
View File

@ -0,0 +1,77 @@
# nsite-gateway
A TypeScript implementation of [static websites on nostr](https://github.com/nostr-protocol/nips/pull/1538)
## Configuring
All configuration is done through the `.env` file. start by copying the example file and modifying it.
```sh
cp .env.example .env
```
## Running with npx
```sh
npx nsite-gateway
```
## Running with docker-compose
```sh
git clone https://github.com/hzrd149/nsite-gateway.git
cd nsite-gateway
docker compose up
```
Once the service is running you can access the gateway at `http://localhost:3000`
## Running with docker
The `ghcr.io/hzrd149/nsite-gateway` image can be used to run a http instance locally
```sh
docker run --rm -it --name nsite -p 3000:3000 ghcr.io/hzrd149/nsite-gateway
```
## Tor setup
First you need to install tor (`sudo apt install tor` on debian systems) or [Documentation](https://community.torproject.org/onion-services/setup/install/)
Then enable the tor service
```sh
sudo systemctl enable tor
sudo systemctl start tor
```
### Setup hidden service
Modify the torrc file to enable `HiddenServiceDir` and `HiddenServicePort`
```
HiddenServiceDir /var/lib/tor/hidden_service/
HiddenServicePort 80 127.0.0.1:8080
```
Then restart tor
```sh
sudo systemctl restart tor
```
Next get the onion address using `cat /var/lib/tor/hidden_service/hostname` and set the `ONION_HOST` variable in the `.env` file
```sh
# don't forget to start with http://
ONION_HOST="http://q457mvdt5smqj726m4lsqxxdyx7r3v7gufzt46zbkop6mkghpnr7z3qd.onion"
```
### Connecting to Tor and I2P relays and blossom servers
Install Tor ([Documentation](https://community.torproject.org/onion-services/setup/install/)) and optionally I2Pd ([Documentation](https://i2pd.readthedocs.io/en/latest/user-guide/install/)) and then add the `TOR_PROXY` and `I2P_PROXY` variables to the `.env` file
```sh
TOR_PROXY=127.0.0.1:9050
I2P_PROXY=127.0.0.1:4447
```

13
contrib/nsite.service Normal file
View File

@ -0,0 +1,13 @@
[Unit]
Description=nsite Server
After=network.target
[Service]
Type=simple
WorkingDirectory=/<path-to>/nsite-gateway
ExecStart=/usr/bin/node .
Restart=always
RestartSec=10
[Install]
WantedBy=multi-user.target

View File

@ -1,22 +1,36 @@
version: "3.7"
volumes:
cache: {}
services:
nginx:
image: nginx:alpine
ports:
- 8080:80
redis:
image: redis:alpine
restart: unless-stopped
command: redis-server --save 60 1 --loglevel warning
volumes:
- cache:/var/cache/nginx
- ./nginx/nginx.conf:/etc/nginx/nginx.conf
- ./nginx/default.conf:/etc/nginx/conf.d/default.conf
- redis-data:/data
nsite:
build: .
image: ghcr.io/hzrd149/nsite-ts:master
image: ghcr.io/hzrd149/nsite-gateway:master
restart: unless-stopped
environment:
LOOKUP_RELAYS: wss://user.kindpag.es,wss://purplepag.es
SUBSCRIPTION_RELAYS: wss://nostrue.com/,wss://nos.lol/,wss://relay.damus.io/,wss://purplerelay.com/
CACHE_PATH: redis://redis:6379
depends_on:
- redis
caddy:
image: caddy:alpine
restart: unless-stopped
ports:
- 3000:80
- "80:80"
- "443:443"
volumes:
- ./Caddyfile:/etc/caddy/Caddyfile:ro
- caddy_data:/data
- caddy_config:/config
depends_on:
- nsite
volumes:
redis-data:
caddy_data:
caddy_config:

View File

@ -1,28 +0,0 @@
server {
listen 80;
server_name nsite-proxy;
location / {
proxy_cache request_cache;
proxy_cache_valid 200 60m;
proxy_cache_valid 404 10m;
proxy_cache_key $scheme$host$request_uri;
proxy_cache_use_stale error timeout updating http_500 http_502 http_503 http_504;
add_header X-Cache $upstream_cache_status;
add_header X-Cache-Status $upstream_status;
expires 30d;
add_header Cache-Control "public, no-transform";
proxy_set_header Host $host;
proxy_pass http://nsite;
}
# Manual cache invalidation ( cant use proxy_cache_purge )
# location ~ /purge(/.*) {
# allow 127.0.0.1;
# deny all;
# proxy_cache_purge request_cache $scheme$proxy_host$1;
# }
}

View File

@ -1,33 +0,0 @@
user nginx;
worker_processes auto;
error_log /var/log/nginx/error.log notice;
pid /var/run/nginx.pid;
events {
worker_connections 1024;
}
http {
include /etc/nginx/mime.types;
default_type application/octet-stream;
# add custom cache
proxy_cache_path /var/cache/nginx levels=1:2 keys_zone=request_cache:10m max_size=10g inactive=60m use_temp_path=off;
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
access_log /var/log/nginx/access.log main;
sendfile on;
#tcp_nopush on;
keepalive_timeout 65;
gzip on;
include /etc/nginx/conf.d/*.conf;
}

View File

@ -1,6 +1,6 @@
{
"name": "nsite-ts",
"version": "0.1.0",
"name": "nsite-gateway",
"version": "1.0.1",
"description": "A blossom server implementation written in Typescript",
"main": "build/index.js",
"type": "module",
@ -8,6 +8,7 @@
"license": "MIT",
"scripts": {
"start": "node build/index.js",
"prepack": "tsc",
"build": "tsc",
"dev": "nodemon -i '**/data/**' --exec 'node' --loader @swc-node/register/esm src/index.ts",
"format": "prettier -w ."
@ -18,39 +19,48 @@
"public"
],
"dependencies": {
"@keyv/redis": "^3.0.1",
"@keyv/redis": "^4.3.2",
"@keyv/sqlite": "^4.0.1",
"@koa/cors": "^5.0.0",
"@nostr-dev-kit/ndk": "^2.10.0",
"blossom-client-sdk": "^1.1.1",
"dotenv": "^16.4.5",
"follow-redirects": "^1.15.6",
"keyv": "^5.0.1",
"koa": "^2.15.3",
"blossom-client-sdk": "^3.0.1",
"debug": "^4.4.0",
"dotenv": "^16.4.7",
"follow-redirects": "^1.15.9",
"keyv": "^5.3.2",
"koa": "^2.16.0",
"koa-morgan": "^1.0.1",
"koa-send": "^5.0.1",
"koa-static": "^5.0.0",
"mime": "^4.0.4",
"nostr-tools": "^2.7.2",
"websocket-polyfill": "^1.0.0",
"ws": "^8.18.0",
"mime": "^4.0.7",
"nostr-tools": "^2.12.0",
"nsite-cli": "^0.1.16",
"pac-proxy-agent": "^7.2.0",
"proxy-agent": "^6.5.0",
"websocket-polyfill": "1.0.0",
"ws": "^8.18.1",
"xbytes": "^1.9.1"
},
"devDependencies": {
"@changesets/cli": "^2.27.8",
"@swc-node/register": "^1.9.0",
"@swc/core": "^1.5.0",
"@types/better-sqlite3": "^7.6.9",
"@changesets/cli": "^2.28.1",
"@swc-node/register": "^1.10.10",
"@swc/core": "^1.11.16",
"@types/better-sqlite3": "^7.6.13",
"@types/debug": "^4.1.12",
"@types/follow-redirects": "^1.14.4",
"@types/koa": "^2.14.0",
"@types/koa": "^2.15.0",
"@types/koa-morgan": "^1.0.8",
"@types/koa-send": "^4.1.6",
"@types/koa-static": "^4.0.4",
"@types/koa__cors": "^5.0.0",
"@types/koa__router": "^12.0.4",
"@types/node": "^20.11.19",
"@types/ws": "^8.5.10",
"nodemon": "^3.0.3",
"prettier": "^3.3.3",
"typescript": "^5.3.3"
"@types/node": "^20.17.30",
"@types/proxy-from-env": "^1.0.4",
"@types/ws": "^8.18.1",
"esbuild": "^0.25.2",
"nodemon": "^3.1.9",
"pkg": "^5.8.1",
"prettier": "^3.5.3",
"typescript": "^5.8.3"
},
"resolutions": {
"websocket-polyfill": "1.0.0"

2723
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

51
public/404.html Normal file
View File

@ -0,0 +1,51 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>404 - Page Not Found</title>
<style>
body {
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Helvetica, Arial, sans-serif;
line-height: 1.6;
color: #333;
max-width: 800px;
margin: 40px auto;
padding: 0 20px;
background-color: #f5f5f5;
}
.container {
background-color: white;
padding: 30px;
border-radius: 8px;
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
}
h1 {
color: #2c3e50;
margin-bottom: 20px;
}
.info {
background-color: #f8f9fa;
border-left: 4px solid #dc3545;
padding: 15px;
margin: 20px 0;
}
</style>
</head>
<body>
<div class="container">
<h1>404 - Page Not Found</h1>
<div class="info">
<p>We couldn't find an nsite for this domain.</p>
<p>This could mean either:</p>
<ul>
<li>The domain is not configured to point to an nsite</li>
</ul>
</div>
<p>
For more information about setting up an nsite, please refer to the
<a href="https://github.com/hzrd149/nsite-gateway">documentation</a>
</p>
</div>
</body>
</html>

BIN
public/favicon.ico Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 15 KiB

View File

@ -3,30 +3,65 @@
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>nsite</title>
<script type="importmap">
{
"imports": {
"blossom-client-sdk": "https://esm.run/blossom-client-sdk",
"nostr-tools": "https://esm.run/nostr-tools"
<title>Welcome to nsite-gateway</title>
<style>
body {
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Helvetica, Arial, sans-serif;
line-height: 1.6;
color: #333;
max-width: 800px;
margin: 40px auto;
padding: 0 20px;
background-color: #f5f5f5;
}
.container {
background-color: white;
padding: 30px;
border-radius: 8px;
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
}
</script>
h1 {
color: #2c3e50;
margin-bottom: 20px;
}
.info {
background-color: #f8f9fa;
border-left: 4px solid #007bff;
padding: 15px;
margin: 20px 0;
}
code {
background-color: #f1f1f1;
padding: 2px 6px;
border-radius: 3px;
font-family: Monaco, monospace;
user-select: all;
word-break: break-all;
}
</style>
</head>
<body>
<h1>nsite-ts</h1>
<a href="https://github.com/hzrd149/nsite-ts" target="_blank">Source Code</a>
<div class="container">
<h1>Welcome to nsite-gateway</h1>
<p>If you're seeing this page, nsite-gateway has been successfully installed and is working.</p>
<h2>Latest nsites:</h2>
<div id="sites"></div>
<template id="site">
<div class="site">
<a class="pubkey link"></a>
<span class="date"></span>
<div class="info">
<p>
To set a custom homepage, set the <code>NSITE_HOMEPAGE</code> environment variable to your desired nprofile
</p>
<p>
Example:
<br />
<code
>NSITE_HOMEPAGE=nprofile1qqspspfsrjnurtf0jdyswm8jstustv7pu4qw3pn4u99etptvgzm4uvcpz9mhxue69uhkummnw3e82efwvdhk6qg5waehxw309aex2mrp0yhxgctdw4eju6t04mzfem</code
>
</p>
</div>
</template>
<script type="module" src="./main.js"></script>
<p>
For more information about configuring nsite-gateway, please refer to the
<a href="https://github.com/hzrd149/nsite-gateway">documentation</a>
</p>
</div>
</body>
</html>

View File

@ -1,32 +0,0 @@
import { nip19, SimplePool } from "nostr-tools";
const seen = new Set();
function addSite(event) {
if (seen.has(event.pubkey)) return;
seen.add(event.pubkey);
try {
const template = document.getElementById("site");
const site = template.content.cloneNode(true);
const npub = nip19.npubEncode(event.pubkey);
site.querySelector(".pubkey").textContent = npub;
site.querySelector(".link").href = new URL("/", `${location.protocol}//${npub}.${location.host}`).toString();
document.getElementById("sites").appendChild(site);
} catch (error) {
console.log("Failed to add site", event);
console.log(error);
}
}
const pool = new SimplePool();
console.log("Loading sites");
pool.subscribeMany(
["wss://relay.damus.io", "wss://nos.lol", "wss://nostr.wine"],
[{ kinds: [34128], "#d": ["/index.html"] }],
{
onevent: addSite,
},
);

View File

@ -1,46 +0,0 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>nsite</title>
<script type="importmap">
{
"imports": {
"blossom-client-sdk": "https://esm.run/blossom-client-sdk",
"nostr-tools": "https://esm.run/nostr-tools"
}
}
</script>
</head>
<body>
<label>relays</label>
<br />
<textarea type="text" id="relays" cols="50" rows="4"></textarea>
<br />
<br />
<label>blossom servers</label>
<br />
<textarea type="text" id="servers" cols="50" rows="4"></textarea>
<br />
<br />
<input type="file" id="files" webkitdirectory directory multiple />
<button id="upload-button">Upload nsite</button>
<div
id="log"
style="
max-height: 50em;
max-width: 80em;
width: 100%;
border: 1px solid gray;
min-height: 8em;
margin: 0.5em 0;
overflow: auto;
font-size: 0.8em;
gap: 0.1em;
white-space: pre;
"
></div>
<script type="module" src="./upload.js"></script>
</body>
</html>

View File

@ -1,142 +0,0 @@
import { multiServerUpload, BlossomClient } from "blossom-client-sdk";
import { SimplePool } from "nostr-tools";
const logContainer = document.getElementById("log");
function log(...args) {
const el = document.createElement("div");
el.innerText = args.join(" ");
logContainer.appendChild(el);
}
const uploadButton = document.getElementById("upload-button");
/** @type {HTMLInputElement} */
const filesInput = document.getElementById("files");
/**
* @param {FileSystemFileEntry} fileEntry
* @returns {File}
*/
export function readFileSystemFile(fileEntry) {
return new Promise((res, rej) => {
fileEntry.file(
(file) => res(file),
(err) => rej(err),
);
});
}
/**
* @param {FileSystemDirectoryEntry} directory
* @returns {FileSystemEntry[]}
*/
export function readFileSystemDirectory(directory) {
return new Promise((res, rej) => {
directory.createReader().readEntries(
(entries) => res(entries),
(err) => rej(err),
);
});
}
/**
* uploads a file system entry to blossom servers
* @param {FileSystemEntry} entry
* @returns {{file: File, path: string, sha256: string}[]}
*/
async function readFileSystemEntry(entry) {
const files = [];
if (entry instanceof FileSystemFileEntry && entry.isFile) {
try {
const file = await readFileSystemFile(entry);
const sha256 = await BlossomClient.getFileSha256(file);
const path = entry.fullPath;
files.push({ file, path, sha256 });
} catch (e) {
log("Failed to add" + entry.fullPath);
log(e.message);
}
} else if (entry instanceof FileSystemDirectoryEntry && entry.isDirectory) {
const entries = await readFileSystemDirectory(entry);
for (const e of entries) files.push(...(await readFileSystemEntry(e)));
}
return files;
}
/**
* uploads a file system entry to blossom servers
* @param {FileList} list
* @returns {{file: File, path: string, sha256: string}[]}
*/
async function readFileList(list) {
const files = [];
for (const file of list) {
const path = file.webkitRelativePath ? file.webkitRelativePath : file.name;
const sha256 = await BlossomClient.getFileSha256(file);
files.push({ file, path, sha256 });
}
return files;
}
const pool = new SimplePool();
/**
* uploads a file system entry to blossom servers
* @param {{file:File, path:string}} files
* @param {import("blossom-client-sdk").Signer} signer
* @param {*} auth
* @param {string[]} servers
* @param {string[]} relays
*/
async function uploadFiles(files, signer, auth, servers, relays) {
for (const { file, path, sha256 } of files) {
try {
const upload = multiServerUpload(servers, file, signer, auth);
let published = false;
for await (let { blob } of upload) {
if (!published) {
const signed = await signer({
kind: 34128,
content: "",
created_at: Math.round(Date.now() / 1000),
tags: [
["d", path],
["x", sha256],
],
});
await pool.publish(relays, signed);
log("Published", path, sha256, signed.id);
}
}
} catch (error) {
log(`Failed to upload ${path}`, error);
}
}
}
uploadButton.addEventListener("click", async () => {
if (!window.nostr) return alert("Missing NIP-07 signer");
const signer = (draft) => window.nostr.signEvent(draft);
const relays = document.getElementById("relays").value.split(/\n|,/);
const servers = document.getElementById("servers").value.split(/\n|,/);
try {
if (filesInput.files) {
const files = await readFileList(filesInput.files);
// strip leading dir
for (const file of files) file.path = file.path.replace(/^[^\/]+\//, "/");
log(`Found ${files.length} files`);
await uploadFiles(files, signer, undefined, servers, relays);
}
} catch (error) {
alert(`Failed to upload files: ${error.message}`);
}
});

View File

@ -1,35 +1,67 @@
import { getServersFromServerListEvent, USER_BLOSSOM_SERVER_LIST_KIND } from "blossom-client-sdk";
import { IncomingMessage } from "node:http";
import ndk from "./ndk.js";
import { BLOSSOM_SERVERS, MAX_FILE_SIZE } from "./env.js";
import { MAX_FILE_SIZE } from "./env.js";
import { makeRequestWithAbort } from "./helpers/http.js";
import { blobURLs } from "./cache.js";
import logger from "./logger.js";
export async function getUserBlossomServers(pubkey: string) {
const blossomServersEvent = await ndk.fetchEvent([{ kinds: [USER_BLOSSOM_SERVER_LIST_KIND], authors: [pubkey] }]);
const log = logger.extend("blossom");
return blossomServersEvent && getServersFromServerListEvent(blossomServersEvent).map((u) => u.toString());
/** Checks all servers for a blob and returns the URLs */
export async function findBlobURLs(sha256: string, servers: string[]): Promise<string[]> {
const cache = await blobURLs.get(sha256);
if (cache) return cache;
const urls = await Promise.all(
servers.map(async (server) => {
const url = new URL(sha256, server);
const check = await fetch(url, { method: "HEAD" }).catch(() => null);
if (check?.status === 200) return url.toString();
else return null;
}),
);
const filtered = urls.filter((url) => url !== null);
log(`Found ${filtered.length}/${servers.length} URLs for ${sha256}`);
await blobURLs.set(sha256, filtered);
return filtered;
}
// TODO: download the file to /tmp and verify it
export async function downloadFile(sha256: string, servers = BLOSSOM_SERVERS) {
for (const server of servers) {
/** Downloads a file from multiple servers */
export async function streamBlob(sha256: string, servers: string[]): Promise<IncomingMessage | undefined> {
if (servers.length === 0) return undefined;
// First find all available URLs
const urls = await findBlobURLs(sha256, servers);
if (urls.length === 0) return undefined;
// Try each URL sequentially with timeout
for (const urlString of urls) {
const controller = new AbortController();
let res: IncomingMessage | undefined = undefined;
try {
const { response } = await makeRequestWithAbort(new URL(sha256, server));
// Set up timeout to abort after 10s
const timeout = setTimeout(() => {
controller.abort();
}, 10_000);
const url = new URL(urlString);
const response = await makeRequestWithAbort(url, controller);
res = response;
clearTimeout(timeout);
if (!response.statusCode) throw new Error("Missing headers or status code");
const size = response.headers["content-length"];
if (size && parseInt(size) > MAX_FILE_SIZE) {
throw new Error("File too large");
}
if (size && parseInt(size) > MAX_FILE_SIZE) throw new Error("File too large");
if (response.statusCode >= 200 && response.statusCode < 300) {
return response;
} else {
// Consume response data to free up memory
response.resume();
}
if (response.statusCode >= 200 && response.statusCode < 300) return response;
} catch (error) {
// ignore error, try next server
if (res) res.resume();
continue; // Try next URL if this one fails
}
}
}

View File

@ -1,18 +1,19 @@
import Keyv from "keyv";
import pfs from "fs/promises";
import { CACHE_PATH } from "./env.js";
import Keyv, { KeyvOptions } from "keyv";
import { CACHE_PATH, CACHE_TIME } from "./env.js";
import logger from "./logger.js";
import { ParsedEvent } from "./events.js";
try {
await pfs.mkdir("data");
} catch (error) {}
const log = logger.extend("cache");
async function createStore() {
if (!CACHE_PATH || CACHE_PATH === "in-memory") return undefined;
else if (CACHE_PATH.startsWith("redis://")) {
const { default: KeyvRedis } = await import("@keyv/redis");
log(`Using redis cache at ${CACHE_PATH}`);
return new KeyvRedis(CACHE_PATH);
} else if (CACHE_PATH.startsWith("sqlite://")) {
const { default: KeyvSqlite } = await import("@keyv/sqlite");
log(`Using sqlite cache at ${CACHE_PATH}`);
return new KeyvSqlite(CACHE_PATH);
}
}
@ -20,24 +21,49 @@ async function createStore() {
const store = await createStore();
store?.on("error", (err) => {
console.log("Connection Error", err);
log("Connection Error", err);
process.exit(1);
});
const opts = store ? { store } : {};
const json: KeyvOptions = { serialize: JSON.stringify, deserialize: JSON.parse };
const opts: KeyvOptions = store ? { store } : {};
/** pubkey -> blossom servers */
export const userServers = new Keyv({
/** A cache that maps a domain to a pubkey ( domain -> pubkey ) */
export const pubkeyDomains = new Keyv<string | undefined>({
...opts,
...json,
namespace: "domains",
ttl: CACHE_TIME * 1000,
});
/** A cache that maps a pubkey to a set of blossom servers ( pubkey -> servers ) */
export const pubkeyServers = new Keyv<string[] | undefined>({
...opts,
...json,
namespace: "servers",
// cache servers for an hour
ttl: 60 * 60 * 1000,
ttl: CACHE_TIME * 1000,
});
/** pubkey -> relays */
export const userRelays = new Keyv({
/** A cache that maps a pubkey to a set of relays ( pubkey -> relays ) */
export const pubkeyRelays = new Keyv<string[] | undefined>({
...opts,
...json,
namespace: "relays",
// cache relays for an hour
ttl: 60 * 60 * 1000,
ttl: CACHE_TIME * 1000,
});
/** A cache that maps a pubkey + path to sha256 hash of the blob ( pubkey/path -> sha256 ) */
export const pathBlobs = new Keyv<ParsedEvent | undefined>({
...opts,
...json,
namespace: "paths",
ttl: CACHE_TIME * 1000,
});
/** A cache that maps a sha256 hash to a set of URLs that had the blob ( sha256 -> URLs ) */
export const blobURLs = new Keyv<string[] | undefined>({
...opts,
...json,
namespace: "blobs",
ttl: CACHE_TIME * 1000,
});

95
src/dns.ts Normal file
View File

@ -0,0 +1,95 @@
import dns from "node:dns";
import { nip05, nip19 } from "nostr-tools";
import { pubkeyDomains as pubkeyDomains } from "./cache.js";
import logger from "./logger.js";
import { NIP05_NAME_DOMAINS } from "./env.js";
/**
 * Resolves the CNAME records for a hostname.
 * Rejects with the underlying DNS error (e.g. ENOTFOUND, ENODATA) on failure.
 */
export function getCnameRecords(hostname: string): Promise<string[]> {
  // dns.promises provides the same resolution with built-in promise support,
  // replacing the hand-rolled callback-to-Promise wrapper
  return dns.promises.resolveCname(hostname);
}
/**
 * Resolves the TXT records for a hostname.
 * Each record is an array of string chunks, as returned by node:dns.
 * Rejects with the underlying DNS error on failure.
 */
export function getTxtRecords(hostname: string): Promise<string[][]> {
  // dns.promises replaces the manual new Promise wrapper around the callback API
  return dns.promises.resolveTxt(hostname);
}
/**
 * Extracts a pubkey from the first label of a hostname when that label is an
 * npub (e.g. "npub1...example.com" -> hex pubkey).
 * Returns undefined when the label is not an npub or cannot be decoded, so
 * callers can fall through to CNAME / TXT / NIP-05 resolution instead of
 * crashing on a malformed label.
 */
function extractPubkeyFromHostname(hostname: string): string | undefined {
  const [label] = hostname.split(".");
  if (!label.startsWith("npub")) return undefined;

  try {
    const parsed = nip19.decode(label);
    if (parsed.type !== "npub") throw new Error("Expected npub");
    return parsed.data;
  } catch (error) {
    // nip19.decode throws on invalid bech32 input; treat that as "no npub here"
    return undefined;
  }
}
const log = logger.extend("DNS");

/**
 * Resolves the nostr pubkey a hostname points to, trying in order:
 * the cache, an npub embedded in the hostname itself, CNAME records,
 * TXT records, and finally NIP-05 lookups on the configured name domains.
 * Returns undefined when no pubkey can be found; the (possibly undefined)
 * result is written back to the cache.
 */
export async function resolvePubkeyFromHostname(hostname: string): Promise<string | undefined> {
  if (hostname === "localhost") return undefined;

  const cached = await pubkeyDomains.get(hostname);
  if (cached) return cached;

  // check if domain contains an npub
  let pubkey = extractPubkeyFromHostname(hostname);

  if (!pubkey) {
    // try to get npub from CNAME records
    try {
      const cnameRecords = await getCnameRecords(hostname);
      for (const cname of cnameRecords) {
        const p = extractPubkeyFromHostname(cname);
        if (p) {
          pubkey = p;
          break;
        }
      }
    } catch (error) {
      // DNS failures are expected for hosts without CNAME records; fall through
    }
  }

  if (!pubkey) {
    // Try to get npub from TXT records. The labeled break exits both loops on
    // the first match (a plain `break` only left the inner loop, letting a
    // later record overwrite an already-found pubkey)
    try {
      const txtRecords = await getTxtRecords(hostname);
      outer: for (const txt of txtRecords) {
        for (const entry of txt) {
          const p = extractPubkeyFromHostname(entry);
          if (p) {
            pubkey = p;
            break outer;
          }
        }
      }
    } catch (error) {
      // DNS failures are expected for hosts without TXT records; fall through
    }
  }

  // Try to get npub from NIP-05 using the hostname's first label as the name
  if (!pubkey && NIP05_NAME_DOMAINS) {
    for (const domain of NIP05_NAME_DOMAINS) {
      try {
        const [name] = hostname.split(".");
        const result = await nip05.queryProfile(name + "@" + domain);
        if (result) {
          pubkey = result.pubkey;
          break;
        }
      } catch (err) {
        // NIP-05 lookups are best-effort; try the next domain
      }
    }
  }

  log(`Resolved ${hostname} to ${pubkey}`);
  await pubkeyDomains.set(hostname, pubkey);

  return pubkey;
}

View File

@ -1,13 +1,53 @@
import "dotenv/config";
import xbytes from "xbytes";
const LOOKUP_RELAYS = process.env.LOOKUP_RELAYS?.split(",") ?? ["wss://user.kindpag.es/", "wss://purplepag.es/"];
const NOSTR_RELAYS = process.env.NOSTR_RELAYS?.split(",") ?? [];
const BLOSSOM_SERVERS = process.env.BLOSSOM_SERVERS?.split(",") ?? [];
const NSITE_HOMEPAGE = process.env.NSITE_HOMEPAGE;
const NSITE_HOMEPAGE_DIR = process.env.NSITE_HOMEPAGE_DIR || "public";
const LOOKUP_RELAYS = process.env.LOOKUP_RELAYS?.split(",").map((u) => u.trim()) ?? [
"wss://user.kindpag.es/",
"wss://purplepag.es/",
];
const SUBSCRIPTION_RELAYS = process.env.SUBSCRIPTION_RELAYS?.split(",").map((u) => u.trim()) ?? [
"wss://nos.lol",
"wss://relay.damus.io",
];
const BLOSSOM_SERVERS = process.env.BLOSSOM_SERVERS?.split(",").map((u) => u.trim()) ?? [];
const MAX_FILE_SIZE = process.env.MAX_FILE_SIZE ? xbytes.parseSize(process.env.MAX_FILE_SIZE) : Infinity;
const NGINX_HOST = process.env.NGINX_HOST;
const CACHE_PATH = process.env.CACHE_PATH;
const CACHE_TIME = process.env.CACHE_TIME ? parseInt(process.env.CACHE_TIME) : 60 * 60;
export { NOSTR_RELAYS, LOOKUP_RELAYS, BLOSSOM_SERVERS, MAX_FILE_SIZE, NGINX_HOST, CACHE_PATH };
const NIP05_NAME_DOMAINS = process.env.NIP05_NAME_DOMAINS?.split(",").map((d) => d.trim());
const PUBLIC_DOMAIN = process.env.PUBLIC_DOMAIN;
const PAC_PROXY = process.env.PAC_PROXY;
const TOR_PROXY = process.env.TOR_PROXY;
const I2P_PROXY = process.env.I2P_PROXY;
const NSITE_HOST = process.env.NSITE_HOST || "0.0.0.0";
const NSITE_PORT = process.env.NSITE_PORT ? parseInt(process.env.NSITE_PORT) : 3000;
const HOST = `${NSITE_HOST}:${NSITE_PORT}`;
const ONION_HOST = process.env.ONION_HOST;
export {
NSITE_HOMEPAGE,
NSITE_HOMEPAGE_DIR,
SUBSCRIPTION_RELAYS,
LOOKUP_RELAYS,
BLOSSOM_SERVERS,
MAX_FILE_SIZE,
CACHE_PATH,
PAC_PROXY,
TOR_PROXY,
I2P_PROXY,
NSITE_HOST,
NSITE_PORT,
HOST,
ONION_HOST,
CACHE_TIME,
NIP05_NAME_DOMAINS,
PUBLIC_DOMAIN,
};

View File

@ -1,23 +1,26 @@
import { extname, isAbsolute, join } from "path";
import { extname, join } from "path";
import { NSITE_KIND } from "./const.js";
import ndk from "./ndk.js";
import { NDKRelaySet } from "@nostr-dev-kit/ndk";
import { requestEvents } from "./nostr.js";
import { pathBlobs } from "./cache.js";
export type ParsedEvent = {
pubkey: string;
path: string;
sha256: string;
created_at: number;
};
/** Returns all the `d` tags that should be searched for a given path */
export function getSearchPaths(path: string) {
const paths = [path];
// if the path does not have an extension, also look for index.html
if (extname(path) === "") paths.push(join(path, "index.html"));
// also look for relative paths
for (const p of Array.from(paths)) {
if (isAbsolute(p)) paths.push(p.replace(/^\//, ""));
}
return paths.filter((p) => !!p);
}
export function parseNsiteEvent(event: { pubkey: string; tags: string[][] }) {
export function parseNsiteEvent(event: { pubkey: string; tags: string[][]; created_at: number }) {
const path = event.tags.find((t) => t[0] === "d" && t[1])?.[1];
const sha256 = event.tags.find((t) => t[0] === "x" && t[1])?.[1];
@ -26,19 +29,29 @@ export function parseNsiteEvent(event: { pubkey: string; tags: string[][] }) {
pubkey: event.pubkey,
path: join("/", path),
sha256,
created_at: event.created_at,
};
}
export async function getNsiteBlobs(pubkey: string, path: string, relays?: string[]) {
const paths = getSearchPaths(path);
const events = await ndk.fetchEvents(
{ kinds: [NSITE_KIND], "#d": paths, authors: [pubkey] },
{},
relays && NDKRelaySet.fromRelayUrls(relays, ndk, true),
);
/** Returns the first blob found for a given path */
export async function getNsiteBlob(pubkey: string, path: string, relays: string[]): Promise<ParsedEvent | undefined> {
const key = pubkey + path;
return Array.from(events)
const cached = await pathBlobs.get(key);
if (cached) return cached;
// NOTE: hack, remove "/" paths since it breaks some relays
const paths = getSearchPaths(path).filter((p) => p !== "/");
const events = await requestEvents(relays, { kinds: [NSITE_KIND], "#d": paths, authors: [pubkey] });
// Sort the found blobs by the order of the paths array
const options = Array.from(events)
.map(parseNsiteEvent)
.filter((e) => !!e)
.sort((a, b) => paths.indexOf(a.path) - paths.indexOf(b.path));
// Remember the blob for this path
if (options.length > 0) await pathBlobs.set(key, options[0]);
return options[0];
}

View File

@ -1,59 +0,0 @@
import dns from "node:dns";
import { nip19 } from "nostr-tools";
/**
 * Resolves the CNAME records for a hostname.
 * Rejects with the underlying DNS error on failure.
 */
export function getCnameRecords(hostname: string): Promise<string[]> {
  // use the built-in promise API instead of hand-wrapping the callback form
  return dns.promises.resolveCname(hostname);
}
/**
 * Resolves the TXT records for a hostname.
 * Each record is an array of string chunks, as returned by node:dns.
 */
export function getTxtRecords(hostname: string): Promise<string[][]> {
  // use the built-in promise API instead of hand-wrapping the callback form
  return dns.promises.resolveTxt(hostname);
}
/**
 * Reads the first label of a hostname and, when it is an npub, decodes it to
 * a hex pubkey. Returns undefined for non-npub labels; throws when the label
 * decodes to a different nip19 type.
 */
function extractNpubFromHostname(hostname: string) {
  const label = hostname.split(".")[0];
  if (!label.startsWith("npub")) return;

  const decoded = nip19.decode(label);
  if (decoded.type !== "npub") throw new Error("Expected npub");
  return decoded.data;
}
export async function resolveNpubFromHostname(hostname: string) {
// check if domain contains an npub
let pubkey = extractNpubFromHostname(hostname);
if (pubkey) return pubkey;
if (hostname === "localhost") return undefined;
// try to get npub from CNAME or TXT records
try {
const cnameRecords = await getCnameRecords(hostname);
for (const cname of cnameRecords) {
const p = extractNpubFromHostname(cname);
if (p) return p;
}
} catch (error) {}
try {
const txtRecords = await getTxtRecords(hostname);
for (const txt of txtRecords) {
for (const entry of txt) {
const p = extractNpubFromHostname(entry);
if (p) return p;
}
}
} catch (error) {}
}

View File

@ -2,16 +2,20 @@ import { IncomingMessage } from "http";
import followRedirects from "follow-redirects";
const { http, https } = followRedirects;
export function makeRequestWithAbort(url: URL) {
return new Promise<{ response: IncomingMessage; controller: AbortController }>((res, rej) => {
const cancelController = new AbortController();
import agent from "../proxy.js";
export function makeRequestWithAbort(url: URL, controller: AbortController) {
return new Promise<IncomingMessage>((res, rej) => {
controller.signal.addEventListener("abort", () => rej(new Error("Aborted")));
const request = (url.protocol === "https:" ? https : http).get(
url,
{
signal: cancelController.signal,
signal: controller.signal,
agent,
},
(response) => {
res({ response, controller: cancelController });
res(response);
},
);
request.on("error", (err) => rej(err));

View File

@ -8,13 +8,27 @@ import fs from "node:fs";
import { fileURLToPath } from "node:url";
import mime from "mime";
import morgan from "koa-morgan";
import { npubEncode } from "nostr-tools/nip19";
import { nip19 } from "nostr-tools";
import { resolveNpubFromHostname } from "./helpers/dns.js";
import { getNsiteBlobs } from "./events.js";
import { downloadFile, getUserBlossomServers } from "./blossom.js";
import { BLOSSOM_SERVERS } from "./env.js";
import { userRelays, userServers } from "./cache.js";
import { getUserOutboxes } from "./ndk.js";
import { resolvePubkeyFromHostname } from "./dns.js";
import { getNsiteBlob } from "./events.js";
import { streamBlob } from "./blossom.js";
import {
BLOSSOM_SERVERS,
HOST,
NSITE_HOMEPAGE,
NSITE_HOMEPAGE_DIR,
NSITE_HOST,
NSITE_PORT,
ONION_HOST,
PUBLIC_DOMAIN,
SUBSCRIPTION_RELAYS,
} from "./env.js";
import pool, { getUserBlossomServers, getUserOutboxes } from "./nostr.js";
import logger from "./logger.js";
import { watchInvalidation } from "./invalidation.js";
import { NSITE_KIND } from "./const.js";
const __dirname = path.dirname(fileURLToPath(import.meta.url));
@ -41,78 +55,143 @@ app.use(async (ctx, next) => {
} catch (err) {
console.log(err);
ctx.status = 500;
ctx.body = { message: "Something went wrong" };
if (err instanceof Error) ctx.body = { message: err.message };
}
});
// map pubkeys to folders in sites dir
// handle nsite requests
app.use(async (ctx, next) => {
const pubkey = (ctx.state.pubkey = await resolveNpubFromHostname(ctx.hostname));
let pubkey = await resolvePubkeyFromHostname(ctx.hostname);
if (pubkey) {
console.log(`${pubkey}: Fetching relays`);
let fallthrough = false;
if (!pubkey && NSITE_HOMEPAGE && (!PUBLIC_DOMAIN || ctx.hostname === PUBLIC_DOMAIN)) {
const parsed = nip19.decode(NSITE_HOMEPAGE);
// TODO: use the relays in the nprofile
let relays = await userRelays.get<string[] | undefined>(pubkey);
if (!relays) {
relays = await getUserOutboxes(pubkey);
if (relays) await userRelays.set(pubkey, relays);
if (parsed.type === "nprofile") pubkey = parsed.data.pubkey;
else if (parsed.type === "npub") pubkey = parsed.data;
// Fallback to public dir if path cannot be found on the nsite homepage
if (pubkey) fallthrough = true;
}
console.log(`${pubkey}: Searching for ${ctx.path}`);
const blobs = await getNsiteBlobs(pubkey, ctx.path, relays);
if (!pubkey) {
if (fallthrough) return next();
if (blobs.length === 0) {
ctx.status = 404;
ctx.body = "Not Found";
ctx.body = fs.readFileSync(path.resolve(__dirname, "../public/404.html"), "utf-8");
return;
}
let servers = await userServers.get<string[] | undefined>(pubkey);
if (!servers) {
console.log(`${pubkey}: Searching for blossom servers`);
servers = (await getUserBlossomServers(pubkey)) ?? [];
// fetch relays
const relays = (await getUserOutboxes(pubkey)) || [];
await userServers.set(pubkey, servers);
// always check subscription relays
relays.push(...SUBSCRIPTION_RELAYS);
if (relays.length === 0) throw new Error("No relays found");
// fetch servers and events in parallel
let [servers, event] = await Promise.all([
getUserBlossomServers(pubkey, relays).then((s) => s || []),
getNsiteBlob(pubkey, ctx.path, relays).then((e) => {
if (!e) return getNsiteBlob(pubkey, "/404.html", relays);
else return e;
}),
]);
if (!event) {
if (fallthrough) return next();
ctx.status = 404;
ctx.body = `Not Found: no events found\npath: ${ctx.path}\nkind: ${NSITE_KIND}\npubkey: ${pubkey}\nrelays: ${relays.join(", ")}`;
return;
}
// always fetch from additional servers
servers.push(...BLOSSOM_SERVERS);
for (const blob of blobs) {
const res = await downloadFile(blob.sha256, servers);
if (servers.length === 0) throw new Error("Failed to find blossom servers");
if (res) {
const type = mime.getType(blob.path);
if (type) ctx.set("Content-Type", type);
try {
const res = await streamBlob(event.sha256, servers);
if (!res) {
ctx.status = 502;
ctx.body = `Failed to find blob\npath: ${event.path}\nsha256: ${event.sha256}\nservers: ${servers.join(", ")}`;
return;
}
const type = mime.getType(event.path);
if (type) ctx.set("content-type", type);
else if (res.headers["content-type"]) ctx.set("content-type", res.headers["content-type"]);
// pass headers along
if (res.headers["content-length"]) ctx.set("content-length", res.headers["content-length"]);
// set Onion-Location header
if (ONION_HOST) {
const url = new URL(ONION_HOST);
url.hostname = npubEncode(pubkey) + "." + url.hostname;
ctx.set("Onion-Location", url.toString().replace(/\/$/, ""));
}
// add cache headers
ctx.set("ETag", res.headers["etag"] || `"${event.sha256}"`);
ctx.set("Cache-Control", "public, max-age=3600");
ctx.set("Last-Modified", res.headers["last-modified"] || new Date(event.created_at * 1000).toUTCString());
ctx.status = 200;
ctx.body = res;
return;
}
}
} catch (error) {
ctx.status = 500;
ctx.body = "Failed to download blob";
} else await next();
ctx.body = `Failed to stream blob ${event.path}\n${error}`;
return;
}
});
// serve static files from public
try {
const www = path.resolve(process.cwd(), "public");
fs.statSync(www);
app.use(serve(www));
} catch (error) {
const www = path.resolve(__dirname, "../public");
app.use(serve(www));
if (ONION_HOST) {
app.use((ctx, next) => {
// set Onion-Location header if it was not set before
if (!ctx.get("Onion-Location") && ONION_HOST) {
ctx.set("Onion-Location", ONION_HOST);
}
app.listen(process.env.PORT || 3000, () => {
console.log("Started on port", process.env.PORT || 3000);
return next();
});
}
// serve static files from public
const serveOptions: serve.Options = {
hidden: true,
maxAge: 60 * 60 * 1000,
index: "index.html",
};
try {
const www = NSITE_HOMEPAGE_DIR;
fs.statSync(www);
app.use(serve(www, serveOptions));
} catch (error) {
const www = path.resolve(__dirname, "../public");
app.use(serve(www, serveOptions));
}
// start the server
app.listen({ host: NSITE_HOST, port: NSITE_PORT }, () => {
logger("Started on port", HOST);
});
// watch for invalidations
watchInvalidation();
process.on("unhandledRejection", (reason, promise) => {
console.error("Unhandled Rejection at:", promise, "reason:", reason);
});
async function shutdown() {
console.log("Shutting down...");
logger("Shutting down...");
pool.destroy();
process.exit(0);
}

31
src/invalidation.ts Normal file
View File

@ -0,0 +1,31 @@
import { npubEncode } from "nostr-tools/nip19";
import { SUBSCRIPTION_RELAYS } from "./env.js";
import { parseNsiteEvent } from "./events.js";
import pool from "./nostr.js";
import { NSITE_KIND } from "./const.js";
import logger from "./logger.js";
import { pathBlobs } from "./cache.js";
const log = logger.extend("invalidation");

/**
 * Subscribes to the configured relays for new nsite events and removes the
 * matching pubkey+path entries from the path cache so the new version is
 * fetched on the next request. No-op when no subscription relays are set.
 */
export function watchInvalidation() {
  if (SUBSCRIPTION_RELAYS.length === 0) return;

  logger(`Listening for new nsite events on: ${SUBSCRIPTION_RELAYS.join(", ")}`);

  // only care about events from the last hour onward
  pool.subscribeMany(SUBSCRIPTION_RELAYS, [{ kinds: [NSITE_KIND], since: Math.round(Date.now() / 1000) - 60 * 60 }], {
    onevent: async (event) => {
      try {
        const parsed = parseNsiteEvent(event);
        if (parsed) {
          // await so a failed delete is caught below instead of becoming an
          // unhandled rejection, and so the log reflects a completed delete
          await pathBlobs.delete(parsed.pubkey + parsed.path);
          log(`Invalidated ${npubEncode(parsed.pubkey) + parsed.path}`);
        }
      } catch (error) {
        // include the error for debugging and use the namespaced logger
        log(`Failed to invalidate ${event.id}`, error);
      }
    },
  });
}

8
src/logger.ts Normal file
View File

@ -0,0 +1,8 @@
import debug from "debug";

// enable default logging unless the DEBUG env variable already configured the "nsite" namespace
if (!debug.enabled("nsite")) debug.enable("nsite,nsite:*");

// root logger; modules derive sub-loggers via logger.extend("name")
const logger = debug("nsite");

export default logger;

View File

@ -1,17 +0,0 @@
import NDK from "@nostr-dev-kit/ndk";
import { LOOKUP_RELAYS, NOSTR_RELAYS } from "./env.js";

// Shared NDK instance connected to the lookup relays plus any configured nostr relays
const ndk = new NDK({
  explicitRelayUrls: [...LOOKUP_RELAYS, ...NOSTR_RELAYS],
});

// NOTE(review): connect() is not awaited — presumably NDK queues fetches until
// connections are ready; confirm before relying on early queries
ndk.connect();

/** Fetches a pubkey's kind-10002 relay list and returns its write relay URLs */
export async function getUserOutboxes(pubkey: string) {
  const mailboxes = await ndk.fetchEvent({ kinds: [10002], authors: [pubkey] });
  if (!mailboxes) return;
  // keep "r" tags with no marker or an explicit "write" marker
  return mailboxes.tags.filter((t) => t[0] === "r" && (t[2] === undefined || t[2] === "write")).map((t) => t[1]);
}

export default ndk;

View File

@ -1,26 +0,0 @@
import http from "node:http";
import { NGINX_HOST } from "./env.js";
/**
 * Asks an nginx cache (if NGINX_HOST is configured) to re-fetch a path by
 * issuing a GET through the nginx host with the original Host header.
 * Resolves false when NGINX_HOST is unset, true on a 2xx response, and
 * rejects on a non-2xx response or a connection error.
 */
export function invalidateCache(host: string, path: string) {
  if (!NGINX_HOST) return Promise.resolve(false);

  return new Promise<boolean>((resolve, reject) => {
    const req = http.request(
      {
        hostname: NGINX_HOST,
        method: "GET",
        port: 80,
        path,
        headers: {
          Host: host,
        },
      },
      (res) => {
        // drain the response body so the socket is released
        res.resume();
        if (res.statusCode && res.statusCode >= 200 && res.statusCode < 300) resolve(true);
        else reject(new Error("Failed to invalidate"));
      },
    );

    // without an 'error' listener a connection failure throws an uncaught
    // exception and the promise never settles
    req.on("error", reject);

    req.end();
  });
}

69
src/nostr.ts Normal file
View File

@ -0,0 +1,69 @@
import { Filter, NostrEvent, SimplePool } from "nostr-tools";
import { getServersFromServerListEvent, USER_BLOSSOM_SERVER_LIST_KIND } from "blossom-client-sdk";
import { LOOKUP_RELAYS } from "./env.js";
import { pubkeyRelays, pubkeyServers } from "./cache.js";
import logger from "./logger.js";
import { npubEncode } from "nostr-tools/nip19";
// Shared relay pool for all nostr queries in this module
const pool = new SimplePool();
const log = logger.extend("nostr");

/** Fetches a pubkey's kind-10002 relay list from the cache or the lookup relays */
export async function getUserOutboxes(pubkey: string) {
  const cached = await pubkeyRelays.get(pubkey);
  if (cached) return cached;

  const mailboxes = await pool.get(LOOKUP_RELAYS, { kinds: [10002], authors: [pubkey] });
  if (!mailboxes) return;

  // keep "r" tags with no marker or an explicit "write" marker
  const relays = mailboxes.tags
    .filter((t) => t[0] === "r" && (t[2] === undefined || t[2] === "write"))
    .map((t) => t[1]);

  log(`Found ${relays.length} relays for ${npubEncode(pubkey)}`);

  // cache the result (the original wrote it twice — once is enough)
  await pubkeyRelays.set(pubkey, relays);
  return relays;
}
/** Fetches a pubkeys blossom servers from the cache or relays */
export async function getUserBlossomServers(pubkey: string, relays: string[]) {
const cached = await pubkeyServers.get(pubkey);
if (cached) return cached;
const blossomServersEvent = await pool.get(relays, { kinds: [USER_BLOSSOM_SERVER_LIST_KIND], authors: [pubkey] });
const servers = blossomServersEvent
? getServersFromServerListEvent(blossomServersEvent).map((u) => u.toString())
: undefined;
// Save servers if found
if (servers) {
log(`Found ${servers.length} blossom servers for ${npubEncode(pubkey)}`);
await pubkeyServers.set(pubkey, servers);
}
return servers;
}
/**
 * Requests events matching a filter from a set of relays.
 * Resolves with every event received up to EOSE; rejects only when all relays
 * reported an error and no events were received at all.
 */
export async function requestEvents(relays: string[], filter: Filter): Promise<NostrEvent[]> {
  // Attempt connections first; failures are ignored here and surface through
  // the subscription's onclose reasons instead. Doing this outside the Promise
  // executor avoids the async-executor anti-pattern where a thrown error
  // would be swallowed instead of rejecting the returned promise.
  await Promise.allSettled(relays.map((url) => pool.ensureRelay(url).catch(() => {})));

  return new Promise<NostrEvent[]>((res, rej) => {
    const events: NostrEvent[] = [];

    const sub = pool.subscribeMany(relays, [filter], {
      onevent: (e) => events.push(e),
      oneose: () => sub.close(),
      onclose: (reasons) => {
        const errs = reasons.filter((r) => r !== "closed by caller");
        // only treat it as a failure when nothing was received at all
        if (errs.length > 0 && events.length === 0) rej(new Error(errs.join(", ")));
        else res(events);
      },
    });
  });
}

export default pool;

View File

@ -1,3 +1,13 @@
import { WebSocket } from "ws";
import { ClientOptions, WebSocket } from "ws";
import { ClientRequestArgs } from "http";
global.WebSocket = global.WebSocket || WebSocket;
import agent from "./proxy.js";
class ProxyWebSocket extends WebSocket {
constructor(address: string | URL, options?: ClientOptions | ClientRequestArgs) {
super(address, { agent, ...options });
}
}
// @ts-expect-error
global.WebSocket = agent ? ProxyWebSocket : WebSocket;

57
src/proxy.ts Normal file
View File

@ -0,0 +1,57 @@
import { ProxyAgent } from "proxy-agent";
import { PacProxyAgent } from "pac-proxy-agent";
import { I2P_PROXY, PAC_PROXY, TOR_PROXY } from "./env.js";
/**
 * Builds a "pac+data:" URI containing a generated PAC (proxy auto-config)
 * script that routes *.i2p hosts through I2P_PROXY and *.onion hosts through
 * TOR_PROXY, falling back to a direct connection for everything else.
 * NOTE(review): template-literal interiors are runtime strings; their layout
 * is preserved exactly (PAC syntax is whitespace-insensitive anyway).
 */
function buildPacURI() {
  const statements: string[] = [];

  if (I2P_PROXY) {
    statements.push(
      `
if (shExpMatch(host, "*.i2p"))
{
return "SOCKS5 ${I2P_PROXY}";
}
`.trim(),
    );
  }

  if (TOR_PROXY) {
    statements.push(
      `
if (shExpMatch(host, "*.onion"))
{
return "SOCKS5 ${TOR_PROXY}";
}
`.trim(),
    );
  }

  // default rule: anything not matched above connects directly
  statements.push('return "DIRECT";');

  const PACFile = `
// SPDX-License-Identifier: CC0-1.0
function FindProxyForURL(url, host)
{
${statements.join("\n")}
}
`.trim();

  // PacProxyAgent accepts a base64-encoded data: URI with the "pac+" scheme prefix
  return "pac+data:application/x-ns-proxy-autoconfig;base64," + btoa(PACFile);
}
/**
 * Picks the proxy agent for all outgoing connections:
 * an explicit PAC file (PAC_PROXY) takes priority, then a generated PAC
 * routing .onion/.i2p traffic (TOR_PROXY / I2P_PROXY), otherwise a ProxyAgent
 * that honors the standard proxy environment variables.
 */
function buildProxy() {
  if (PAC_PROXY) {
    console.log(`Using PAC proxy file`);
    return new PacProxyAgent(PAC_PROXY);
  } else if (TOR_PROXY || I2P_PROXY) {
    if (TOR_PROXY) console.log("Tor connection enabled");
    if (I2P_PROXY) console.log("I2P connection enabled");

    return new PacProxyAgent(buildPacURI());
  } else return new ProxyAgent({ keepAlive: true });
}

// Shared agent used by HTTP(S) requests and the WebSocket polyfill
const agent = buildProxy();

export default agent;