import { IncomingMessage } from "node:http";

import { MAX_FILE_SIZE } from "./env.js";
import { makeRequestWithAbort } from "./helpers/http.js";
import { blobURLs } from "./cache.js";
import logger from "./logger.js";

const log = logger.extend("blossom");

/** Checks all servers for a blob and returns the URLs */
export async function findBlobURLs(sha256: string, servers: string[]): Promise<string[]> {
  // Return the cached URL list if this hash has already been resolved
  const cache = await blobURLs.get(sha256);
  if (cache) return cache;

  // Send a HEAD request to every server in parallel to see which ones have the blob
  const urls = await Promise.all(
    servers.map(async (server) => {
      const url = new URL(sha256, server);

      const check = await fetch(url, { method: "HEAD" }).catch(() => null);
      if (check?.status === 200) return url.toString();
      else return null;
    }),
  );

  const filtered = urls.filter((url) => url !== null);

  log(`Found ${filtered.length}/${servers.length} URLs for ${sha256}`);
  await blobURLs.set(sha256, filtered);
  return filtered;
}
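
// Usage sketch (illustrative only; the hash and server URLs below are made-up
// placeholders, not values taken from this codebase):
//
//   const urls = await findBlobURLs(
//     "b1674191a88ec5cdd733e4240a81803105dc412d6c6708d53ab94fc248f4f553",
//     ["https://blossom.example.com/", "https://cdn.example.net/"],
//   );
//   if (urls.length === 0) log("blob not found on any known server");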

/** Downloads a file from multiple servers with optional range support */
export async function streamBlob(
  sha256: string,
  servers: string[],
  headers?: Record<string, string>,
): Promise<IncomingMessage | undefined> {
  if (servers.length === 0) return undefined;

  // First find all available URLs
  const urls = await findBlobURLs(sha256, servers);
  if (urls.length === 0) return undefined;

  // Try each URL sequentially with a timeout
  for (const urlString of urls) {
    const controller = new AbortController();
    let res: IncomingMessage | undefined = undefined;

    try {
      // Set up timeout to abort the request after 10s
      const timeout = setTimeout(() => {
        controller.abort();
      }, 10_000);

      const url = new URL(urlString);
      const response = await makeRequestWithAbort(url, controller, headers);
      res = response;
      clearTimeout(timeout);

      if (!response.statusCode) throw new Error("Missing status code");

      // Reject blobs that advertise a size above the configured limit
      const size = response.headers["content-length"];
      if (size && parseInt(size) > MAX_FILE_SIZE) throw new Error("File too large");

      // Accept any 2xx response, covering both 200 (full content) and 206 (partial content)
      if (response.statusCode >= 200 && response.statusCode < 300) return response;

      // Anything else (redirects, 404s, server errors) counts as a failure for this URL
      throw new Error(`Unexpected status code ${response.statusCode}`);
    } catch (error) {
      // Drain the response (if any) so the socket is released, then try the next URL
      if (res) res.resume();
      continue;
    }
  }

  // No server returned a usable response
  return undefined;
}
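
// Usage sketch (illustrative only): streaming a found blob into an HTTP response.
// `req`, `res`, `hash`, and `servers` are hypothetical Express-style placeholders,
// not part of this module.
//
//   const range = req.headers.range;
//   const stream = await streamBlob(hash, servers, range ? { range } : undefined);
//   if (!stream) return res.status(404).send("Blob not found");
//   if (stream.statusCode) res.status(stream.statusCode);
//   stream.pipe(res);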