Mirror of https://github.com/DocNR/POWR.git (synced 2025-06-06 18:31:03 +00:00)
Fix social feed issues: 1) Extended time range for feeds, 2) Improved database connection handling in SocialFeedCache, 3) Ensured POWR account is included in following feed
This commit is contained in: parent 9043179643, commit 8c6a7ba810
lib/db/services/SocialFeedCache.ts: 517 lines (new file)
@@ -0,0 +1,517 @@
// lib/db/services/SocialFeedCache.ts
import { SQLiteDatabase } from 'expo-sqlite';
import NDK, { NDKEvent, NDKFilter, NDKSubscriptionCacheUsage } from '@nostr-dev-kit/ndk-mobile';
import { EventCache } from './EventCache';
import { DbService } from '../db-service';
import { POWR_EVENT_KINDS } from '@/types/nostr-workout';
import { FeedItem } from '@/lib/hooks/useSocialFeed';
import { LRUCache } from 'typescript-lru-cache';

/**
 * Service for caching social feed events
 * This service provides offline access to social feed data
 */
export class SocialFeedCache {
  private db: DbService;
  private eventCache: EventCache;
  private ndk: NDK | null = null;

  // Write buffer for database operations
  private writeBuffer: { query: string; params: any[] }[] = [];
  private bufferFlushTimer: NodeJS.Timeout | null = null;
  private bufferFlushTimeout: number = 100; // milliseconds
  private processingTransaction: boolean = false;
  private retryCount: number = 0;
  private maxRetryCount: number = 5;
  private maxBackoffTime: number = 30000; // 30 seconds max backoff
  private maxBatchSize: number = 20; // Maximum operations per batch
  private dbAvailable: boolean = true; // Track database availability

  // LRU cache for tracking known events
  private knownEventIds: LRUCache<string, number>; // Event ID -> timestamp

  constructor(database: SQLiteDatabase) {
    this.db = new DbService(database);
    this.eventCache = new EventCache(database);

    // Initialize LRU cache for known events (limit to 1000 entries)
    this.knownEventIds = new LRUCache<string, number>({ maxSize: 1000 });

    // Ensure feed_cache table exists
    this.initializeTable();
  }

  /**
   * Set the NDK instance
   * @param ndk NDK instance
   */
  setNDK(ndk: NDK) {
    this.ndk = ndk;
  }

  /**
   * Add a database operation to the write buffer
   * @param query SQL query
   * @param params Query parameters
   */
  private bufferWrite(query: string, params: any[]) {
    // Limit buffer size to prevent memory issues
    if (this.writeBuffer.length >= 1000) {
      console.warn('[SocialFeedCache] Write buffer is full, dropping oldest operation');
      this.writeBuffer.shift(); // Remove oldest operation
    }

    this.writeBuffer.push({ query, params });

    if (!this.bufferFlushTimer) {
      this.bufferFlushTimer = setTimeout(() => this.flushWriteBuffer(), this.bufferFlushTimeout);
    }
  }

  /**
   * Check if the database is available
   * @returns True if the database is available
   */
  private isDbAvailable(): boolean {
    return this.dbAvailable && !!this.db;
  }

  /**
   * Flush the write buffer, executing queued operations in a transaction
   */
  private async flushWriteBuffer() {
    if (this.writeBuffer.length === 0 || this.processingTransaction) return;

    // Check if database is available
    if (!this.isDbAvailable()) {
      console.log('[SocialFeedCache] Database not available, delaying flush');
      this.scheduleNextFlush(true); // Schedule with backoff
      return;
    }

    // Take only a batch of operations to process at once
    const bufferCopy = [...this.writeBuffer].slice(0, this.maxBatchSize);
    this.writeBuffer = this.writeBuffer.slice(bufferCopy.length);

    this.processingTransaction = true;

    try {
      // Check if we've exceeded the maximum retry count
      if (this.retryCount > this.maxRetryCount) {
        console.warn(`[SocialFeedCache] Exceeded maximum retry count (${this.maxRetryCount}), dropping ${bufferCopy.length} operations`);
        // Reset retry count but don't retry these operations
        this.retryCount = 0;
        this.processingTransaction = false;
        this.scheduleNextFlush();
        return;
      }

      // Increment retry count before attempting transaction
      this.retryCount++;

      // Execute the transaction
      await this.db.withTransactionAsync(async () => {
        for (const { query, params } of bufferCopy) {
          try {
            await this.db.runAsync(query, params);
          } catch (innerError) {
            // Log individual query errors but continue with other queries
            console.error(`[SocialFeedCache] Error executing query: ${query}`, innerError);
            // Don't rethrow to allow other queries to proceed
          }
        }
      });

      // Success - reset retry count
      this.retryCount = 0;
      this.dbAvailable = true; // Mark database as available
    } catch (error) {
      console.error('[SocialFeedCache] Error flushing write buffer:', error);

      // Check for database connection errors
      if (error instanceof Error &&
          (error.message.includes('closed resource') ||
           error.message.includes('Database not available'))) {
        // Mark database as unavailable
        this.dbAvailable = false;
        console.warn('[SocialFeedCache] Database connection issue detected, marking as unavailable');

        // Add all operations back to the buffer
        this.writeBuffer = [...bufferCopy, ...this.writeBuffer];
      } else {
        // For other errors, add operations back to the buffer
        // but only if they're not already there (avoid duplicates)
        for (const op of bufferCopy) {
          if (!this.writeBuffer.some(item =>
            item.query === op.query &&
            JSON.stringify(item.params) === JSON.stringify(op.params)
          )) {
            // Add back to the beginning of the buffer to retry sooner
            this.writeBuffer.unshift(op);
          }
        }
      }
    } finally {
      this.processingTransaction = false;
      this.scheduleNextFlush();
    }
  }

  /**
   * Schedule the next buffer flush with optional backoff
   */
  private scheduleNextFlush(withBackoff: boolean = false) {
    if (this.bufferFlushTimer) {
      clearTimeout(this.bufferFlushTimer);
      this.bufferFlushTimer = null;
    }

    if (this.writeBuffer.length > 0) {
      let delay = this.bufferFlushTimeout;

      if (withBackoff) {
        // Use exponential backoff based on retry count
        delay = Math.min(
          this.bufferFlushTimeout * Math.pow(2, this.retryCount),
          this.maxBackoffTime
        );
      }

      console.log(`[SocialFeedCache] Scheduling next flush in ${delay}ms (retry: ${this.retryCount})`);
      this.bufferFlushTimer = setTimeout(() => this.flushWriteBuffer(), delay);
    }
  }

  /**
   * Initialize the feed cache table
   */
  private async initializeTable(): Promise<void> {
    try {
      // Create feed_cache table if it doesn't exist
      await this.db.runAsync(`
        CREATE TABLE IF NOT EXISTS feed_cache (
          event_id TEXT NOT NULL,
          feed_type TEXT NOT NULL,
          created_at INTEGER NOT NULL,
          cached_at INTEGER NOT NULL,
          PRIMARY KEY (event_id, feed_type)
        )
      `);

      // Create index for faster queries
      await this.db.runAsync(`
        CREATE INDEX IF NOT EXISTS idx_feed_cache_type_time
        ON feed_cache (feed_type, created_at DESC)
      `);

      console.log('[SocialFeedCache] Feed cache table initialized');
    } catch (error) {
      console.error('[SocialFeedCache] Error initializing table:', error);
    }
  }

  /**
   * Cache a feed event
   * @param event NDK event to cache
   * @param feedType Type of feed (following, powr, global)
   */
  async cacheEvent(event: NDKEvent, feedType: string): Promise<void> {
    if (!event.id || !event.created_at) return;

    try {
      // Skip if we've already seen this event with a newer or equal timestamp
      const existingTimestamp = this.knownEventIds.get(event.id);
      if (existingTimestamp && existingTimestamp >= event.created_at) {
        return;
      }

      // Update our in-memory cache
      this.knownEventIds.set(event.id, event.created_at);

      // Check if event already exists in the event cache
      const existingEvent = await this.eventCache.getEvent(event.id);

      // If the event doesn't exist in cache, we'll add it
      if (!existingEvent) {
        // Buffer the event insert
        const eventData = {
          id: event.id,
          pubkey: event.pubkey || '',
          kind: event.kind || 0,
          created_at: event.created_at,
          content: event.content || '',
          sig: event.sig || '',
          tags: event.tags || []
        };

        // Buffer the event insert
        this.bufferWrite(
          `INSERT OR REPLACE INTO nostr_events
           (id, pubkey, kind, created_at, content, sig, raw_event, received_at)
           VALUES (?, ?, ?, ?, ?, ?, ?, ?)`,
          [
            eventData.id,
            eventData.pubkey,
            eventData.kind,
            eventData.created_at,
            eventData.content,
            eventData.sig,
            JSON.stringify(eventData),
            Date.now()
          ]
        );

        // Buffer the tag deletes and inserts
        this.bufferWrite(
          'DELETE FROM event_tags WHERE event_id = ?',
          [eventData.id]
        );

        if (eventData.tags && eventData.tags.length > 0) {
          for (let i = 0; i < eventData.tags.length; i++) {
            const tag = eventData.tags[i];
            if (tag.length >= 2) {
              this.bufferWrite(
                'INSERT INTO event_tags (event_id, name, value, index_num) VALUES (?, ?, ?, ?)',
                [eventData.id, tag[0], tag[1], i]
              );
            }
          }
        }
      }

      // Always add to feed cache
      this.bufferWrite(
        `INSERT OR REPLACE INTO feed_cache
         (event_id, feed_type, created_at, cached_at)
         VALUES (?, ?, ?, ?)`,
        [
          event.id,
          feedType,
          event.created_at,
          Date.now()
        ]
      );
    } catch (error) {
      console.error('[SocialFeedCache] Error caching event:', error);
    }
  }

  /**
   * Get cached events for a feed
   * @param feedType Type of feed (following, powr, global)
   * @param limit Maximum number of events to return
   * @param since Timestamp to fetch events since (inclusive)
   * @param until Timestamp to fetch events until (inclusive)
   * @returns Array of cached events
   */
  async getCachedEvents(
    feedType: string,
    limit: number = 20,
    since?: number,
    until?: number
  ): Promise<NDKEvent[]> {
    try {
      // Build query
      let query = `
        SELECT event_id
        FROM feed_cache
        WHERE feed_type = ?
      `;

      const params: any[] = [feedType];

      if (since) {
        query += ' AND created_at >= ?';
        params.push(since);
      }

      if (until) {
        query += ' AND created_at <= ?';
        params.push(until);
      }

      // Order by created_at descending (newest first)
      query += ' ORDER BY created_at DESC';

      if (limit) {
        query += ' LIMIT ?';
        params.push(limit);
      }

      // Get event IDs
      const rows = await this.db.getAllAsync<{ event_id: string }>(query, params);

      // Get full events
      const events: NDKEvent[] = [];

      for (const row of rows) {
        const event = await this.eventCache.getEvent(row.event_id);
        if (event && this.ndk) {
          // Convert to NDKEvent
          const ndkEvent = new NDKEvent(this.ndk);
          if (event.id) {
            ndkEvent.id = event.id;
          } else {
            // Skip events without an ID
            continue;
          }
          ndkEvent.pubkey = event.pubkey || '';
          ndkEvent.kind = event.kind || 0;
          ndkEvent.created_at = event.created_at || Math.floor(Date.now() / 1000);
          ndkEvent.content = event.content || '';
          ndkEvent.sig = event.sig || '';
          ndkEvent.tags = event.tags || [];

          events.push(ndkEvent);
        }
      }

      return events;
    } catch (error) {
      console.error('[SocialFeedCache] Error getting cached events:', error);
      return [];
    }
  }

  /**
   * Cache a referenced event (quoted content)
   * @param eventId ID of the referenced event
   * @param kind Kind of the referenced event
   */
  async cacheReferencedEvent(eventId: string, kind: number): Promise<NDKEvent | null> {
    if (!this.ndk) return null;

    try {
      // Check if already cached
      const cachedEvent = await this.eventCache.getEvent(eventId);
      if (cachedEvent) {
        // Convert to NDKEvent
        const ndkEvent = new NDKEvent(this.ndk);
        if (cachedEvent.id) {
          ndkEvent.id = cachedEvent.id;
        } else {
          // Skip events without an ID
          return null;
        }
        ndkEvent.pubkey = cachedEvent.pubkey || '';
        ndkEvent.kind = cachedEvent.kind || 0;
        ndkEvent.created_at = cachedEvent.created_at || Math.floor(Date.now() / 1000);
        ndkEvent.content = cachedEvent.content || '';
        ndkEvent.sig = cachedEvent.sig || '';
        ndkEvent.tags = cachedEvent.tags || [];

        return ndkEvent;
      }

      // Not cached, try to fetch from network
      const filter: NDKFilter = {
        ids: [eventId] as string[],
        kinds: [kind] as number[],
      };

      const events = await this.ndk.fetchEvents(filter, {
        cacheUsage: NDKSubscriptionCacheUsage.CACHE_FIRST
      });

      if (events.size > 0) {
        const event = Array.from(events)[0];

        try {
          // Cache the event
          await this.eventCache.setEvent({
            id: event.id,
            pubkey: event.pubkey || '',
            kind: event.kind || 0,
            created_at: event.created_at || Math.floor(Date.now() / 1000),
            content: event.content || '',
            sig: event.sig || '',
            tags: event.tags || []
          }, true); // Skip if already exists
        } catch (error) {
          console.error('[SocialFeedCache] Error caching referenced event:', error);
          // Continue even if caching fails - we can still return the event
        }

        return event;
      }

      return null;
    } catch (error) {
      console.error('[SocialFeedCache] Error caching referenced event:', error);
      return null;
    }
  }

  /**
   * Get a cached event by ID
   * @param eventId Event ID
   * @returns Cached event or null
   */
  async getCachedEvent(eventId: string): Promise<NDKEvent | null> {
    if (!this.ndk) return null;

    try {
      const event = await this.eventCache.getEvent(eventId);
      if (!event) return null;

      // Convert to NDKEvent
      const ndkEvent = new NDKEvent(this.ndk);
      if (event.id) {
        ndkEvent.id = event.id;
      } else {
        // Skip events without an ID
        return null;
      }
      ndkEvent.pubkey = event.pubkey || '';
      ndkEvent.kind = event.kind || 0;
      ndkEvent.created_at = event.created_at || Math.floor(Date.now() / 1000);
      ndkEvent.content = event.content || '';
      ndkEvent.sig = event.sig || '';
      ndkEvent.tags = event.tags || [];

      return ndkEvent;
    } catch (error) {
      console.error('[SocialFeedCache] Error getting cached event:', error);
      return null;
    }
  }

  /**
   * Clear old cached events
   * @param maxAgeDays Maximum age in days (default: 7)
   */
  async clearOldCache(maxAgeDays: number = 7): Promise<void> {
    try {
      const maxAgeMs = maxAgeDays * 24 * 60 * 60 * 1000;
      const cutoffTime = Date.now() - maxAgeMs;
      const cutoffTimestamp = Math.floor(cutoffTime / 1000);

      // Get old event IDs
      const oldEvents = await this.db.getAllAsync<{ event_id: string }>(
        `SELECT event_id FROM feed_cache WHERE created_at < ?`,
        [cutoffTimestamp]
      );

      // Delete from feed_cache
      await this.db.runAsync(
        `DELETE FROM feed_cache WHERE created_at < ?`,
        [cutoffTimestamp]
      );

      console.log(`[SocialFeedCache] Cleared ${oldEvents.length} old events from feed cache`);
    } catch (error) {
      console.error('[SocialFeedCache] Error clearing old cache:', error);
    }
  }
}

// Create singleton instance
let socialFeedCache: SocialFeedCache | null = null;

export function getSocialFeedCache(database: SQLiteDatabase): SocialFeedCache {
  if (!socialFeedCache) {
    socialFeedCache = new SocialFeedCache(database);
  }
  return socialFeedCache;
}
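The cache above is exposed through the getSocialFeedCache singleton, so every caller shares one write buffer and one LRU of known event IDs. A minimal usage sketch follows; it is not part of this commit, and it assumes the SQLite handle comes from expo-sqlite's openDatabaseAsync, that an NDK instance is already connected, and that 'powr.db' is a placeholder database name:

// Sketch only: wiring SocialFeedCache up directly (hypothetical caller; normally SocialFeedService does this)
import * as SQLite from 'expo-sqlite';
import NDK, { NDKEvent } from '@nostr-dev-kit/ndk-mobile';
import { getSocialFeedCache } from '@/lib/db/services/SocialFeedCache';

async function demoCache(ndk: NDK, incoming: NDKEvent) {
  const db = await SQLite.openDatabaseAsync('powr.db'); // assumed database name
  const cache = getSocialFeedCache(db); // singleton; creates the feed_cache table on first use
  cache.setNDK(ndk); // required so cached rows can be rehydrated as NDKEvents

  // Writes are buffered and flushed in batches of up to 20 operations.
  // If the SQLite connection reports a 'closed resource' error, the flush
  // backs off exponentially (100ms, 200ms, 400ms, ... capped at 30s)
  // instead of failing hard - this is the connection-handling fix in the commit.
  await cache.cacheEvent(incoming, 'following');

  // Later, e.g. while offline, read the cached feed back:
  const since = Math.floor(Date.now() / 1000) - 30 * 24 * 60 * 60;
  const events = await cache.getCachedEvents('following', 30, since);
  console.log(`restored ${events.length} cached events`);
}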
lib/hooks/useSocialFeed.ts
@@ -1,9 +1,12 @@
 // hooks/useSocialFeed.ts
-import { useState, useEffect, useRef, useCallback } from 'react';
+import { useState, useEffect, useRef, useCallback, useMemo } from 'react';
-import { NDKEvent } from '@nostr-dev-kit/ndk-mobile';
+import { NDKEvent, NDKSubscriptionCacheUsage } from '@nostr-dev-kit/ndk-mobile';
 import { nip19 } from 'nostr-tools';
 import { SocialFeedService } from '@/lib/social/socialFeedService';
 import { useNDK } from '@/lib/hooks/useNDK';
+import { SQLiteDatabase } from 'expo-sqlite';
+import { ConnectivityService } from '@/lib/db/services/ConnectivityService';
+import { useDatabase } from '@/components/DatabaseProvider';
 import {
   parseWorkoutRecord,
   parseExerciseTemplate,
@@ -29,7 +32,7 @@ export type FeedItem = {
 
 export function useSocialFeed(
   options: {
-    feedType: 'following' | 'powr' | 'global';
+    feedType: 'following' | 'powr' | 'global' | 'profile';
     since?: number;
     until?: number;
     limit?: number;
@@ -38,10 +41,12 @@ export function useSocialFeed(
   }
 ) {
   const { ndk } = useNDK();
+  const db = useDatabase();
   const [feedItems, setFeedItems] = useState<FeedItem[]>([]);
   const [loading, setLoading] = useState(true);
   const [hasMore, setHasMore] = useState(true);
   const [oldestTimestamp, setOldestTimestamp] = useState<number | null>(null);
+  const [isOffline, setIsOffline] = useState(false);
 
   // Keep track of seen events to prevent duplicates
   const seenEvents = useRef(new Set<string>());
@@ -49,6 +54,45 @@ export function useSocialFeed(
   const subscriptionRef = useRef<{ unsubscribe: () => void } | null>(null);
   const socialServiceRef = useRef<SocialFeedService | null>(null);
+
+  // Subscription cooldown to prevent rapid resubscriptions
+  const subscriptionCooldown = useRef<NodeJS.Timeout | null>(null);
+  const cooldownPeriod = 2000; // 2 seconds
+  const subscriptionAttempts = useRef(0);
+  const maxSubscriptionAttempts = 3;
+
+  // Initialize social service
+  useEffect(() => {
+    if (ndk && !socialServiceRef.current) {
+      try {
+        console.log('[useSocialFeed] Initializing SocialFeedService');
+        socialServiceRef.current = new SocialFeedService(ndk, db);
+        console.log('[useSocialFeed] SocialFeedService initialized successfully');
+      } catch (error) {
+        console.error('[useSocialFeed] Error initializing SocialFeedService:', error);
+        // Log more detailed error information
+        if (error instanceof Error) {
+          console.error(`[useSocialFeed] Error details: ${error.message}`);
+          if (error.stack) {
+            console.error(`[useSocialFeed] Stack trace: ${error.stack}`);
+          }
+        }
+
+        // Try again after a delay
+        const retryTimer = setTimeout(() => {
+          console.log('[useSocialFeed] Retrying SocialFeedService initialization');
+          try {
+            socialServiceRef.current = new SocialFeedService(ndk, db);
+            console.log('[useSocialFeed] SocialFeedService initialized successfully on retry');
+          } catch (retryError) {
+            console.error('[useSocialFeed] Failed to initialize SocialFeedService on retry:', retryError);
+          }
+        }, 3000);
+
+        return () => clearTimeout(retryTimer);
+      }
+    }
+  }, [ndk, db]);
 
   // Process event and add to feed
   const processEvent = useCallback((event: NDKEvent) => {
     // Skip if we've seen this event before or event has no ID
@@ -229,17 +273,7 @@ export function useSocialFeed(
         };
         break;
 
-      case 30024: // Draft long-form content - only show from POWR account
-        if (event.pubkey === POWR_PUBKEY_HEX && options.feedType === 'powr') {
-          feedItem = {
-            id: event.id,
-            type: 'article',
-            originalEvent: event,
-            parsedContent: parseLongformContent(event),
-            createdAt: timestamp
-          };
-        }
-        break;
+      // We no longer process kind 30024 (draft articles) in any feed
 
       default:
        // Ignore other event kinds
@@ -281,46 +315,231 @@ export function useSocialFeed(
     }
   }, [oldestTimestamp, options.feedType]);
+
+  // Check connectivity status
+  useEffect(() => {
+    const checkConnectivity = async () => {
+      const isOnline = await ConnectivityService.getInstance().checkNetworkStatus();
+      setIsOffline(!isOnline);
+    };
+
+    checkConnectivity();
+
+    // Set up interval to check connectivity
+    const interval = setInterval(checkConnectivity, 10000); // Check every 10 seconds
+
+    return () => clearInterval(interval);
+  }, []);
+
+  // Memoize feed options to prevent unnecessary resubscriptions
+  const feedOptions = useMemo(() => {
+    // Default time ranges based on feed type
+    let defaultTimeRange: number;
+
+    // Use longer time ranges for following and POWR feeds since they have less content
+    switch (options.feedType) {
+      case 'following':
+      case 'powr':
+        // 30 days for following and POWR feeds
+        defaultTimeRange = 30 * 24 * 60 * 60;
+        break;
+      case 'profile':
+        // 60 days for profile feeds
+        defaultTimeRange = 60 * 24 * 60 * 60;
+        break;
+      case 'global':
+      default:
+        // 7 days for global feed
+        defaultTimeRange = 7 * 24 * 60 * 60;
+        break;
+    }
+
+    // Calculate default since timestamp
+    const defaultSince = Math.floor(Date.now() / 1000) - defaultTimeRange;
+
+    // Only use the provided since if it's explicitly set in options
+    // Otherwise use our default
+    const since = options.since || defaultSince;
+
+    return {
+      feedType: options.feedType,
+      since,
+      until: options.until,
+      limit: options.limit || 30,
+      authors: options.authors,
+      kinds: options.kinds,
+    };
+  }, [options.feedType, options.authors, options.kinds, options.limit, options.since, options.until]);
+
   // Load feed data
   const loadFeed = useCallback(async () => {
     if (!ndk) return;
+
+    // Prevent rapid resubscriptions
+    if (subscriptionCooldown.current) {
+      console.log('[useSocialFeed] Subscription on cooldown, skipping');
+      return;
+    }
+
+    // Track subscription attempts to prevent infinite loops
+    subscriptionAttempts.current += 1;
+    if (subscriptionAttempts.current > maxSubscriptionAttempts) {
+      console.error(`[useSocialFeed] Too many subscription attempts (${subscriptionAttempts.current}), giving up`);
+      setLoading(false);
+      return;
+    }
+
     setLoading(true);
+
     // Initialize social service if not already done
     if (!socialServiceRef.current) {
-      socialServiceRef.current = new SocialFeedService(ndk);
+      try {
+        console.log('[useSocialFeed] Initializing SocialFeedService in loadFeed');
+        socialServiceRef.current = new SocialFeedService(ndk, db);
+        console.log('[useSocialFeed] SocialFeedService initialized successfully in loadFeed');
+      } catch (error) {
+        console.error('[useSocialFeed] Error initializing SocialFeedService in loadFeed:', error);
+        // Log more detailed error information
+        if (error instanceof Error) {
+          console.error(`[useSocialFeed] Error details: ${error.message}`);
+          if (error.stack) {
+            console.error(`[useSocialFeed] Stack trace: ${error.stack}`);
+          }
+        }
+
+        setLoading(false);
+        return; // Exit early if we can't initialize the service
+      }
     }
+
     // Clean up any existing subscription
     if (subscriptionRef.current) {
+      console.log(`[useSocialFeed] Cleaning up existing subscription for ${feedOptions.feedType} feed`);
       subscriptionRef.current.unsubscribe();
       subscriptionRef.current = null;
     }
+
+    // Set a cooldown to prevent rapid resubscriptions
+    // Increased from 2 seconds to 5 seconds to reduce subscription frequency
+    subscriptionCooldown.current = setTimeout(() => {
+      subscriptionCooldown.current = null;
+      // Reset attempt counter after cooldown period
+      subscriptionAttempts.current = 0;
+    }, 5000); // Increased cooldown period
+
     try {
-      console.log(`Loading ${options.feedType} feed with authors:`, options.authors);
+      console.log(`[useSocialFeed] Loading ${feedOptions.feedType} feed with authors:`, feedOptions.authors);
+      console.log(`[useSocialFeed] Time range: since=${new Date(feedOptions.since * 1000).toISOString()}, until=${feedOptions.until ? new Date(feedOptions.until * 1000).toISOString() : 'now'}`);
+
+      // For following feed, ensure we have authors
+      if (feedOptions.feedType === 'following' && (!feedOptions.authors || feedOptions.authors.length === 0)) {
+        console.log('[useSocialFeed] Following feed with no authors, skipping subscription');
+        setLoading(false);
+        return;
+      }
+
+      // Build and validate filters before subscribing
+      if (!socialServiceRef.current) {
+        console.error('[useSocialFeed] Social service not initialized');
+        setLoading(false);
+        return;
+      }
+
+      // Validate that we have valid filters before subscribing
+      const filters = socialServiceRef.current.buildFilters({
+        feedType: feedOptions.feedType,
+        since: feedOptions.since,
+        until: feedOptions.until,
+        authors: feedOptions.authors,
+        kinds: feedOptions.kinds
+      });
+
+      if (!filters || Object.keys(filters).length === 0) {
+        console.log('[useSocialFeed] No valid filters to subscribe with, skipping');
+        setLoading(false);
+        return;
+      }
+
+      console.log(`[useSocialFeed] Subscribing with filters:`, JSON.stringify(filters));
+
       // Subscribe to feed
       const subscription = await socialServiceRef.current.subscribeFeed({
-        feedType: options.feedType,
-        since: options.since,
-        until: options.until,
-        limit: options.limit || 30,
-        authors: options.authors,
-        kinds: options.kinds,
+        feedType: feedOptions.feedType,
+        since: feedOptions.since,
+        until: feedOptions.until,
+        limit: feedOptions.limit,
+        authors: feedOptions.authors,
+        kinds: feedOptions.kinds,
        onEvent: processEvent,
        onEose: () => {
          setLoading(false);
        }
      });
+
+      if (subscription) {
        subscriptionRef.current = subscription;
-    } catch (error) {
-      console.error('Error loading feed:', error);
+      } else {
+        console.error('[useSocialFeed] Failed to create subscription');
        setLoading(false);
      }
-  }, [ndk, options.feedType, options.since, options.until, options.limit, options.authors, options.kinds, processEvent]);
+    } catch (error) {
+      console.error('[useSocialFeed] Error loading feed:', error);
+      setLoading(false);
+    }
+  }, [ndk, db, feedOptions, processEvent]);
+
+  // Load cached feed data
+  const loadCachedFeed = useCallback(async () => {
+    if (!ndk) return;
+
+    setLoading(true);
+
+    // Initialize social service if not already done
+    if (!socialServiceRef.current) {
+      try {
+        console.log('[useSocialFeed] Initializing SocialFeedService in loadCachedFeed');
+        socialServiceRef.current = new SocialFeedService(ndk, db);
+        console.log('[useSocialFeed] SocialFeedService initialized successfully in loadCachedFeed');
+      } catch (error) {
+        console.error('[useSocialFeed] Error initializing SocialFeedService in loadCachedFeed:', error);
+        // Log more detailed error information
+        if (error instanceof Error) {
+          console.error(`[useSocialFeed] Error details: ${error.message}`);
+          if (error.stack) {
+            console.error(`[useSocialFeed] Stack trace: ${error.stack}`);
+          }
+        }
+
+        setLoading(false);
+        return; // Exit early if we can't initialize the service
+      }
+    }
+
+    try {
+      // Get cached events from the SocialFeedCache
+      if (socialServiceRef.current) {
+        try {
+          const cachedEvents = await socialServiceRef.current.getCachedEvents(
+            options.feedType,
+            options.limit || 30,
+            options.since,
+            options.until
+          );
+
+          // Process cached events
+          for (const event of cachedEvents) {
+            processEvent(event);
+          }
+        } catch (cacheError) {
+          console.error('Error retrieving cached events:', cacheError);
+          // Continue even if cache retrieval fails - we'll try to fetch from network
+        }
+      }
+    } catch (error) {
+      console.error('Error loading cached feed:', error);
+    } finally {
+      setLoading(false);
+    }
+  }, [ndk, options.feedType, options.limit, options.since, options.until, processEvent]);
 
   // Refresh feed (clear events and reload)
   const refresh = useCallback(async () => {
@@ -330,8 +549,17 @@ export function useSocialFeed(
     quotedEvents.current.clear(); // Also reset quoted events
     setOldestTimestamp(null);
     setHasMore(true);
+
+    // Check if we're online
+    const isOnline = await ConnectivityService.getInstance().checkNetworkStatus();
+    setIsOffline(!isOnline);
+
+    if (isOnline) {
      await loadFeed();
-  }, [loadFeed, options.feedType]);
+    } else {
+      await loadCachedFeed();
+    }
+  }, [loadFeed, loadCachedFeed, options.feedType]);
 
   // Load more (pagination)
   const loadMore = useCallback(async () => {
@@ -375,17 +603,41 @@ export function useSocialFeed(
 
   // Load feed on mount or when dependencies change
   useEffect(() => {
-    if (ndk) {
+    let isMounted = true;
+
+    const initFeed = async () => {
+      if (!ndk || !isMounted) return;
+
+      // Check if we're online
+      const isOnline = await ConnectivityService.getInstance().checkNetworkStatus();
+      if (!isMounted) return;
+
+      setIsOffline(!isOnline);
+
+      if (isOnline) {
        loadFeed();
+      } else {
+        loadCachedFeed();
      }
+    };
+
+    initFeed();
+
     // Clean up subscription on unmount
     return () => {
+      isMounted = false;
      if (subscriptionRef.current) {
        subscriptionRef.current.unsubscribe();
+        subscriptionRef.current = null;
+      }
+
+      // Clear any pending cooldown timer
+      if (subscriptionCooldown.current) {
+        clearTimeout(subscriptionCooldown.current);
+        subscriptionCooldown.current = null;
      }
    };
-  }, [ndk, loadFeed]);
+  }, [ndk]); // Only depend on ndk to prevent infinite loops
+
   return {
     feedItems,
@@ -393,6 +645,7 @@ export function useSocialFeed(
     refresh,
     loadMore,
     hasMore,
+    isOffline,
     socialService: socialServiceRef.current
   };
 }
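The reworked hook memoizes its options and falls back to the SQLite cache when the device is offline. A usage sketch follows; the component, its props, and the rendering are hypothetical and only the hook's option/return fields shown in this diff are relied on:

// Sketch only: consuming the reworked hook from a React Native screen (hypothetical component)
import React from 'react';
import { FlatList, Text } from 'react-native';
import { useSocialFeed } from '@/lib/hooks/useSocialFeed';

export function FollowingFeedScreen({ follows }: { follows: string[] }) {
  // With no explicit `since`, the hook now defaults to a 30-day window for
  // 'following' and 'powr' feeds, 60 days for 'profile', and 7 days for 'global'.
  const { feedItems, isOffline, refresh, loadMore, hasMore } = useSocialFeed({
    feedType: 'following',
    authors: follows, // POWR's account is added server-side of this hook, in buildFilters
    limit: 30,
  });

  return (
    <FlatList
      data={feedItems}
      keyExtractor={item => item.id}
      renderItem={({ item }) => <Text>{item.type}: {item.id}</Text>}
      onEndReached={() => hasMore && loadMore()}
      ListHeaderComponent={
        isOffline ? <Text onPress={() => refresh()}>Offline: showing cached feed (tap to retry)</Text> : null
      }
    />
  );
}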
lib/social/socialFeedService.ts
@@ -1,14 +1,210 @@
 // lib/social/SocialFeedService.ts
-import NDK, { NDKEvent, NDKFilter, NDKSubscription } from '@nostr-dev-kit/ndk-mobile';
+import NDK, { NDKEvent, NDKFilter, NDKSubscription, NDKSubscriptionCacheUsage } from '@nostr-dev-kit/ndk-mobile';
 import { POWR_EVENT_KINDS } from '@/types/nostr-workout';
 import { NostrWorkoutService } from '@/lib/db/services/NostrWorkoutService';
 import { Workout } from '@/types/workout';
+import { SQLiteDatabase } from 'expo-sqlite';
+import { getSocialFeedCache } from '@/lib/db/services/SocialFeedCache';
+import { ConnectivityService } from '@/lib/db/services/ConnectivityService';
+import { POWR_PUBKEY_HEX } from '@/lib/hooks/useFeedHooks';
 
 export class SocialFeedService {
   private ndk: NDK;
+  private socialFeedCache: ReturnType<typeof getSocialFeedCache> | null = null;
+  private db: SQLiteDatabase | null = null;
 
-  constructor(ndk: NDK) {
+  constructor(ndk: NDK, db?: SQLiteDatabase) {
     this.ndk = ndk;
+
+    if (db) {
+      this.db = db;
+      try {
+        this.socialFeedCache = getSocialFeedCache(db);
+        this.socialFeedCache.setNDK(ndk);
+      } catch (error) {
+        console.error('[SocialFeedService] Error initializing SocialFeedCache:', error);
+        // Continue without cache - we'll still be able to fetch from network
+        this.socialFeedCache = null;
+      }
+    }
   }
+
+  /**
+   * Get cached events for a feed
+   * @param feedType Type of feed (following, powr, global)
+   * @param limit Maximum number of events to return
+   * @param since Timestamp to fetch events since (inclusive)
+   * @param until Timestamp to fetch events until (inclusive)
+   * @returns Array of cached events
+   */
+  async getCachedEvents(
+    feedType: string,
+    limit: number = 20,
+    since?: number,
+    until?: number
+  ): Promise<NDKEvent[]> {
+    if (!this.socialFeedCache) {
+      return [];
+    }
+
+    try {
+      return await this.socialFeedCache.getCachedEvents(feedType, limit, since, until);
+    } catch (error) {
+      console.error('[SocialFeedService] Error retrieving cached events:', error);
+      // Return empty array on error
+      return [];
+    }
+  }
+
+  /**
+   * Build filters for a feed subscription
+   * @param options Filter options
+   * @returns NDK filter object or array of filters
+   */
+  buildFilters(options: {
+    feedType: 'following' | 'powr' | 'global' | 'profile';
+    since?: number;
+    until?: number;
+    limit?: number;
+    authors?: string[];
+    kinds?: number[];
+  }): NDKFilter | NDKFilter[] {
+    const { feedType, since, until, limit, authors, kinds } = options;
+
+    // Default to events in the last 24 hours if no since provided
+    const defaultSince = Math.floor(Date.now() / 1000) - 24 * 60 * 60;
+
+    // Fitness-related tags for filtering
+    const tagFilter = [
+      'workout', 'fitness', 'powr', '31days',
+      'crossfit', 'wod', 'gym', 'strength',
+      'cardio', 'training', 'exercise'
+    ];
+
+    // Determine which kinds to include
+    const workoutKinds: number[] = [];
+    const socialKinds: number[] = [];
+
+    // Add workout-specific kinds (1301, 33401, 33402)
+    if (!kinds || kinds.some(k => [1301, 33401, 33402].includes(k))) {
+      [1301, 33401, 33402]
+        .filter(k => !kinds || kinds.includes(k))
+        .forEach(k => workoutKinds.push(k));
+    }
+
+    // Add social post kind (1) and article kind (30023)
+    if (!kinds || kinds.includes(1)) {
+      socialKinds.push(1);
+    }
+
+    if (!kinds || kinds.includes(30023)) {
+      socialKinds.push(30023);
+    }
+
+    // Base filter properties
+    const baseFilter: Record<string, any> = {
+      since: since || defaultSince,
+      limit: limit || 30,
+    };
+
+    if (until) {
+      baseFilter.until = until;
+    }
+
+    // Special handling for different feed types
+    if (feedType === 'profile') {
+      // Profile feed: Show all of a user's posts
+      if (!Array.isArray(authors) || authors.length === 0) {
+        console.error('[SocialFeedService] Profile feed requires authors');
+        return { ...baseFilter, kinds: [] }; // Return empty filter if no authors
+      }
+
+      // For profile feed, we create two filters:
+      // 1. All workout-related kinds from the user
+      // 2. Social posts and articles from the user (with or without tags)
+      return [
+        // Workout-related kinds (no tag filtering)
+        {
+          ...baseFilter,
+          kinds: workoutKinds,
+          authors: authors,
+        },
+        // Social posts and articles (no tag filtering for profile)
+        {
+          ...baseFilter,
+          kinds: socialKinds,
+          authors: authors,
+        }
+      ];
+    } else if (feedType === 'powr') {
+      // POWR feed: Show all content from POWR account(s)
+      if (!Array.isArray(authors) || authors.length === 0) {
+        console.error('[SocialFeedService] POWR feed requires authors');
+        return { ...baseFilter, kinds: [] }; // Return empty filter if no authors
+      }
+
+      // For POWR feed, we don't apply tag filtering
+      return {
+        ...baseFilter,
+        kinds: [...workoutKinds, ...socialKinds],
+        authors: authors,
+      };
+    } else if (feedType === 'following') {
+      // Following feed: Show content from followed users
+      if (!Array.isArray(authors) || authors.length === 0) {
+        console.error('[SocialFeedService] Following feed requires authors');
+        return { ...baseFilter, kinds: [] }; // Return empty filter if no authors
+      }
+
+      // For following feed, we create two filters:
+      // 1. All workout-related kinds from followed users
+      // 2. Social posts and articles from followed users with fitness tags
+
+      // Log the authors to help with debugging
+      console.log(`[SocialFeedService] Following feed with ${authors.length} authors:`,
+        authors.length > 5 ? authors.slice(0, 5).join(', ') + '...' : authors.join(', '));
+
+      // Always include POWR account in following feed
+      let followingAuthors = [...authors];
+      if (POWR_PUBKEY_HEX && !followingAuthors.includes(POWR_PUBKEY_HEX)) {
+        followingAuthors.push(POWR_PUBKEY_HEX);
+        console.log('[SocialFeedService] Added POWR account to following feed authors');
+      }
+
+      return [
+        // Workout-related kinds (no tag filtering)
+        {
+          ...baseFilter,
+          kinds: workoutKinds,
+          authors: followingAuthors,
+        },
+        // Social posts and articles (with tag filtering)
+        {
+          ...baseFilter,
+          kinds: socialKinds,
+          authors: followingAuthors,
+          '#t': tagFilter,
+        }
+      ];
+    } else {
+      // Global feed: Show content from anyone
+      // For global feed, we create two filters:
+      // 1. All workout-related kinds from anyone
+      // 2. Social posts and articles from anyone with fitness tags
+      return [
+        // Workout-related kinds (no tag filtering)
+        {
+          ...baseFilter,
+          kinds: workoutKinds,
+        },
+        // Social posts and articles (with tag filtering)
+        {
+          ...baseFilter,
+          kinds: socialKinds,
+          '#t': tagFilter,
+        }
+      ];
+    }
+  }
 
   /**
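For reference, a sketch of what buildFilters produces for a following feed, which is where fix #3 (always including the POWR account) takes effect. The pubkeys, the helper function, and the commented result shape are illustrative, not part of the commit:

// Illustrative only: the filters produced for a 'following' feed (placeholder pubkeys)
import NDK from '@nostr-dev-kit/ndk-mobile';
import { SQLiteDatabase } from 'expo-sqlite';
import { SocialFeedService } from '@/lib/social/socialFeedService';

function exampleFollowingFilters(ndk: NDK, db: SQLiteDatabase, follows: string[]) {
  const service = new SocialFeedService(ndk, db);
  return service.buildFilters({
    feedType: 'following',
    since: Math.floor(Date.now() / 1000) - 30 * 24 * 60 * 60, // 30-day window, matching the hook's new default
    limit: 30,
    authors: follows,
  });
  // Roughly:
  // [
  //   { kinds: [1301, 33401, 33402], authors: [...follows, POWR_PUBKEY_HEX], since, limit: 30 },
  //   { kinds: [1, 30023], authors: [...follows, POWR_PUBKEY_HEX], since, limit: 30,
  //     '#t': ['workout', 'fitness', 'powr', '31days', /* ... */] }
  // ]
}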
@@ -17,7 +213,7 @@ export class SocialFeedService {
   * @returns Subscription object with unsubscribe method
   */
  subscribeFeed(options: {
-    feedType: 'following' | 'powr' | 'global';
+    feedType: 'following' | 'powr' | 'global' | 'profile';
    since?: number;
    until?: number;
    limit?: number;
@@ -26,124 +222,47 @@ export class SocialFeedService {
    onEvent: (event: NDKEvent) => void;
    onEose?: () => void;
  }): Promise<{ unsubscribe: () => void }> {
-    const { feedType, since, until, limit, authors, kinds, onEvent, onEose } = options;
+    const { feedType, onEvent, onEose } = options;
 
-    // Default to events in the last 24 hours if no since provided
-    const defaultSince = Math.floor(Date.now() / 1000) - 24 * 60 * 60;
+    // Build the filter using our buildFilters method
+    const consolidatedFilter = this.buildFilters(options);
 
-    // Create filters based on feedType
-    const filters: NDKFilter[] = [];
+    // Log the consolidated filter
+    console.log(`[SocialFeedService] Subscribing to ${feedType} feed with filter:`, consolidatedFilter);
 
-    // Workout content filter
-    if (!kinds || kinds.some(k => [1301, 33401, 33402].includes(k))) {
-      const workoutFilter: NDKFilter = {
-        kinds: [1301, 33401, 33402].filter(k => !kinds || kinds.includes(k)) as any[],
-        since: since || defaultSince,
-        limit: limit || 20,
-      };
-
-      if (until) {
-        workoutFilter.until = until;
-      }
-
-      if (feedType === 'following' || feedType === 'powr') {
-        if (Array.isArray(authors) && authors.length > 0) {
-          workoutFilter.authors = authors;
-        }
-      }
-
-      filters.push(workoutFilter);
-    }
-
-    // Social post filter
-    if (!kinds || kinds.includes(1)) {
-      const socialPostFilter: NDKFilter = {
-        kinds: [1] as any[],
-        since: since || defaultSince,
-        limit: limit || 20,
-      };
-
-      if (until) {
-        socialPostFilter.until = until;
-      }
-
-      if (feedType === 'following' || feedType === 'powr') {
-        if (Array.isArray(authors) && authors.length > 0) {
-          socialPostFilter.authors = authors;
-        }
-      } else if (feedType === 'global') {
-        // For global feed, add some relevant tags for filtering
-        socialPostFilter['#t'] = ['workout', 'fitness', 'powr'];
-      }
-
-      filters.push(socialPostFilter);
-    }
-
-    // Article filter
-    if (!kinds || kinds.includes(30023)) {
-      const articleFilter: NDKFilter = {
-        kinds: [30023] as any[],
-        since: since || defaultSince,
-        limit: limit || 20,
-      };
-
-      if (until) {
-        articleFilter.until = until;
-      }
-
-      if (feedType === 'following' || feedType === 'powr') {
-        if (Array.isArray(authors) && authors.length > 0) {
-          articleFilter.authors = authors;
-        }
-      }
-
-      filters.push(articleFilter);
-    }
-
-    // Special case for POWR feed - also include draft articles
-    if (feedType === 'powr' && (!kinds || kinds.includes(30024))) {
-      const draftFilter: NDKFilter = {
-        kinds: [30024] as any[],
-        since: since || defaultSince,
-        limit: limit || 20,
-      };
-
-      if (until) {
-        draftFilter.until = until;
-      }
-
-      if (Array.isArray(authors) && authors.length > 0) {
-        draftFilter.authors = authors;
-      }
-
-      filters.push(draftFilter);
-    }
-
-    // Create subscriptions
-    const subscriptions: NDKSubscription[] = [];
-
-    // Create a subscription for each filter
-    for (const filter of filters) {
-      console.log(`Subscribing with filter:`, filter);
-      const subscription = this.ndk.subscribe(filter);
-
+    // Create a single subscription with the consolidated filter
+    const subscription = this.ndk.subscribe(consolidatedFilter, {
+      closeOnEose: false // Keep subscription open for real-time updates
+    });
+
+    // Set up event handler
    subscription.on('event', (event: NDKEvent) => {
+      // Cache the event if we have a cache
+      if (this.socialFeedCache) {
+        try {
+          this.socialFeedCache.cacheEvent(event, feedType)
+            .catch(err => console.error('[SocialFeedService] Error caching event:', err));
+        } catch (error) {
+          console.error('[SocialFeedService] Exception while caching event:', error);
+          // Continue even if caching fails - we'll still pass the event to the callback
+        }
+      }
+
+      // Pass the event to the callback
      onEvent(event);
    });
+
+    // Set up EOSE handler
    subscription.on('eose', () => {
+      console.log(`[SocialFeedService] Received EOSE for ${feedType} feed`);
      if (onEose) onEose();
    });
-
-      subscriptions.push(subscription);
-    }
 
    // Return a Promise with the unsubscribe object
    return Promise.resolve({
      unsubscribe: () => {
-        subscriptions.forEach(sub => {
-          sub.stop();
-        });
+        console.log(`[SocialFeedService] Unsubscribing from ${feedType} feed`);
+        subscription.stop();
      }
    });
  }
@@ -226,6 +345,20 @@ export class SocialFeedService {
   * @returns The referenced event or null
   */
  async getReferencedContent(eventId: string, kind: number): Promise<NDKEvent | null> {
+    // First check if we have it in the cache
+    if (this.socialFeedCache) {
+      try {
+        const cachedEvent = await this.socialFeedCache.getCachedEvent(eventId);
+        if (cachedEvent) {
+          return cachedEvent;
+        }
+      } catch (error) {
+        console.error('[SocialFeedService] Error retrieving cached event:', error);
+        // Continue to network fetch if cache fails
+      }
+    }
+
+    // If not in cache or no cache available, try to fetch from network
    // Handle addressable content (a-tag references)
    if (eventId.includes(':')) {
      const parts = eventId.split(':');
@@ -236,19 +369,57 @@ export class SocialFeedService {
        authors: [parts[1]],
        "#d": [parts[2]],
      };
-      const events = await this.ndk.fetchEvents(filter);
-      return events.size > 0 ? Array.from(events)[0] : null;
+
+      const events = await this.ndk.fetchEvents(filter, {
+        cacheUsage: NDKSubscriptionCacheUsage.CACHE_FIRST
+      });
+
+      if (events.size > 0) {
+        const event = Array.from(events)[0];
+
+        // Cache the event if we have a cache
+        if (this.socialFeedCache) {
+          try {
+            await this.socialFeedCache.cacheEvent(event, 'referenced');
+          } catch (error) {
+            console.error('[SocialFeedService] Error caching referenced event:', error);
+            // Continue even if caching fails - we can still return the event
+          }
+        }
+
+        return event;
+      }
+      return null;
    }
 
    // Standard event reference (direct ID)
    const filter: NDKFilter = {
-      ids: [eventId],
-      kinds: [kind],
+      ids: [eventId] as string[],
+      kinds: [kind] as number[],
    };
+
-    const events = await this.ndk.fetchEvents(filter);
-    return events.size > 0 ? Array.from(events)[0] : null;
+    const events = await this.ndk.fetchEvents(filter, {
+      cacheUsage: NDKSubscriptionCacheUsage.CACHE_FIRST
+    });
+
+    if (events.size > 0) {
+      const event = Array.from(events)[0];
+
+      // Cache the event if we have a cache
+      if (this.socialFeedCache) {
+        try {
+          await this.socialFeedCache.cacheEvent(event, 'referenced');
+        } catch (error) {
+          console.error('[SocialFeedService] Error caching referenced event:', error);
+          // Continue even if caching fails - we can still return the event
+        }
+      }
+
+      return event;
+    }
+
+    return null;
  }
 
  /**
@@ -304,7 +475,25 @@ export class SocialFeedService {
    event.created_at = eventData.created_at;
 
    await event.sign();
+
+    // Check if we're online before publishing
+    const isOnline = await ConnectivityService.getInstance().checkNetworkStatus();
+
+    if (isOnline) {
      await event.publish();
+    } else {
+      console.log('[SocialFeedService] Offline, event will be published when online');
+    }
+
+    // Cache the event if we have a cache
+    if (this.socialFeedCache) {
+      try {
+        await this.socialFeedCache.cacheEvent(event, 'workout');
+      } catch (error) {
+        console.error('[SocialFeedService] Error caching workout event:', error);
+        // Continue even if caching fails - the event was still published
+      }
+    }
+
    // Create social share if requested
    if (options.shareAsSocialPost && options.socialText) {
@@ -334,17 +523,17 @@ export class SocialFeedService {
    await event.sign();
    await event.publish();
    return event;
  }
 
  /**
   * Get POWR team pubkeys - to be replaced with actual pubkeys
   * @returns Array of POWR team pubkeys
   */
  private getPOWRTeamPubkeys(): string[] {
    // Replace with actual POWR team pubkeys
    return [
      // TODO: Add actual POWR team pubkeys
      '55127fc9e1c03c6b459a3bab72fdb99def1644c5f239bdd09f3e5fb401ed9b21',
    ];
  }
}
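A closing sketch of how a caller might drive the consolidated subscription and its write-through caching; the handler bodies and the wrapper function are placeholders, not part of this commit:

// Sketch only: subscribing through the consolidated filter path
import NDK from '@nostr-dev-kit/ndk-mobile';
import { SQLiteDatabase } from 'expo-sqlite';
import { SocialFeedService } from '@/lib/social/socialFeedService';

async function demoPowrFeed(ndk: NDK, db: SQLiteDatabase) {
  const service = new SocialFeedService(ndk, db);
  const sub = await service.subscribeFeed({
    feedType: 'powr',
    authors: ['55127fc9e1c03c6b459a3bab72fdb99def1644c5f239bdd09f3e5fb401ed9b21'],
    limit: 30,
    onEvent: (event) => {
      // Each event is written to SocialFeedCache (buffered) before reaching this callback
      console.log('feed event', event.kind, event.id);
    },
    onEose: () => console.log('initial fetch complete'),
  });

  // ...later, e.g. on unmount:
  sub.unsubscribe();
}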