import { useEffect, useState } from "react";

// API base URL - empty for same-origin (local dev), or set via env var for production
const API_URL = "https://atfeeds-api.stevedsimkins.workers.dev";

interface BskyPostRef {
  uri: string;
  cid: string;
}

interface Publication {
  url: string;
  name: string;
  description?: string;
  iconCid?: string;
  iconUrl?: string;
}

interface Document {
  uri: string;
  did: string;
  rkey: string;
  title: string;
  description?: string;
  path?: string;
  site?: string;
  content?: {
    $type: string;
    markdown?: string;
  };
  textContent?: string;
  coverImageCid?: string;
  coverImageUrl?: string;
  bskyPostRef?: BskyPostRef;
  tags?: string[];
  publishedAt?: string;
  updatedAt?: string;
  publication?: Publication;
  viewUrl?: string;
  pdsEndpoint?: string;
}

interface FeedResponse {
  count: number;
  limit: number;
  offset: number;
  documents: Document[];
}

function App() {
  // Explicit generics so setDocuments/setError accept their values below
  const [documents, setDocuments] = useState<Document[]>([]);
  const [loading, setLoading] = useState(true);
  const [error, setError] = useState<string | null>(null);

  // Fetch the aggregated feed from the API and store the documents in state
  const fetchFeed = async () => {
    setLoading(true);
    setError(null);
    try {
      const response = await fetch(`${API_URL}/feed?limit=100`);
      if (!response.ok) {
        throw new Error("Failed to fetch feed");
      }
      const data: FeedResponse = await response.json();
      setDocuments(data.documents);
    } catch (err) {
      setError(err instanceof Error ? err.message : "Unknown error");
    } finally {
      setLoading(false);
    }
  };

  // Load the feed once on mount
  useEffect(() => {
    fetchFeed();
  }, []);

  // Relative timestamp for recent dates, full date otherwise
  const formatDate = (dateString?: string) => {
    if (!dateString) return "Unknown date";
    const date = new Date(dateString);
    const now = new Date();
    const diff = now.getTime() - date.getTime();
    const minutes = Math.floor(diff / 60000);
    const hours = Math.floor(diff / 3600000);
    const days = Math.floor(diff / 86400000);

    if (minutes < 1) return "just now";
    if (minutes < 60) return `${minutes} minute${minutes > 1 ? "s" : ""} ago`;
    if (hours < 24) return `${hours} hour${hours > 1 ? "s" : ""} ago`;
    if (days < 7) return `${days} day${days > 1 ? "s" : ""} ago`;

    return date.toLocaleDateString("en-US", {
      year: "numeric",
      month: "long",
      day: "numeric",
    });
  };

  // Truncate long text with an ellipsis
  const truncateText = (text?: string, maxLength: number = 200) => {
    if (!text) return "";
    if (text.length <= maxLength) return text;
    return text.slice(0, maxLength) + "...";
  };

  // Prefer the explicit description, falling back to the extracted text content
  const getDescription = (doc: Document) => {
    return doc.description || doc.textContent || "";
  };

  return (
    // NOTE: classNames and element choices below are assumed placeholders for the IE-style theme
    <div className="ie-window">
      {/* Title Bar */}
      <div className="title-bar">Docs.surf - Microsoft Internet Explorer</div>

      {/* IE Chrome Container */}
      <div className="ie-chrome">
        {/* Menu Bar */}
        <div className="menu-bar">
          {["File", "Edit", "View", "Favorites", "Tools", "Help"].map(
            (item) => (
              <span key={item} className="menu-item">
                {item}
              </span>
            ),
          )}
        </div>

        {/* Toolbar: Back, Forward, Stop, Refresh, Home, Search, Favorites, Mail, Print */}
        <div className="toolbar">
          {[
            "Back",
            "Forward",
            "Stop",
            "Refresh",
            "Home",
            "Search",
            "Favorites",
            "Mail",
            "Print",
          ].map((label) => (
            <button key={label} type="button" className="toolbar-button">
              {label}
            </button>
          ))}
        </div>

        {/* Address Bar */}
        <div className="address-bar">
          <span>Address</span>
          <input type="text" value="https://docs.surf" readOnly />
          <span>Links »</span>
        </div>
      </div>

      {/* Page content */}
      <div className="page-content">
        {loading && <p className="loading">Searching...</p>}

        {error && <p className="error">Error: {error}</p>}

        {!loading && !error && (
          <div>
            <h1>Welcome to Docs.surf! 🏄</h1>
            {/* Feed link; the RSS feed URL is assumed/not shown here */}
            <a href="#">RSS</a>

            <h2>What is this?</h2>
            <p>
              Docs.surf is a{" "}
              <a href="https://standard.site">Standard.site</a> aggregator,
              pulling all valid Publications and Documents into a single
              chronological feed. You can think of it like RSS, but there's no
              manual collection. It's all powered by{" "}
              <a href="https://atproto.com">ATProto</a>, a new protocol to
              power connections across the web.
            </p>
            <p>
              Source code can be found at{" "}
              <a href="https://tangled.org/stevedylandev/docs.surf">
                tangled.org/stevedylandev/docs.surf
              </a>
            </p>

            {documents.map((doc, index) => (
              <article key={index} className="document-row">
                {/* Thumbnail on the left */}
                {doc.coverImageUrl || doc.publication?.iconUrl ? (
                  <img
                    src={doc.coverImageUrl || doc.publication?.iconUrl}
                    alt={doc.title}
                  />
                ) : (
                  <div className="default-thumbnail">Default</div>
                )}

                {/* Content on the right */}
                <div>
                  {/* Title */}
                  <h3>
                    {doc.viewUrl ? (
                      <a href={doc.viewUrl}>{doc.title}</a>
                    ) : (
                      doc.title
                    )}
                  </h3>

                  {/* Description */}
                  {getDescription(doc) && (
                    <p>{truncateText(getDescription(doc), 150)}</p>
                  )}

                  {/* Publication name and timestamp */}
                  <p>
                    {doc.publication?.name} · {formatDate(doc.publishedAt)}
                  </p>
                </div>

                {/* RSS icon on the far right (currently disabled) */}
              </article>
            ))}

            {documents.length === 0 && <p>No documents found.</p>}
          </div>
        )}
      </div>

      {/* Status Bar */}
      <div className="status-bar">Done</div>
    </div>
  );
}

export default App;
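
// A minimal usage sketch: mounting this component from a typical Vite + React
// entry point. The file name "main.tsx" and the "root" element id are
// assumptions, not part of this file.
//
//   import { createRoot } from "react-dom/client";
//   import App from "./App";
//
//   createRoot(document.getElementById("root")!).render(<App />);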