Monorepo for wisp.place, a static site hosting service built on top of the AT Protocol.

Compare changes


Changed files
+479 -544
+1
.tangled/workflows/test.yml
···
 dependencies:
   nixpkgs:
     - git
+    - findutils
   github:NixOS/nixpkgs/nixpkgs-unstable:
     - bun
···
+23
apps/hosting-service/src/lib/file-serving.ts
···
     const rewrittenKey = getCacheKey(did, rkey, fileRequestPath, `rewritten:${basePath}`);
     const rewrittenContent = rewrittenHtmlCache.get(rewrittenKey);
     if (rewrittenContent) {
+      console.log(`[HTML Rewrite] Serving from rewritten cache: ${rewrittenKey}`);
       const headers: Record<string, string> = {
         'Content-Type': 'text/html; charset=utf-8',
         'Content-Encoding': 'gzip',
···
     const meta = result.metadata.customMetadata as { encoding?: string; mimeType?: string } | undefined;
     const mimeType = meta?.mimeType || lookup(fileRequestPath) || 'application/octet-stream';
     const isGzipped = meta?.encoding === 'gzip';
+
+    console.log(`[File Serve] Serving ${fileRequestPath}, mimeType: ${mimeType}, isHTML: ${isHtmlContent(fileRequestPath, mimeType)}, basePath: ${basePath}`);

     // Check if this is HTML content that needs rewriting
     if (isHtmlContent(fileRequestPath, mimeType)) {
+      console.log(`[HTML Rewrite] Processing ${fileRequestPath}, basePath: ${basePath}, mimeType: ${mimeType}, isGzipped: ${isGzipped}`);
       let htmlContent: string;
       if (isGzipped) {
         // Verify content is actually gzipped
···
       } else {
         htmlContent = content.toString('utf-8');
       }
+      // Check for <base> tag which can override paths
+      const baseTagMatch = htmlContent.match(/<base\s+[^>]*href=["'][^"']+["'][^>]*>/i);
+      if (baseTagMatch) {
+        console.warn(`[HTML Rewrite] WARNING: <base> tag found: ${baseTagMatch[0]} - this may override path rewrites`);
+      }
+
+      // Find src/href attributes (quoted and unquoted) to debug
+      const allMatches = htmlContent.match(/(?:src|href)\s*=\s*["']?\/[^"'\s>]+/g);
+      console.log(`[HTML Rewrite] Found ${allMatches ? allMatches.length : 0} local path attrs`);
+      if (allMatches && allMatches.length > 0) {
+        console.log(`[HTML Rewrite] Sample paths: ${allMatches.slice(0, 5).join(', ')}`);
+      }
+
       const rewritten = rewriteHtmlPaths(htmlContent, basePath, fileRequestPath);
+
+      const rewrittenMatches = rewritten.match(/(?:src|href)\s*=\s*["']?\/[^"'\s>]+/g);
+      console.log(`[HTML Rewrite] After rewrite, found ${rewrittenMatches ? rewrittenMatches.length : 0} local paths`);
+      if (rewrittenMatches && rewrittenMatches.length > 0) {
+        console.log(`[HTML Rewrite] Sample rewritten: ${rewrittenMatches.slice(0, 5).join(', ')}`);
+      }

       // Recompress and cache the rewritten HTML
       const { gzipSync } = await import('zlib');
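A note on the `<base>` check added above: a `<base href>` changes how the browser resolves every relative URL in the document, so a rewritten page can still load assets from the wrong root. A standalone sketch of that detection, using the same regex as the diff against a made-up sample document:

// Sample document; the regex is the one added in this diff.
const html = '<head><base href="/other-root/"></head><body><img src="/a.png"></body>';

const baseTagMatch = html.match(/<base\s+[^>]*href=["'][^"']+["'][^>]*>/i);
if (baseTagMatch) {
  // Logs '<base href="/other-root/">' - relative URLs now resolve under /other-root/,
  // which can silently defeat the path rewriting done on the document.
  console.warn(`base tag found: ${baseTagMatch[0]}`);
}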
+16
apps/hosting-service/src/lib/html-rewriter.ts
···
       `\\b${attr}[ \\t]{0,5}=[ \\t]{0,5}'([^']*)'`,
       'gi'
     )
+    // Unquoted attributes (valid in HTML5 for values without spaces/special chars)
+    // Match: attr=value where value starts immediately (no quotes) and continues until space or >
+    // Use negative lookahead to ensure we don't match quoted attributes
+    const unquotedRegex = new RegExp(
+      `\\b${attr}[ \\t]{0,5}=[ \\t]{0,5}(?!["'])([^\\s>]+)`,
+      'gi'
+    )

     rewritten = rewritten.replace(doubleQuoteRegex, (match, value) => {
       const rewrittenValue = rewritePath(
···
         documentPath
       )
       return `${attr}='${rewrittenValue}'`
+    })
+
+    rewritten = rewritten.replace(unquotedRegex, (match, value) => {
+      const rewrittenValue = rewritePath(
+        value,
+        normalizedBase,
+        documentPath
+      )
+      return `${attr}=${rewrittenValue}`
     })
   }
 }
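For reference, this is how the new unquoted-attribute pattern behaves next to the quoted forms. A minimal sketch with `attr` fixed to `src` and a stand-in for `rewritePath` (both simplifications for illustration; the real code iterates over several attribute names):

// Same regex as the diff, with attr = 'src'.
const attr = 'src';
const unquotedRegex = new RegExp(`\\b${attr}[ \\t]{0,5}=[ \\t]{0,5}(?!["'])([^\\s>]+)`, 'gi');

const html = '<img src=/logo.png> <a href="/untouched">x</a> <img src="/quoted.png">';

// Stand-in for rewritePath(): prefix unquoted local paths with the site base path.
const rewritten = html.replace(unquotedRegex, (_match, value) => `${attr}=/alice.test/site${value}`);

console.log(rewritten);
// <img src=/alice.test/site/logo.png> <a href="/untouched">x</a> <img src="/quoted.png">

The negative lookahead `(?!["'])` is what keeps this third pass from double-rewriting values the quoted-attribute regexes already handled.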
+94 -2
apps/hosting-service/src/lib/storage.ts
···
  * - Cold (S3/R2): Object storage as source of truth (optional)
  *
  * When S3 is not configured, falls back to disk-only mode (warm tier acts as source of truth).
+ * In cache-only mode (non-master nodes), S3 writes are skipped even if configured.
  */

 import {
···
   DiskStorageTier,
   S3StorageTier,
   type StorageTier,
+  type StorageMetadata,
 } from 'tiered-storage';

 const CACHE_DIR = process.env.CACHE_DIR || './cache/sites';
···
 const HOT_CACHE_COUNT = parseInt(process.env.HOT_CACHE_COUNT || '500', 10);
 const WARM_CACHE_SIZE = parseInt(process.env.WARM_CACHE_SIZE || '10737418240', 10); // 10GB default
 const WARM_EVICTION_POLICY = (process.env.WARM_EVICTION_POLICY || 'lru') as 'lru' | 'fifo' | 'size';
+
+// Cache-only mode: skip S3 writes (non-master nodes)
+// This is the same flag used to skip database writes
+const CACHE_ONLY_MODE = process.env.CACHE_ONLY_MODE === 'true';

 // S3/Cold tier configuration (optional)
 const S3_BUCKET = process.env.S3_BUCKET || '';
···
 };

 /**
+ * Read-only wrapper for S3 tier in cache-only mode.
+ * Allows reads from S3 but skips all writes (for non-master nodes).
+ */
+class ReadOnlyS3Tier implements StorageTier {
+  private static hasLoggedWriteSkip = false;
+
+  constructor(private tier: StorageTier) {}
+
+  // Read operations - pass through to underlying tier
+  async get(key: string) {
+    return this.tier.get(key);
+  }
+
+  async getWithMetadata(key: string) {
+    return this.tier.getWithMetadata?.(key) ?? null;
+  }
+
+  async getStream(key: string) {
+    return this.tier.getStream?.(key) ?? null;
+  }
+
+  async exists(key: string) {
+    return this.tier.exists(key);
+  }
+
+  async getMetadata(key: string) {
+    return this.tier.getMetadata(key);
+  }
+
+  async *listKeys(prefix?: string) {
+    yield* this.tier.listKeys(prefix);
+  }
+
+  async getStats() {
+    return this.tier.getStats();
+  }
+
+  // Write operations - no-op in cache-only mode
+  async set(key: string, _data: Uint8Array, _metadata: StorageMetadata) {
+    this.logWriteSkip('set', key);
+  }
+
+  async setStream(key: string, _stream: NodeJS.ReadableStream, _metadata: StorageMetadata) {
+    this.logWriteSkip('setStream', key);
+  }
+
+  async setMetadata(key: string, _metadata: StorageMetadata) {
+    this.logWriteSkip('setMetadata', key);
+  }
+
+  async delete(key: string) {
+    this.logWriteSkip('delete', key);
+  }
+
+  async deleteMany(keys: string[]) {
+    this.logWriteSkip('deleteMany', `${keys.length} keys`);
+  }
+
+  async clear() {
+    this.logWriteSkip('clear', 'all keys');
+  }
+
+  private logWriteSkip(operation: string, key: string) {
+    // Only log once to avoid spam
+    if (!ReadOnlyS3Tier.hasLoggedWriteSkip) {
+      console.log(`[Storage] Cache-only mode: skipping S3 writes (operation: ${operation})`);
+      ReadOnlyS3Tier.hasLoggedWriteSkip = true;
+    }
+  }
+}
+
+/**
  * Initialize tiered storage
  * Must be called before serving requests
  */
···
 if (S3_BUCKET) {
   // Full three-tier setup with S3 as cold storage
-  coldTier = new S3StorageTier({
+  const s3Tier = new S3StorageTier({
     bucket: S3_BUCKET,
     metadataBucket: S3_METADATA_BUCKET,
     region: S3_REGION,
···
       : undefined,
     prefix: S3_PREFIX,
   });
+
+  // In cache-only mode, wrap S3 tier to make it read-only
+  coldTier = CACHE_ONLY_MODE ? new ReadOnlyS3Tier(s3Tier) : s3Tier;
   warmTier = diskTier;
-  console.log('[Storage] Using S3 as cold tier, disk as warm tier');
+
+  if (CACHE_ONLY_MODE) {
+    console.log('[Storage] Cache-only mode: S3 as read-only cold tier (no writes), disk as warm tier');
+  } else {
+    console.log('[Storage] Using S3 as cold tier, disk as warm tier');
+  }
 } else {
   // Disk-only mode: disk tier acts as source of truth (cold)
   coldTier = diskTier;
···
 // Placement rules: determine which tiers each file goes to
 placementRules: [
+  // Metadata is critical: frequently accessed for cache validity checks
+  {
+    pattern: '**/.metadata.json',
+    tiers: ['hot', 'warm', 'cold'],
+  },
+
   // index.html is critical: write to all tiers for instant serving
   {
     pattern: '**/index.html',
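The wrapper is a plain decorator: reads delegate to the wrapped tier, writes are dropped. A compressed sketch of that contract against a stub tier; the real `StorageTier` interface from `tiered-storage` is richer than the two-method stand-in used here:

// Minimal stand-ins for illustration only.
interface Tier {
  get(key: string): Promise<Uint8Array | null>;
  set(key: string, data: Uint8Array): Promise<void>;
}

class MemoryTier implements Tier {
  private store = new Map<string, Uint8Array>();
  async get(key: string) { return this.store.get(key) ?? null; }
  async set(key: string, data: Uint8Array) { this.store.set(key, data); }
}

// Same shape as ReadOnlyS3Tier: reads delegate, writes are silently skipped.
class ReadOnlyTier implements Tier {
  constructor(private tier: Tier) {}
  async get(key: string) { return this.tier.get(key); }
  async set(_key: string, _data: Uint8Array) { /* no-op on non-master nodes */ }
}

async function demo() {
  const cold = new ReadOnlyTier(new MemoryTier());
  await cold.set('a', new Uint8Array([1])); // skipped
  console.log(await cold.get('a'));         // null - nothing was written
}
demo();

Wrapping at the tier boundary means the placement rules and the rest of the tiered-storage machinery stay unchanged; only the cold tier's write path becomes inert.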
+44 -141
apps/hosting-service/src/lib/utils.ts
···
   const existingMetadata = await getCacheMetadata(did, rkey);
   const existingFileCids = existingMetadata?.fileCids || {};

-  // Use a temporary directory with timestamp to avoid collisions
-  const tempSuffix = `.tmp-${Date.now()}-${Math.random().toString(36).slice(2, 9)}`;
-  const tempDir = `${CACHE_DIR}/${did}/${rkey}${tempSuffix}`;
-  const finalDir = `${CACHE_DIR}/${did}/${rkey}`;
-
-  try {
-    // Collect file CIDs from the new record (using expanded root)
-    const newFileCids: Record<string, string> = {};
-    collectFileCidsFromEntries(expandedRoot.entries, '', newFileCids);
+  // Collect file CIDs from the new record (using expanded root)
+  const newFileCids: Record<string, string> = {};
+  collectFileCidsFromEntries(expandedRoot.entries, '', newFileCids);

-    // Fetch site settings (optional)
-    const settings = await fetchSiteSettings(did, rkey);
+  // Fetch site settings (optional)
+  const settings = await fetchSiteSettings(did, rkey);

-    // Download/copy files to temporary directory (with incremental logic, using expanded root)
-    await cacheFiles(did, rkey, expandedRoot.entries, pdsEndpoint, '', tempSuffix, existingFileCids, finalDir);
-    await saveCacheMetadata(did, rkey, recordCid, tempSuffix, newFileCids, settings);
+  // Download files directly to tiered storage (with incremental logic)
+  await cacheFiles(did, rkey, expandedRoot.entries, pdsEndpoint, '', existingFileCids);
+  await saveCacheMetadata(did, rkey, recordCid, newFileCids, settings);

-    // Atomically replace old cache with new cache
-    // On POSIX systems (Linux/macOS), rename is atomic
-    if (existsSync(finalDir)) {
-      // Rename old directory to backup
-      const backupDir = `${finalDir}.old-${Date.now()}`;
-      await rename(finalDir, backupDir);
-
-      try {
-        // Rename new directory to final location
-        await rename(tempDir, finalDir);
-
-        // Clean up old backup
-        rmSync(backupDir, { recursive: true, force: true });
-      } catch (err) {
-        // If rename failed, restore backup
-        if (existsSync(backupDir) && !existsSync(finalDir)) {
-          await rename(backupDir, finalDir);
-        }
-        throw err;
-      }
-    } else {
-      // No existing cache, just rename temp to final
-      await rename(tempDir, finalDir);
-    }
-
-    console.log('Successfully cached site atomically', did, rkey);
-  } catch (err) {
-    // Clean up temp directory on failure
-    if (existsSync(tempDir)) {
-      rmSync(tempDir, { recursive: true, force: true });
-    }
-    throw err;
-  }
+  console.log('Successfully cached site', did, rkey);
 }


···
   entries: Entry[],
   pdsEndpoint: string,
   pathPrefix: string,
-  dirSuffix: string = '',
-  existingFileCids: Record<string, string> = {},
-  existingCacheDir?: string
+  existingFileCids: Record<string, string> = {}
 ): Promise<void> {
-  // Collect file tasks, separating unchanged files from new/changed files
+  // Collect file download tasks (skip unchanged files)
   const downloadTasks: Array<() => Promise<void>> = [];
-  const copyTasks: Array<() => Promise<void>> = [];

   function collectFileTasks(
     entries: Entry[],
···
       const cid = extractBlobCid(fileNode.blob);

       // Check if file is unchanged (same CID as existing cache)
-      if (cid && existingFileCids[currentPath] === cid && existingCacheDir) {
-        // File unchanged - copy from existing cache instead of downloading
-        copyTasks.push(() => copyExistingFile(
-          did,
-          site,
-          currentPath,
-          dirSuffix,
-          existingCacheDir
-        ));
+      if (cid && existingFileCids[currentPath] === cid) {
+        // File unchanged - skip download (already in tiered storage)
+        console.log(`Skipping unchanged file: ${currentPath}`);
       } else {
         // File new or changed - download it
         downloadTasks.push(() => cacheFileBlob(
···
           pdsEndpoint,
           fileNode.encoding,
           fileNode.mimeType,
-          fileNode.base64,
-          dirSuffix
+          fileNode.base64
         ));
       }
     }
···
   collectFileTasks(entries, pathPrefix);

-  console.log(`[Incremental Update] Files to copy: ${copyTasks.length}, Files to download: ${downloadTasks.length}`);
+  console.log(`[Incremental Update] Files to download: ${downloadTasks.length}`);

-  // Copy unchanged files in parallel (fast local operations) - increased limit for better performance
-  const copyLimit = 50;
-  for (let i = 0; i < copyTasks.length; i += copyLimit) {
-    const batch = copyTasks.slice(i, i + copyLimit);
-    await Promise.all(batch.map(task => task()));
-    if (copyTasks.length > copyLimit) {
-      console.log(`[Cache Progress] Copied ${Math.min(i + copyLimit, copyTasks.length)}/${copyTasks.length} unchanged files`);
-    }
-  }
-
-  // Download new/changed files concurrently - increased from 3 to 20 for much better performance
+  // Download new/changed files concurrently
   const downloadLimit = 20;
   let successCount = 0;
   let failureCount = 0;
···
   }
 }

-/**
- * Copy an unchanged file from existing cache to new cache location
- */
-async function copyExistingFile(
-  did: string,
-  site: string,
-  filePath: string,
-  dirSuffix: string,
-  existingCacheDir: string
-): Promise<void> {
-  const { copyFile } = await import('fs/promises');
-
-  const sourceFile = `${existingCacheDir}/${filePath}`;
-  const destFile = `${CACHE_DIR}/${did}/${site}${dirSuffix}/${filePath}`;
-  const destDir = destFile.substring(0, destFile.lastIndexOf('/'));
-
-  // Create destination directory if needed
-  if (destDir && !existsSync(destDir)) {
-    mkdirSync(destDir, { recursive: true });
-  }
-
-  try {
-    // Copy the file
-    await copyFile(sourceFile, destFile);
-
-    // Copy metadata file if it exists
-    const sourceMetaFile = `${sourceFile}.meta`;
-    const destMetaFile = `${destFile}.meta`;
-    if (existsSync(sourceMetaFile)) {
-      await copyFile(sourceMetaFile, destMetaFile);
-    }
-  } catch (err) {
-    console.error(`Failed to copy cached file ${filePath}, will attempt download:`, err);
-    throw err;
-  }
-}
-
 async function cacheFileBlob(
   did: string,
   site: string,
···
   pdsEndpoint: string,
   encoding?: 'gzip',
   mimeType?: string,
-  base64?: boolean,
-  dirSuffix: string = ''
+  base64?: boolean
 ): Promise<void> {
   const cid = extractBlobCid(blobRef);
   if (!cid) {
···
     }
   }

-  // Write to tiered storage via streaming (warm + cold, skip hot on ingest)
-  // Convert buffer to stream for large file support
+  // Write to tiered storage with metadata
   const stream = Readable.from([content]);
   const key = `${did}/${site}/${filePath}`;

···
   return await storage.exists(indexKey);
 }

-async function saveCacheMetadata(did: string, rkey: string, recordCid: string, dirSuffix: string = '', fileCids?: Record<string, string>, settings?: WispSettings | null): Promise<void> {
+async function saveCacheMetadata(did: string, rkey: string, recordCid: string, fileCids?: Record<string, string>, settings?: WispSettings | null): Promise<void> {
   const metadata: CacheMetadata = {
     recordCid,
     cachedAt: Date.now(),
···
     settings: settings || undefined
   };

-  const metadataPath = `${CACHE_DIR}/${did}/${rkey}${dirSuffix}/.metadata.json`;
-  const metadataDir = metadataPath.substring(0, metadataPath.lastIndexOf('/'));
-
-  if (!existsSync(metadataDir)) {
-    mkdirSync(metadataDir, { recursive: true });
-  }
-
-  await writeFile(metadataPath, JSON.stringify(metadata, null, 2));
+  // Store through tiered storage for persistence to S3/cold tier
+  const metadataKey = `${did}/${rkey}/.metadata.json`;
+  const metadataBytes = new TextEncoder().encode(JSON.stringify(metadata, null, 2));
+  await storage.set(metadataKey, metadataBytes);
 }

 async function getCacheMetadata(did: string, rkey: string): Promise<CacheMetadata | null> {
   try {
-    const metadataPath = `${CACHE_DIR}/${did}/${rkey}/.metadata.json`;
-    if (!existsSync(metadataPath)) return null;
+    // Retrieve metadata from tiered storage
+    const metadataKey = `${did}/${rkey}/.metadata.json`;
+    const data = await storage.get(metadataKey);
+
+    if (!data) return null;

-    const content = await readFile(metadataPath, 'utf-8');
-    return JSON.parse(content) as CacheMetadata;
+    // Deserialize from Uint8Array to JSON (storage uses identity serialization)
+    const jsonString = new TextDecoder().decode(data as Uint8Array);
+    return JSON.parse(jsonString) as CacheMetadata;
   } catch (err) {
     console.error('Failed to read cache metadata', err);
     return null;
···
 }

 export async function updateCacheMetadataSettings(did: string, rkey: string, settings: WispSettings | null): Promise<void> {
-  const metadataPath = `${CACHE_DIR}/${did}/${rkey}/.metadata.json`;
+  try {
+    // Read existing metadata from tiered storage
+    const metadata = await getCacheMetadata(did, rkey);

-  if (!existsSync(metadataPath)) {
-    console.warn('Metadata file does not exist, cannot update settings', { did, rkey });
-    return;
-  }
-
-  try {
-    // Read existing metadata
-    const content = await readFile(metadataPath, 'utf-8');
-    const metadata = JSON.parse(content) as CacheMetadata;
+    if (!metadata) {
+      console.warn('Metadata does not exist, cannot update settings', { did, rkey });
+      return;
+    }

     // Update settings field
     // Store null explicitly to cache "no settings" state and avoid repeated fetches
     metadata.settings = settings ?? null;

-    // Write back to disk
-    await writeFile(metadataPath, JSON.stringify(metadata, null, 2), 'utf-8');
+    // Write back through tiered storage
+    // Convert to Uint8Array since storage is typed for binary data
+    const metadataKey = `${did}/${rkey}/.metadata.json`;
+    const metadataBytes = new TextEncoder().encode(JSON.stringify(metadata, null, 2));
+    await storage.set(metadataKey, metadataBytes);
     console.log('Updated metadata settings', { did, rkey, hasSettings: !!settings });
   } catch (err) {
     console.error('Failed to update metadata settings', err);
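Since `.metadata.json` now round-trips through the binary-typed storage API instead of `readFile`/`writeFile`, the encode/decode pair is the part to get right. A small sketch of that round trip; the key scheme matches the diff, while the DID and rkey values are placeholders:

// Round trip: CacheMetadata -> Uint8Array -> tiered storage -> Uint8Array -> CacheMetadata.
// Only two fields shown; the real CacheMetadata type carries more.
interface CacheMetadata {
  recordCid: string;
  cachedAt: number;
}

const did = 'did:plc:example';
const rkey = 'my-site';
const metadataKey = `${did}/${rkey}/.metadata.json`; // same key scheme as the diff

const metadata: CacheMetadata = { recordCid: 'bafyrei-placeholder', cachedAt: Date.now() };

// Write side: storage.set() is typed for binary data, so encode JSON to bytes.
const bytes = new TextEncoder().encode(JSON.stringify(metadata, null, 2));

// Read side: decode bytes back to JSON (storage uses identity serialization).
const parsed = JSON.parse(new TextDecoder().decode(bytes)) as CacheMetadata;
console.log(metadataKey, parsed.recordCid === metadata.recordCid); // true

Moving metadata into tiered storage is also what the new `**/.metadata.json` placement rule in storage.ts feeds off: once the metadata flows through `storage.set`, the rule pins it to all three tiers.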
+3
apps/hosting-service/src/server.ts
···
       return c.text('Invalid identifier', 400);
     }

+    console.log(`[Server] sites.wisp.place request: identifier=${identifier}, site=${site}, filePath=${filePath}`);
+
     // Check if site is currently being cached - return updating response early
     if (isSiteBeingCached(did, site)) {
       return siteUpdatingResponse();
···

     // Serve with HTML path rewriting to handle absolute paths
     const basePath = `/${identifier}/${site}/`;
+    console.log(`[Server] Serving with basePath: ${basePath}`);
     const headers = extractHeaders(c.req.raw.headers);
     return serveFromCacheWithRewrite(did, site, filePath, basePath, c.req.url, headers);
   }
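A quick sketch of how the logged `basePath` composes with a site's absolute asset paths; the identifier and site values are placeholders:

// Placeholder values; real requests carry the handle/DID and site rkey in the URL.
const identifier = 'alice.example';
const site = 'blog';

const basePath = `/${identifier}/${site}/`; // "/alice.example/blog/"

// An absolute asset path in the site's HTML must resolve under the shared host
// after rewriting:
const assetPath = '/style.css';
console.log(basePath.slice(0, -1) + assetPath); // "/alice.example/blog/style.css"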
+59
backup.nix
···
+{
+  inputs.nixpkgs.url = "github:nixos/nixpkgs/nixpkgs-unstable";
+  inputs.nci.url = "github:90-008/nix-cargo-integration";
+  inputs.nci.inputs.nixpkgs.follows = "nixpkgs";
+  inputs.parts.url = "github:hercules-ci/flake-parts";
+  inputs.parts.inputs.nixpkgs-lib.follows = "nixpkgs";
+  inputs.fenix = {
+    url = "github:nix-community/fenix";
+    inputs.nixpkgs.follows = "nixpkgs";
+  };
+
+  outputs = inputs @ {
+    parts,
+    nci,
+    ...
+  }:
+    parts.lib.mkFlake {inherit inputs;} {
+      systems = ["x86_64-linux" "aarch64-darwin"];
+      imports = [
+        nci.flakeModule
+        ./crates.nix
+      ];
+      perSystem = {
+        pkgs,
+        config,
+        ...
+      }: let
+        crateOutputs = config.nci.outputs."wisp-cli";
+        mkRenamedPackage = name: pkg: isWindows: pkgs.runCommand name {} ''
+          mkdir -p $out/bin
+          if [ -f ${pkg}/bin/wisp-cli.exe ]; then
+            cp ${pkg}/bin/wisp-cli.exe $out/bin/${name}
+          elif [ -f ${pkg}/bin/wisp-cli ]; then
+            cp ${pkg}/bin/wisp-cli $out/bin/${name}
+          else
+            echo "Error: Could not find wisp-cli binary in ${pkg}/bin/"
+            ls -la ${pkg}/bin/ || true
+            exit 1
+          fi
+        '';
+      in {
+        devShells.default = crateOutputs.devShell;
+        packages.default = crateOutputs.packages.release;
+        packages.wisp-cli-x86_64-linux = mkRenamedPackage "wisp-cli-x86_64-linux" crateOutputs.packages.release false;
+        packages.wisp-cli-aarch64-linux = mkRenamedPackage "wisp-cli-aarch64-linux" crateOutputs.allTargets."aarch64-unknown-linux-gnu".packages.release false;
+        packages.wisp-cli-x86_64-windows = mkRenamedPackage "wisp-cli-x86_64-windows.exe" crateOutputs.allTargets."x86_64-pc-windows-gnu".packages.release true;
+        packages.wisp-cli-aarch64-darwin = mkRenamedPackage "wisp-cli-aarch64-darwin" crateOutputs.allTargets."aarch64-apple-darwin".packages.release false;
+        packages.all = pkgs.symlinkJoin {
+          name = "wisp-cli-all";
+          paths = [
+            config.packages.wisp-cli-x86_64-linux
+            config.packages.wisp-cli-aarch64-linux
+            config.packages.wisp-cli-x86_64-windows
+            config.packages.wisp-cli-aarch64-darwin
+          ];
+        };
+      };
+    };
+}
+13 -3
cli/default.nix
···
-{rustPlatform}:
+{
+  rustPlatform,
+  glibc,
+}:
 rustPlatform.buildRustPackage {
-  name = "rust-cross-test";
+  name = "wisp-cli";
   src = ./.;
-  cargoLock.lockFile = ./Cargo.lock;
+  cargoLock = {
+    lockFile = ./Cargo.lock;
+    outputHashes = {
+      "jacquard-0.9.5" = "sha256-75bas4VAYFcZAcBspSqS4vlJe8nmFn9ncTgeoT/OvnA=";
+    };
+  };
+  buildInputs = [glibc.static];
+  RUSTFLAGS = ["-C" "target-feature=+crt-static"];
 }
+96
cli/flake.lock
···
+{
+  "nodes": {
+    "flake-utils": {
+      "inputs": {
+        "systems": "systems"
+      },
+      "locked": {
+        "lastModified": 1731533236,
+        "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
+        "owner": "numtide",
+        "repo": "flake-utils",
+        "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
+        "type": "github"
+      },
+      "original": {
+        "owner": "numtide",
+        "repo": "flake-utils",
+        "type": "github"
+      }
+    },
+    "nixpkgs": {
+      "locked": {
+        "lastModified": 1767640445,
+        "narHash": "sha256-UWYqmD7JFBEDBHWYcqE6s6c77pWdcU/i+bwD6XxMb8A=",
+        "owner": "NixOS",
+        "repo": "nixpkgs",
+        "rev": "9f0c42f8bc7151b8e7e5840fb3bd454ad850d8c5",
+        "type": "github"
+      },
+      "original": {
+        "owner": "NixOS",
+        "ref": "nixos-unstable",
+        "repo": "nixpkgs",
+        "type": "github"
+      }
+    },
+    "nixpkgs_2": {
+      "locked": {
+        "lastModified": 1744536153,
+        "narHash": "sha256-awS2zRgF4uTwrOKwwiJcByDzDOdo3Q1rPZbiHQg/N38=",
+        "owner": "NixOS",
+        "repo": "nixpkgs",
+        "rev": "18dd725c29603f582cf1900e0d25f9f1063dbf11",
+        "type": "github"
+      },
+      "original": {
+        "owner": "NixOS",
+        "ref": "nixpkgs-unstable",
+        "repo": "nixpkgs",
+        "type": "github"
+      }
+    },
+    "root": {
+      "inputs": {
+        "flake-utils": "flake-utils",
+        "nixpkgs": "nixpkgs",
+        "rust-overlay": "rust-overlay"
+      }
+    },
+    "rust-overlay": {
+      "inputs": {
+        "nixpkgs": "nixpkgs_2"
+      },
+      "locked": {
+        "lastModified": 1767667566,
+        "narHash": "sha256-COy+yxZGuhQRVD1r4bWVgeFt1GB+IB1k5WRpDKbLfI8=",
+        "owner": "oxalica",
+        "repo": "rust-overlay",
+        "rev": "056ce5b125ab32ffe78c7d3e394d9da44733c95e",
+        "type": "github"
+      },
+      "original": {
+        "owner": "oxalica",
+        "repo": "rust-overlay",
+        "type": "github"
+      }
+    },
+    "systems": {
+      "locked": {
+        "lastModified": 1681028828,
+        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
+        "owner": "nix-systems",
+        "repo": "default",
+        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
+        "type": "github"
+      },
+      "original": {
+        "owner": "nix-systems",
+        "repo": "default",
+        "type": "github"
+      }
+    }
+  },
+  "root": "root",
+  "version": 7
+}
+130 -21
cli/flake.nix
···
 {
   inputs = {
-    nixpkgs.url = "nixpkgs/nixos-unstable";
+    nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
     rust-overlay.url = "github:oxalica/rust-overlay";
+    flake-utils.url = "github:numtide/flake-utils";
   };

-  outputs = {
-    self,
-    nixpkgs,
-    rust-overlay,
-  }: let
-    system = "aarch64-darwin";
-    overlays = [(import rust-overlay)];
-    pkgs = import nixpkgs {
-      inherit overlays system;
-      crossSystem = {
-        config = "x86_64-unknown-linux-gnu";
-        rustc.config = "x86_64-unknown-linux-gnu";
-      };
-    };
-  in {
-    packages.${system} = {
-      default = self.outputs.packages.${system}.x86_64-linux-example;
-      x86_64-linux-example = pkgs.callPackage ./. {};
-    };
-  };
+  outputs = { self, nixpkgs, rust-overlay, flake-utils }:
+    flake-utils.lib.eachDefaultSystem (system:
+      let
+        overlays = [ (import rust-overlay) ];
+        pkgs = import nixpkgs { inherit system overlays; };
+
+        rustToolchain = pkgs.rust-bin.stable.latest.default.override {
+          targets = [
+            "x86_64-unknown-linux-musl"
+            "aarch64-unknown-linux-musl"
+            "x86_64-apple-darwin"
+            "aarch64-apple-darwin"
+          ];
+        };
+
+        cargoLockConfig = {
+          lockFile = ./Cargo.lock;
+          outputHashes = {
+            "jacquard-0.9.5" = "sha256-75bas4VAYFcZAcBspSqS4vlJe8nmFn9ncTgeoT/OvnA=";
+          };
+        };
+
+        # Native build for current system
+        native = pkgs.rustPlatform.buildRustPackage {
+          pname = "wisp-cli";
+          version = "0.4.2";
+          src = ./.;
+          cargoLock = cargoLockConfig;
+          nativeBuildInputs = [ rustToolchain ];
+        };
+
+        # Cross-compilation targets (Linux from macOS needs zigbuild)
+        linuxTargets = let
+          zigbuildPkgs = pkgs;
+        in {
+          x86_64-linux = pkgs.stdenv.mkDerivation {
+            pname = "wisp-cli-x86_64-linux";
+            version = "0.4.2";
+            src = ./.;
+
+            nativeBuildInputs = [
+              rustToolchain
+              pkgs.cargo-zigbuild
+              pkgs.zig
+            ];
+
+            buildPhase = ''
+              export HOME=$(mktemp -d)
+              cargo zigbuild --release --target x86_64-unknown-linux-musl
+            '';
+
+            installPhase = ''
+              mkdir -p $out/bin
+              cp target/x86_64-unknown-linux-musl/release/wisp-cli $out/bin/
+            '';
+
+            # Skip Nix's cargo vendor - we use network
+            dontConfigure = true;
+            dontFixup = true;
+          };
+
+          aarch64-linux = pkgs.stdenv.mkDerivation {
+            pname = "wisp-cli-aarch64-linux";
+            version = "0.4.2";
+            src = ./.;
+
+            nativeBuildInputs = [
+              rustToolchain
+              pkgs.cargo-zigbuild
+              pkgs.zig
+            ];
+
+            buildPhase = ''
+              export HOME=$(mktemp -d)
+              cargo zigbuild --release --target aarch64-unknown-linux-musl
+            '';
+
+            installPhase = ''
+              mkdir -p $out/bin
+              cp target/aarch64-unknown-linux-musl/release/wisp-cli $out/bin/
+            '';
+
+            dontConfigure = true;
+            dontFixup = true;
+          };
+        };
+
+      in {
+        packages = {
+          default = native;
+          inherit native;
+
+          # macOS universal binary
+          macos-universal = pkgs.stdenv.mkDerivation {
+            pname = "wisp-cli-macos-universal";
+            version = "0.4.2";
+            src = ./.;
+
+            nativeBuildInputs = [ rustToolchain pkgs.darwin.lipo ];
+
+            buildPhase = ''
+              export HOME=$(mktemp -d)
+              cargo build --release --target aarch64-apple-darwin
+              cargo build --release --target x86_64-apple-darwin
+            '';
+
+            installPhase = ''
+              mkdir -p $out/bin
+              lipo -create \
+                target/aarch64-apple-darwin/release/wisp-cli \
+                target/x86_64-apple-darwin/release/wisp-cli \
+                -output $out/bin/wisp-cli
+            '';
+
+            dontConfigure = true;
+            dontFixup = true;
+          };
+        } // (if pkgs.stdenv.isDarwin then linuxTargets else {});
+
+        devShells.default = pkgs.mkShell {
+          buildInputs = [
+            rustToolchain
+            pkgs.cargo-zigbuild
+            pkgs.zig
+          ];
+        };
+      }
+    );
 }
-318
flake.lock
···
-{
-  "nodes": {
-    "crane": {
-      "flake": false,
-      "locked": {
-        "lastModified": 1758758545,
-        "narHash": "sha256-NU5WaEdfwF6i8faJ2Yh+jcK9vVFrofLcwlD/mP65JrI=",
-        "owner": "ipetkov",
-        "repo": "crane",
-        "rev": "95d528a5f54eaba0d12102249ce42f4d01f4e364",
-        "type": "github"
-      },
-      "original": {
-        "owner": "ipetkov",
-        "ref": "v0.21.1",
-        "repo": "crane",
-        "type": "github"
-      }
-    },
-    "dream2nix": {
-      "inputs": {
-        "nixpkgs": [
-          "nci",
-          "nixpkgs"
-        ],
-        "purescript-overlay": "purescript-overlay",
-        "pyproject-nix": "pyproject-nix"
-      },
-      "locked": {
-        "lastModified": 1754978539,
-        "narHash": "sha256-nrDovydywSKRbWim9Ynmgj8SBm8LK3DI2WuhIqzOHYI=",
-        "owner": "nix-community",
-        "repo": "dream2nix",
-        "rev": "fbec3263cb4895ac86ee9506cdc4e6919a1a2214",
-        "type": "github"
-      },
-      "original": {
-        "owner": "nix-community",
-        "repo": "dream2nix",
-        "type": "github"
-      }
-    },
-    "fenix": {
-      "inputs": {
-        "nixpkgs": [
-          "nixpkgs"
-        ],
-        "rust-analyzer-src": "rust-analyzer-src"
-      },
-      "locked": {
-        "lastModified": 1762584108,
-        "narHash": "sha256-wZUW7dlXMXaRdvNbaADqhF8gg9bAfFiMV+iyFQiDv+Y=",
-        "owner": "nix-community",
-        "repo": "fenix",
-        "rev": "32f3ad3b6c690061173e1ac16708874975ec6056",
-        "type": "github"
-      },
-      "original": {
-        "owner": "nix-community",
-        "repo": "fenix",
-        "type": "github"
-      }
-    },
-    "flake-compat": {
-      "flake": false,
-      "locked": {
-        "lastModified": 1696426674,
-        "narHash": "sha256-kvjfFW7WAETZlt09AgDn1MrtKzP7t90Vf7vypd3OL1U=",
-        "owner": "edolstra",
-        "repo": "flake-compat",
-        "rev": "0f9255e01c2351cc7d116c072cb317785dd33b33",
-        "type": "github"
-      },
-      "original": {
-        "owner": "edolstra",
-        "repo": "flake-compat",
-        "type": "github"
-      }
-    },
-    "mk-naked-shell": {
-      "flake": false,
-      "locked": {
-        "lastModified": 1681286841,
-        "narHash": "sha256-3XlJrwlR0nBiREnuogoa5i1b4+w/XPe0z8bbrJASw0g=",
-        "owner": "90-008",
-        "repo": "mk-naked-shell",
-        "rev": "7612f828dd6f22b7fb332cc69440e839d7ffe6bd",
-        "type": "github"
-      },
-      "original": {
-        "owner": "90-008",
-        "repo": "mk-naked-shell",
-        "type": "github"
-      }
-    },
-    "nci": {
-      "inputs": {
-        "crane": "crane",
-        "dream2nix": "dream2nix",
-        "mk-naked-shell": "mk-naked-shell",
-        "nixpkgs": [
-          "nixpkgs"
-        ],
-        "parts": "parts",
-        "rust-overlay": "rust-overlay",
-        "treefmt": "treefmt"
-      },
-      "locked": {
-        "lastModified": 1762582646,
-        "narHash": "sha256-MMzE4xccG+8qbLhdaZoeFDUKWUOn3B4lhp5dZmgukmM=",
-        "owner": "90-008",
-        "repo": "nix-cargo-integration",
-        "rev": "0993c449377049fa8868a664e8290ac6658e0b9a",
-        "type": "github"
-      },
-      "original": {
-        "owner": "90-008",
-        "repo": "nix-cargo-integration",
-        "type": "github"
-      }
-    },
-    "nixpkgs": {
-      "locked": {
-        "lastModified": 1762361079,
-        "narHash": "sha256-lz718rr1BDpZBYk7+G8cE6wee3PiBUpn8aomG/vLLiY=",
-        "owner": "nixos",
-        "repo": "nixpkgs",
-        "rev": "ffcdcf99d65c61956d882df249a9be53e5902ea5",
-        "type": "github"
-      },
-      "original": {
-        "owner": "nixos",
-        "ref": "nixpkgs-unstable",
-        "repo": "nixpkgs",
-        "type": "github"
-      }
-    },
-    "parts": {
-      "inputs": {
-        "nixpkgs-lib": [
-          "nci",
-          "nixpkgs"
-        ]
-      },
-      "locked": {
-        "lastModified": 1762440070,
-        "narHash": "sha256-xxdepIcb39UJ94+YydGP221rjnpkDZUlykKuF54PsqI=",
-        "owner": "hercules-ci",
-        "repo": "flake-parts",
-        "rev": "26d05891e14c88eb4a5d5bee659c0db5afb609d8",
-        "type": "github"
-      },
-      "original": {
-        "owner": "hercules-ci",
-        "repo": "flake-parts",
-        "type": "github"
-      }
-    },
-    "parts_2": {
-      "inputs": {
-        "nixpkgs-lib": [
-          "nixpkgs"
-        ]
-      },
-      "locked": {
-        "lastModified": 1762440070,
-        "narHash": "sha256-xxdepIcb39UJ94+YydGP221rjnpkDZUlykKuF54PsqI=",
-        "owner": "hercules-ci",
-        "repo": "flake-parts",
-        "rev": "26d05891e14c88eb4a5d5bee659c0db5afb609d8",
-        "type": "github"
-      },
-      "original": {
-        "owner": "hercules-ci",
-        "repo": "flake-parts",
-        "type": "github"
-      }
-    },
-    "purescript-overlay": {
-      "inputs": {
-        "flake-compat": "flake-compat",
-        "nixpkgs": [
-          "nci",
-          "dream2nix",
-          "nixpkgs"
-        ],
-        "slimlock": "slimlock"
-      },
-      "locked": {
-        "lastModified": 1728546539,
-        "narHash": "sha256-Sws7w0tlnjD+Bjck1nv29NjC5DbL6nH5auL9Ex9Iz2A=",
-        "owner": "thomashoneyman",
-        "repo": "purescript-overlay",
-        "rev": "4ad4c15d07bd899d7346b331f377606631eb0ee4",
-        "type": "github"
-      },
-      "original": {
-        "owner": "thomashoneyman",
-        "repo": "purescript-overlay",
-        "type": "github"
-      }
-    },
-    "pyproject-nix": {
-      "inputs": {
-        "nixpkgs": [
-          "nci",
-          "dream2nix",
-          "nixpkgs"
-        ]
-      },
-      "locked": {
-        "lastModified": 1752481895,
-        "narHash": "sha256-luVj97hIMpCbwhx3hWiRwjP2YvljWy8FM+4W9njDhLA=",
-        "owner": "pyproject-nix",
-        "repo": "pyproject.nix",
-        "rev": "16ee295c25107a94e59a7fc7f2e5322851781162",
-        "type": "github"
-      },
-      "original": {
-        "owner": "pyproject-nix",
-        "repo": "pyproject.nix",
-        "type": "github"
-      }
-    },
-    "root": {
-      "inputs": {
-        "fenix": "fenix",
-        "nci": "nci",
-        "nixpkgs": "nixpkgs",
-        "parts": "parts_2"
-      }
-    },
-    "rust-analyzer-src": {
-      "flake": false,
-      "locked": {
-        "lastModified": 1762438844,
-        "narHash": "sha256-ApIKJf6CcMsV2nYBXhGF95BmZMO/QXPhgfSnkA/rVUo=",
-        "owner": "rust-lang",
-        "repo": "rust-analyzer",
-        "rev": "4bf516ee5a960c1e2eee9fedd9b1c9e976a19c86",
-        "type": "github"
-      },
-      "original": {
-        "owner": "rust-lang",
-        "ref": "nightly",
-        "repo": "rust-analyzer",
-        "type": "github"
-      }
-    },
-    "rust-overlay": {
-      "inputs": {
-        "nixpkgs": [
-          "nci",
-          "nixpkgs"
-        ]
-      },
-      "locked": {
-        "lastModified": 1762569282,
-        "narHash": "sha256-vINZAJpXQTZd5cfh06Rcw7hesH7sGSvi+Tn+HUieJn8=",
-        "owner": "oxalica",
-        "repo": "rust-overlay",
-        "rev": "a35a6144b976f70827c2fe2f5c89d16d8f9179d8",
-        "type": "github"
-      },
-      "original": {
-        "owner": "oxalica",
-        "repo": "rust-overlay",
-        "type": "github"
-      }
-    },
-    "slimlock": {
-      "inputs": {
-        "nixpkgs": [
-          "nci",
-          "dream2nix",
-          "purescript-overlay",
-          "nixpkgs"
-        ]
-      },
-      "locked": {
-        "lastModified": 1688756706,
-        "narHash": "sha256-xzkkMv3neJJJ89zo3o2ojp7nFeaZc2G0fYwNXNJRFlo=",
-        "owner": "thomashoneyman",
-        "repo": "slimlock",
-        "rev": "cf72723f59e2340d24881fd7bf61cb113b4c407c",
-        "type": "github"
-      },
-      "original": {
-        "owner": "thomashoneyman",
-        "repo": "slimlock",
-        "type": "github"
-      }
-    },
-    "treefmt": {
-      "inputs": {
-        "nixpkgs": [
-          "nci",
-          "nixpkgs"
-        ]
-      },
-      "locked": {
-        "lastModified": 1762410071,
-        "narHash": "sha256-aF5fvoZeoXNPxT0bejFUBXeUjXfHLSL7g+mjR/p5TEg=",
-        "owner": "numtide",
-        "repo": "treefmt-nix",
-        "rev": "97a30861b13c3731a84e09405414398fbf3e109f",
-        "type": "github"
-      },
-      "original": {
-        "owner": "numtide",
-        "repo": "treefmt-nix",
-        "type": "github"
-      }
-    }
-  },
-  "root": "root",
-  "version": 7
-}
-59
flake.nix
···
-{
-  inputs.nixpkgs.url = "github:nixos/nixpkgs/nixpkgs-unstable";
-  inputs.nci.url = "github:90-008/nix-cargo-integration";
-  inputs.nci.inputs.nixpkgs.follows = "nixpkgs";
-  inputs.parts.url = "github:hercules-ci/flake-parts";
-  inputs.parts.inputs.nixpkgs-lib.follows = "nixpkgs";
-  inputs.fenix = {
-    url = "github:nix-community/fenix";
-    inputs.nixpkgs.follows = "nixpkgs";
-  };
-
-  outputs = inputs @ {
-    parts,
-    nci,
-    ...
-  }:
-    parts.lib.mkFlake {inherit inputs;} {
-      systems = ["x86_64-linux" "aarch64-darwin"];
-      imports = [
-        nci.flakeModule
-        ./crates.nix
-      ];
-      perSystem = {
-        pkgs,
-        config,
-        ...
-      }: let
-        crateOutputs = config.nci.outputs."wisp-cli";
-        mkRenamedPackage = name: pkg: isWindows: pkgs.runCommand name {} ''
-          mkdir -p $out/bin
-          if [ -f ${pkg}/bin/wisp-cli.exe ]; then
-            cp ${pkg}/bin/wisp-cli.exe $out/bin/${name}
-          elif [ -f ${pkg}/bin/wisp-cli ]; then
-            cp ${pkg}/bin/wisp-cli $out/bin/${name}
-          else
-            echo "Error: Could not find wisp-cli binary in ${pkg}/bin/"
-            ls -la ${pkg}/bin/ || true
-            exit 1
-          fi
-        '';
-      in {
-        devShells.default = crateOutputs.devShell;
-        packages.default = crateOutputs.packages.release;
-        packages.wisp-cli-x86_64-linux = mkRenamedPackage "wisp-cli-x86_64-linux" crateOutputs.packages.release false;
-        packages.wisp-cli-aarch64-linux = mkRenamedPackage "wisp-cli-aarch64-linux" crateOutputs.allTargets."aarch64-unknown-linux-gnu".packages.release false;
-        packages.wisp-cli-x86_64-windows = mkRenamedPackage "wisp-cli-x86_64-windows.exe" crateOutputs.allTargets."x86_64-pc-windows-gnu".packages.release true;
-        packages.wisp-cli-aarch64-darwin = mkRenamedPackage "wisp-cli-aarch64-darwin" crateOutputs.allTargets."aarch64-apple-darwin".packages.release false;
-        packages.all = pkgs.symlinkJoin {
-          name = "wisp-cli-all";
-          paths = [
-            config.packages.wisp-cli-x86_64-linux
-            config.packages.wisp-cli-aarch64-linux
-            config.packages.wisp-cli-x86_64-windows
-            config.packages.wisp-cli-aarch64-darwin
-          ];
-        };
-      };
-    };
-}