fork of indigo with slightly nicer lexgen

rainbow: improve nextCrawler arg parsing

Changed files

splitter/splitter.go  +16 -11
···
 
 	upstreamClient *http.Client
 	peerClient     *http.Client
-	nextCrawlers   []*url.URL
+	nextCrawlers   []url.URL
 }
 
 type SplitterConfig struct {
···
 		logger = slog.Default().With("system", "splitter")
 	}
 
-	var nextCrawlerURLs []*url.URL
-	if len(nextCrawlers) > 0 {
-		nextCrawlerURLs = make([]*url.URL, len(nextCrawlers))
-		for i, tu := range nextCrawlers {
-			var err error
-			nextCrawlerURLs[i], err = url.Parse(tu)
-			if err != nil {
-				return nil, fmt.Errorf("failed to parse next-crawler url: %w", err)
-			}
-			logger.Info("configuring relay for requestCrawl", "host", nextCrawlerURLs[i])
+	var nextCrawlerURLs []url.URL
+	for _, raw := range nextCrawlers {
+		if raw == "" {
+			continue
+		}
+		u, err := url.Parse(raw)
+		if err != nil {
+			return nil, fmt.Errorf("failed to parse next-crawler url: %w", err)
+		}
+		if u.Host == "" {
+			return nil, fmt.Errorf("empty URL host for next crawler: %s", raw)
 		}
+		nextCrawlerURLs = append(nextCrawlerURLs, *u)
+	}
+	if len(nextCrawlerURLs) > 0 {
+		logger.Info("configured crawler forwarding", "crawlers", nextCrawlerURLs)
 	}
 
 	_, err := url.Parse(conf.UpstreamHostHTTP())
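For reference, the rewritten loop skips blank nextCrawlers entries, rejects values that fail url.Parse or parse without a host, stores the results by value rather than as pointers, and logs the configured list once at the end. Below is a minimal standalone sketch of that behavior; parseNextCrawlers is a hypothetical helper name and the example URLs are made up, neither is part of this change.

package main

import (
	"fmt"
	"net/url"
)

// parseNextCrawlers mirrors the parsing logic added in this change:
// blank entries are skipped, unparseable URLs or URLs without a host
// return an error, and valid URLs are collected by value.
func parseNextCrawlers(raws []string) ([]url.URL, error) {
	var out []url.URL
	for _, raw := range raws {
		if raw == "" {
			continue
		}
		u, err := url.Parse(raw)
		if err != nil {
			return nil, fmt.Errorf("failed to parse next-crawler url: %w", err)
		}
		if u.Host == "" {
			return nil, fmt.Errorf("empty URL host for next crawler: %s", raw)
		}
		out = append(out, *u)
	}
	return out, nil
}

func main() {
	// Blank entries are dropped rather than treated as a parse failure.
	urls, err := parseNextCrawlers([]string{"https://relay1.example.com", ""})
	fmt.Println(len(urls), err) // 1 <nil>

	// A bare hostname parses as a path with no host, so it is rejected.
	_, err = parseNextCrawlers([]string{"relay1.example.com"})
	fmt.Println(err) // empty URL host for next crawler: relay1.example.com
}

Collecting url.URL values instead of pointers means the resulting slice can never hold nil entries, and skipping blank strings lets an unset or empty argument pass through without tripping the host check.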