4 Commits

Author SHA1 Message Date
calic
a11bdb7267 Stealth mode: defeat Cloudflare automation detection
Some checks failed
Release / build (push) Failing after 1m5s
Drop DefaultExecAllocatorOptions (includes --enable-automation),
add disable-blink-features=AutomationControlled, patch
navigator.webdriver via JS, and inject sessionKey cookie via CDP
into a dedicated fetch profile.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-21 00:42:03 +01:00
calic
5abdee06ff Add diagnostic logging to Chrome fallback
All checks were successful
Release / build (push) Successful in 1m32s
Log navigation, polling state, and response snippets so we can
diagnose whether the fallback fails due to Cloudflare challenge,
login redirect, profile lock, or other issues.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-21 00:34:41 +01:00
calic
2cb89d3c54 Fix Cloudflare headless detection: use non-headless with hidden window
All checks were successful
Release / build (push) Successful in 1m37s
Cloudflare detects headless Chrome and loops the JS challenge forever.
Switch to non-headless mode with an off-screen window. Also save
Cloudflare cookies (cf_clearance, __cf_bm) after Chrome fallback so
subsequent plain HTTP requests can reuse them.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-21 00:26:34 +01:00
calic
ba3b73c3dd Add Cloudflare 403 fallback via headless Chrome
All checks were successful
Release / build (push) Successful in 1m40s
Plain HTTP requests to claude.ai get blocked by Cloudflare JS challenges
(403). The fetcher now falls back to headless Chrome using the persistent
browser profile, which can solve challenges natively and reuses existing
cf_clearance cookies.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-21 00:13:21 +01:00
3 changed files with 247 additions and 16 deletions

View File

@@ -4,6 +4,15 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/), and this project adheres to [Semantic Versioning](https://semver.org/). The format is based on [Keep a Changelog](https://keepachangelog.com/), and this project adheres to [Semantic Versioning](https://semver.org/).
## [0.5.0] — 2026-03-21
### Fixed
- **Cloudflare 403 bypass** — API requests blocked by Cloudflare JS challenges now fall back to Chrome (non-headless, with a hidden off-screen window) using the persistent browser profile, which can solve the challenges natively
### Added
- `internal/browser/fetch.go` — Chrome API fetcher using chromedp with the existing browser profile (reuses Cloudflare clearance cookies)
- `fetchWithFallback()` in fetcher — tries plain HTTP first, falls back to Chrome on 403
## [0.3.0] — 2026-02-26 ## [0.3.0] — 2026-02-26
Full rewrite from Node.js + Python to Go. Each platform gets a single static binary — no runtime dependencies. Full rewrite from Node.js + Python to Go. Each platform gets a single static binary — no runtime dependencies.
@@ -44,5 +53,6 @@ First tagged release. Includes the CLI statusline, standalone usage fetcher, cro
- Tray icon visibility — switched to Claude orange with full opacity at larger size - Tray icon visibility — switched to Claude orange with full opacity at larger size
- Block comment syntax error in cron example - Block comment syntax error in cron example
[0.5.0]: https://git.davoryn.de/calic/claude-statusline/releases/tag/v0.5.0
[0.3.0]: https://git.davoryn.de/calic/claude-statusline/releases/tag/v0.3.0 [0.3.0]: https://git.davoryn.de/calic/claude-statusline/releases/tag/v0.3.0
[0.2.0]: https://git.davoryn.de/calic/claude-statusline/releases/tag/v0.2.0 [0.2.0]: https://git.davoryn.de/calic/claude-statusline/releases/tag/v0.2.0

172
internal/browser/fetch.go Normal file
View File

@@ -0,0 +1,172 @@
package browser
import (
"context"
"fmt"
"log"
"os"
"path/filepath"
"strings"
"time"
"github.com/chromedp/cdproto/network"
"github.com/chromedp/chromedp"
"git.davoryn.de/calic/claude-statusline/internal/config"
)
// FetchViaChrome navigates to a URL using Chrome with a dedicated browser
// profile and returns the response body. Uses stealth flags to avoid
// Cloudflare's automation detection (navigator.webdriver, etc.).
// FetchViaChrome navigates to a URL using Chrome with a dedicated browser
// profile and returns the response body. Uses stealth flags to avoid
// Cloudflare's automation detection (navigator.webdriver, etc.).
//
// The page is polled once per second until its text content looks like a
// JSON value (starts with '[' or '{'); Cloudflare clearance cookies are
// then persisted for reuse by plain HTTP requests. Returns an error if
// the 30-second deadline expires first.
func FetchViaChrome(url string) ([]byte, error) {
	// Use a dedicated fetch profile separate from the login profile.
	profileDir := filepath.Join(config.ConfigDir(), "fetch-profile")
	if err := os.MkdirAll(profileDir, 0o755); err != nil {
		return nil, fmt.Errorf("create fetch profile dir: %w", err)
	}
	// Best-effort: clear a stale profile lock left by a crashed Chrome.
	_ = os.Remove(filepath.Join(profileDir, "SingletonLock"))
	execPath := findBrowserExec()
	// Start with minimal options — NOT DefaultExecAllocatorOptions, which
	// includes flags that Cloudflare detects (--enable-automation, etc.).
	opts := []chromedp.ExecAllocatorOption{
		chromedp.NoFirstRun,
		chromedp.NoDefaultBrowserCheck,
		chromedp.UserDataDir(profileDir),
		// Stealth: disable automation indicators
		chromedp.Flag("disable-blink-features", "AutomationControlled"),
		chromedp.Flag("enable-automation", false),
		chromedp.Flag("disable-infobars", true),
		// Window: off-screen so it doesn't flash
		chromedp.Flag("window-position", "-32000,-32000"),
		chromedp.Flag("window-size", "1,1"),
		chromedp.Flag("disable-gpu", true),
		chromedp.Flag("disable-extensions", true),
		chromedp.Flag("no-first-run", true),
		// Use non-headless — Cloudflare detects headless mode
		chromedp.Flag("headless", false),
	}
	if execPath != "" {
		opts = append(opts, chromedp.ExecPath(execPath))
	}
	allocCtx, allocCancel := chromedp.NewExecAllocator(context.Background(), opts...)
	defer allocCancel()
	ctx, cancel := chromedp.NewContext(allocCtx)
	defer cancel()
	ctx, timeoutCancel := context.WithTimeout(ctx, 30*time.Second)
	defer timeoutCancel()
	// Inject the session key cookie before navigating so the API request
	// is authenticated. Failure is non-fatal — the profile may already
	// hold a valid cookie from a previous run.
	if sessionKey := config.GetSessionKey(); sessionKey != "" {
		err := chromedp.Run(ctx,
			chromedp.Navigate("about:blank"),
			setCookieAction("sessionKey", sessionKey, ".claude.ai"),
		)
		if err != nil {
			log.Printf("chrome-fetch: cookie injection failed: %v", err)
		}
	}
	// Patch navigator.webdriver on the current page.
	// NOTE(review): this Evaluate runs against the page loaded now
	// (about:blank) and does not persist across the navigation below; a
	// persistent patch would need page.AddScriptToEvaluateOnNewDocument.
	// Confirm whether disable-blink-features=AutomationControlled above
	// already covers the target page.
	err := chromedp.Run(ctx,
		chromedp.Evaluate(`Object.defineProperty(navigator, 'webdriver', {get: () => undefined})`, nil),
	)
	if err != nil {
		log.Printf("chrome-fetch: webdriver patch failed: %v", err)
	}
	log.Printf("chrome-fetch: navigating to %s (exec: %q)", url, execPath)
	if err := chromedp.Run(ctx, chromedp.Navigate(url)); err != nil {
		log.Printf("chrome-fetch: navigate failed: %v", err)
		return nil, fmt.Errorf("chromedp navigate: %w", err)
	}
	log.Printf("chrome-fetch: navigation complete, polling for JSON...")
	// Poll for a JSON response — the Cloudflare challenge takes a few
	// seconds to resolve before the API body appears.
	ticker := time.NewTicker(1 * time.Second)
	defer ticker.Stop()
	for {
		select {
		case <-ctx.Done():
			return nil, fmt.Errorf("chromedp fetch timed out waiting for JSON response")
		case <-ticker.C:
			// Raw JSON is rendered inside a <pre> element; fall back to
			// the whole <body> text (e.g. for challenge pages).
			var body string
			err := chromedp.Run(ctx,
				chromedp.Text("pre", &body, chromedp.ByQuery),
			)
			if err != nil || body == "" {
				_ = chromedp.Run(ctx,
					chromedp.Text("body", &body, chromedp.ByQuery),
				)
			}
			body = strings.TrimSpace(body)
			if body == "" {
				log.Printf("chrome-fetch: page body empty, waiting...")
				continue
			}
			if body[0] == '[' || body[0] == '{' {
				log.Printf("chrome-fetch: got JSON response (%d bytes)", len(body))
				// Persist clearance cookies so plain HTTP can reuse them.
				_ = extractAndSaveCookies(ctx)
				return []byte(body), nil
			}
			// Not JSON yet — log a truncated snippet for diagnosis.
			snippet := body
			if len(snippet) > 200 {
				snippet = snippet[:200]
			}
			log.Printf("chrome-fetch: non-JSON body (%d bytes): %s", len(body), snippet)
		}
	}
}
// setCookieAction returns a chromedp.Action that sets a single cookie for
// the given domain via the DevTools protocol (Network.setCookie), marked
// HttpOnly and Secure with path "/".
func setCookieAction(name, value, domain string) chromedp.Action {
	return chromedp.ActionFunc(func(ctx context.Context) error {
		params := network.SetCookie(name, value)
		params = params.WithDomain(domain)
		params = params.WithPath("/")
		params = params.WithHTTPOnly(true)
		params = params.WithSecure(true)
		return params.Do(ctx)
	})
}
// extractAndSaveCookies persists the Cloudflare cookies (cf_clearance,
// __cf_bm, _cfuvid) for the claude.ai domain to the cf-cookies file so
// plain HTTP requests can try them on subsequent polls. Writing is
// skipped entirely when no matching cookie is present.
func extractAndSaveCookies(ctx context.Context) error {
	cookies, err := network.GetCookies().Do(ctx)
	if err != nil {
		return err
	}
	wanted := map[string]bool{
		"cf_clearance": true,
		"__cf_bm":      true,
		"_cfuvid":      true,
	}
	var lines []string
	for _, ck := range cookies {
		onClaude := ck.Domain == ".claude.ai" || ck.Domain == "claude.ai"
		if onClaude && wanted[ck.Name] {
			lines = append(lines, ck.Name+"="+ck.Value)
		}
	}
	if len(lines) == 0 {
		return nil
	}
	cfPath := filepath.Join(config.ConfigDir(), "cf-cookies")
	// 0600: cookie values are credentials — keep them owner-readable only.
	return os.WriteFile(cfPath, []byte(strings.Join(lines, "\n")+"\n"), 0o600)
}

View File

@@ -4,11 +4,16 @@ import (
"encoding/json" "encoding/json"
"fmt" "fmt"
"io" "io"
"log"
"math" "math"
"net/http" "net/http"
"os"
"path/filepath"
"strings"
"sync" "sync"
"time" "time"
"git.davoryn.de/calic/claude-statusline/internal/browser"
"git.davoryn.de/calic/claude-statusline/internal/config" "git.davoryn.de/calic/claude-statusline/internal/config"
) )
@@ -32,13 +37,21 @@ type ParsedUsage struct {
type UpdateCallback func(ParsedUsage) type UpdateCallback func(ParsedUsage)
// doRequest performs an authenticated HTTP GET to the Claude API. // doRequest performs an authenticated HTTP GET to the Claude API.
// Includes any saved Cloudflare cookies from previous Chrome fallbacks.
func doRequest(url, sessionKey string) ([]byte, int, error) { func doRequest(url, sessionKey string) ([]byte, int, error) {
client := &http.Client{Timeout: 10 * time.Second} client := &http.Client{Timeout: 10 * time.Second}
req, err := http.NewRequest("GET", url, nil) req, err := http.NewRequest("GET", url, nil)
if err != nil { if err != nil {
return nil, 0, err return nil, 0, err
} }
req.Header.Set("Cookie", "sessionKey="+sessionKey)
cookie := "sessionKey=" + sessionKey
// Append Cloudflare cookies if available (saved by Chrome fallback)
if cfCookies := loadCFCookies(); cfCookies != "" {
cookie += "; " + cfCookies
}
req.Header.Set("Cookie", cookie)
req.Header.Set("User-Agent", userAgent) req.Header.Set("User-Agent", userAgent)
req.Header.Set("Accept", "application/json") req.Header.Set("Accept", "application/json")
req.Header.Set("Referer", "https://claude.ai/") req.Header.Set("Referer", "https://claude.ai/")
@@ -56,17 +69,56 @@ func doRequest(url, sessionKey string) ([]byte, int, error) {
return body, resp.StatusCode, nil return body, resp.StatusCode, nil
} }
// loadCFCookies reads saved Cloudflare cookies from the cf-cookies file
// and returns them as a single "; "-separated Cookie-header fragment.
// Returns "" when the file is missing or unreadable.
func loadCFCookies() string {
	data, err := os.ReadFile(filepath.Join(config.ConfigDir(), "cf-cookies"))
	if err != nil {
		return ""
	}
	// The file stores one name=value pair per line; skip blank lines.
	var pairs []string
	for _, line := range strings.Split(strings.TrimSpace(string(data)), "\n") {
		if trimmed := strings.TrimSpace(line); trimmed != "" {
			pairs = append(pairs, trimmed)
		}
	}
	return strings.Join(pairs, "; ")
}
// fetchWithFallback tries a plain HTTP request first and, on a 403
// (most likely a Cloudflare JS challenge), retries via Chrome, which can
// solve the challenge natively. 401 and an unsolvable 403 both surface
// as the "auth_expired" error that callers match on.
func fetchWithFallback(url, sessionKey string) ([]byte, error) {
	body, status, err := doRequest(url, sessionKey)
	if err != nil {
		return nil, fmt.Errorf("request failed: %w", err)
	}
	switch status {
	case 200:
		return body, nil
	case 401:
		return nil, fmt.Errorf("auth_expired")
	case 403:
		// Likely a Cloudflare JS challenge — fall back to Chrome
		log.Printf("HTTP 403 for %s, falling back to Chrome", url)
		chromeBody, chromeErr := browser.FetchViaChrome(url)
		if chromeErr != nil {
			log.Printf("Chrome fallback failed: %v", chromeErr)
			return nil, fmt.Errorf("auth_expired")
		}
		log.Printf("Chrome fallback succeeded (%d bytes)", len(chromeBody))
		return chromeBody, nil
	default:
		return nil, fmt.Errorf("HTTP %d", status)
	}
}
// DiscoverOrgID fetches the first organization UUID from the API. // DiscoverOrgID fetches the first organization UUID from the API.
func DiscoverOrgID(sessionKey string) (string, error) { func DiscoverOrgID(sessionKey string) (string, error) {
body, status, err := doRequest(apiBase+"/api/organizations", sessionKey) body, err := fetchWithFallback(apiBase+"/api/organizations", sessionKey)
if err != nil { if err != nil {
return "", fmt.Errorf("request failed: %w", err) return "", err
}
if status == 401 || status == 403 {
return "", fmt.Errorf("auth_expired")
}
if status != 200 {
return "", fmt.Errorf("HTTP %d", status)
} }
var orgs []struct { var orgs []struct {
@@ -96,16 +148,13 @@ func FetchUsage(sessionKey, orgID string) (*CacheData, string, error) {
} }
url := fmt.Sprintf("%s/api/organizations/%s/usage", apiBase, orgID) url := fmt.Sprintf("%s/api/organizations/%s/usage", apiBase, orgID)
body, status, err := doRequest(url, sessionKey) body, err := fetchWithFallback(url, sessionKey)
if err != nil { if err != nil {
if err.Error() == "auth_expired" {
return &CacheData{Error: "auth_expired", Status: 403}, orgID, err
}
return &CacheData{Error: "fetch_failed", Message: err.Error()}, orgID, err return &CacheData{Error: "fetch_failed", Message: err.Error()}, orgID, err
} }
if status == 401 || status == 403 {
return &CacheData{Error: "auth_expired", Status: status}, orgID, fmt.Errorf("auth_expired")
}
if status != 200 {
return &CacheData{Error: "api_error", Status: status}, orgID, fmt.Errorf("HTTP %d", status)
}
var data CacheData var data CacheData
if err := json.Unmarshal(body, &data); err != nil { if err := json.Unmarshal(body, &data); err != nil {