Compare commits
2 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
2cb89d3c54 | ||
|
|
ba3b73c3dd |
10
CHANGELOG.md
10
CHANGELOG.md
@@ -4,6 +4,15 @@ All notable changes to this project will be documented in this file.
|
||||
|
||||
The format is based on [Keep a Changelog](https://keepachangelog.com/), and this project adheres to [Semantic Versioning](https://semver.org/).
|
||||
|
||||
## [0.5.0] — 2026-03-21
|
||||
|
||||
### Fixed
|
||||
- **Cloudflare 403 bypass** — API requests blocked by Cloudflare JS challenges now fall back to an automated Chrome instance (non-headless, with an off-screen window, since headless mode trips Cloudflare's detection) using the persistent browser profile, which can solve the challenges natively
|
||||
|
||||
### Added
|
||||
- `internal/browser/fetch.go` — automated Chrome API fetcher using chromedp with the existing browser profile (non-headless, off-screen window; reuses Cloudflare clearance cookies)
|
||||
- `fetchWithFallback()` in fetcher — tries plain HTTP first, falls back to headless Chrome on 403
|
||||
|
||||
## [0.3.0] — 2026-02-26
|
||||
|
||||
Full rewrite from Node.js + Python to Go. Each platform gets a single static binary — no runtime dependencies.
|
||||
@@ -44,5 +53,6 @@ First tagged release. Includes the CLI statusline, standalone usage fetcher, cro
|
||||
- Tray icon visibility — switched to Claude orange with full opacity at larger size
|
||||
- Block comment syntax error in cron example
|
||||
|
||||
[0.5.0]: https://git.davoryn.de/calic/claude-statusline/releases/tag/v0.5.0
|
||||
[0.3.0]: https://git.davoryn.de/calic/claude-statusline/releases/tag/v0.3.0
|
||||
[0.2.0]: https://git.davoryn.de/calic/claude-statusline/releases/tag/v0.2.0
|
||||
|
||||
122
internal/browser/fetch.go
Normal file
122
internal/browser/fetch.go
Normal file
@@ -0,0 +1,122 @@
|
||||
package browser
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/chromedp/cdproto/network"
|
||||
"github.com/chromedp/chromedp"
|
||||
|
||||
"git.davoryn.de/calic/claude-statusline/internal/config"
|
||||
)
|
||||
|
||||
// FetchViaChrome navigates to a URL using Chrome with the persistent browser
|
||||
// profile (which has Cloudflare clearance cookies) and returns the response
|
||||
// body. Uses non-headless mode with a minimized/hidden window to avoid
|
||||
// Cloudflare's headless detection, which causes infinite challenge loops.
|
||||
func FetchViaChrome(url string) ([]byte, error) {
|
||||
profileDir := filepath.Join(config.ConfigDir(), "browser-profile")
|
||||
if err := os.MkdirAll(profileDir, 0o755); err != nil {
|
||||
return nil, fmt.Errorf("create browser profile dir: %w", err)
|
||||
}
|
||||
|
||||
// Remove stale lock file from unclean shutdown
|
||||
_ = os.Remove(filepath.Join(profileDir, "SingletonLock"))
|
||||
|
||||
execPath := findBrowserExec()
|
||||
|
||||
// Use non-headless mode: Cloudflare detects headless Chrome and loops
|
||||
// the JS challenge forever. A real (but hidden) browser window passes.
|
||||
opts := append(chromedp.DefaultExecAllocatorOptions[:],
|
||||
chromedp.Flag("headless", false),
|
||||
chromedp.Flag("window-position", "-32000,-32000"), // off-screen
|
||||
chromedp.Flag("window-size", "1,1"),
|
||||
chromedp.Flag("disable-gpu", true),
|
||||
chromedp.Flag("no-first-run", true),
|
||||
chromedp.Flag("disable-extensions", true),
|
||||
chromedp.UserDataDir(profileDir),
|
||||
)
|
||||
if execPath != "" {
|
||||
opts = append(opts, chromedp.ExecPath(execPath))
|
||||
}
|
||||
|
||||
allocCtx, allocCancel := chromedp.NewExecAllocator(context.Background(), opts...)
|
||||
defer allocCancel()
|
||||
|
||||
ctx, cancel := chromedp.NewContext(allocCtx)
|
||||
defer cancel()
|
||||
|
||||
// Total timeout for the operation
|
||||
ctx, timeoutCancel := context.WithTimeout(ctx, 30*time.Second)
|
||||
defer timeoutCancel()
|
||||
|
||||
// Navigate and wait for Cloudflare challenge to resolve.
|
||||
// Poll the page content until we get valid JSON (not the challenge page).
|
||||
if err := chromedp.Run(ctx, chromedp.Navigate(url)); err != nil {
|
||||
return nil, fmt.Errorf("chromedp navigate: %w", err)
|
||||
}
|
||||
|
||||
// Poll for JSON response — Cloudflare challenge takes a few seconds to clear
|
||||
ticker := time.NewTicker(1 * time.Second)
|
||||
defer ticker.Stop()
|
||||
|
||||
for {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return nil, fmt.Errorf("chromedp fetch timed out waiting for JSON response")
|
||||
case <-ticker.C:
|
||||
var body string
|
||||
// Try <pre> first (Chrome wraps JSON in <pre> tags)
|
||||
err := chromedp.Run(ctx,
|
||||
chromedp.Text("pre", &body, chromedp.ByQuery),
|
||||
)
|
||||
if err != nil || body == "" {
|
||||
// Fallback: try body directly
|
||||
_ = chromedp.Run(ctx,
|
||||
chromedp.Text("body", &body, chromedp.ByQuery),
|
||||
)
|
||||
}
|
||||
body = strings.TrimSpace(body)
|
||||
if body == "" {
|
||||
continue
|
||||
}
|
||||
// Check if we got actual JSON (starts with [ or {), not a challenge page
|
||||
if body[0] == '[' || body[0] == '{' {
|
||||
// Also extract any fresh cookies for future plain HTTP attempts
|
||||
_ = extractAndSaveCookies(ctx)
|
||||
cancel() // graceful close, flushes cookies to profile
|
||||
return []byte(body), nil
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// extractAndSaveCookies saves cf_clearance and other Cloudflare cookies
|
||||
// alongside the session key, so plain HTTP requests can try them next time.
|
||||
func extractAndSaveCookies(ctx context.Context) error {
|
||||
cookies, err := network.GetCookies().Do(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var parts []string
|
||||
for _, c := range cookies {
|
||||
if c.Domain == ".claude.ai" || c.Domain == "claude.ai" {
|
||||
if c.Name == "cf_clearance" || c.Name == "__cf_bm" || c.Name == "_cfuvid" {
|
||||
parts = append(parts, c.Name+"="+c.Value)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(parts) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Write Cloudflare cookies to a file the fetcher can read
|
||||
cfPath := filepath.Join(config.ConfigDir(), "cf-cookies")
|
||||
return os.WriteFile(cfPath, []byte(strings.Join(parts, "\n")+"\n"), 0o600)
|
||||
}
|
||||
@@ -4,11 +4,16 @@ import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"math"
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"git.davoryn.de/calic/claude-statusline/internal/browser"
|
||||
"git.davoryn.de/calic/claude-statusline/internal/config"
|
||||
)
|
||||
|
||||
@@ -32,13 +37,21 @@ type ParsedUsage struct {
|
||||
type UpdateCallback func(ParsedUsage)
|
||||
|
||||
// doRequest performs an authenticated HTTP GET to the Claude API.
|
||||
// Includes any saved Cloudflare cookies from previous Chrome fallbacks.
|
||||
func doRequest(url, sessionKey string) ([]byte, int, error) {
|
||||
client := &http.Client{Timeout: 10 * time.Second}
|
||||
req, err := http.NewRequest("GET", url, nil)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
req.Header.Set("Cookie", "sessionKey="+sessionKey)
|
||||
|
||||
cookie := "sessionKey=" + sessionKey
|
||||
// Append Cloudflare cookies if available (saved by Chrome fallback)
|
||||
if cfCookies := loadCFCookies(); cfCookies != "" {
|
||||
cookie += "; " + cfCookies
|
||||
}
|
||||
|
||||
req.Header.Set("Cookie", cookie)
|
||||
req.Header.Set("User-Agent", userAgent)
|
||||
req.Header.Set("Accept", "application/json")
|
||||
req.Header.Set("Referer", "https://claude.ai/")
|
||||
@@ -56,17 +69,54 @@ func doRequest(url, sessionKey string) ([]byte, int, error) {
|
||||
return body, resp.StatusCode, nil
|
||||
}
|
||||
|
||||
// loadCFCookies reads saved Cloudflare cookies from the cf-cookies file.
|
||||
func loadCFCookies() string {
|
||||
data, err := os.ReadFile(filepath.Join(config.ConfigDir(), "cf-cookies"))
|
||||
if err != nil {
|
||||
return ""
|
||||
}
|
||||
// File has one cookie per line (name=value), join with "; "
|
||||
lines := strings.Split(strings.TrimSpace(string(data)), "\n")
|
||||
var valid []string
|
||||
for _, l := range lines {
|
||||
l = strings.TrimSpace(l)
|
||||
if l != "" {
|
||||
valid = append(valid, l)
|
||||
}
|
||||
}
|
||||
return strings.Join(valid, "; ")
|
||||
}
|
||||
|
||||
// fetchWithFallback tries a plain HTTP request first, then falls back to
|
||||
// headless Chrome (which can solve Cloudflare JS challenges) on 403.
|
||||
func fetchWithFallback(url, sessionKey string) ([]byte, error) {
|
||||
body, status, err := doRequest(url, sessionKey)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("request failed: %w", err)
|
||||
}
|
||||
if status == 200 {
|
||||
return body, nil
|
||||
}
|
||||
if status == 401 {
|
||||
return nil, fmt.Errorf("auth_expired")
|
||||
}
|
||||
if status == 403 {
|
||||
// Likely a Cloudflare JS challenge — fall back to headless Chrome
|
||||
log.Printf("HTTP 403 for %s, falling back to headless Chrome", url)
|
||||
chromeBody, chromeErr := browser.FetchViaChrome(url)
|
||||
if chromeErr != nil {
|
||||
return nil, fmt.Errorf("auth_expired") // treat as auth failure if Chrome also fails
|
||||
}
|
||||
return chromeBody, nil
|
||||
}
|
||||
return nil, fmt.Errorf("HTTP %d", status)
|
||||
}
|
||||
|
||||
// DiscoverOrgID fetches the first organization UUID from the API.
|
||||
func DiscoverOrgID(sessionKey string) (string, error) {
|
||||
body, status, err := doRequest(apiBase+"/api/organizations", sessionKey)
|
||||
body, err := fetchWithFallback(apiBase+"/api/organizations", sessionKey)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("request failed: %w", err)
|
||||
}
|
||||
if status == 401 || status == 403 {
|
||||
return "", fmt.Errorf("auth_expired")
|
||||
}
|
||||
if status != 200 {
|
||||
return "", fmt.Errorf("HTTP %d", status)
|
||||
return "", err
|
||||
}
|
||||
|
||||
var orgs []struct {
|
||||
@@ -96,16 +146,13 @@ func FetchUsage(sessionKey, orgID string) (*CacheData, string, error) {
|
||||
}
|
||||
|
||||
url := fmt.Sprintf("%s/api/organizations/%s/usage", apiBase, orgID)
|
||||
body, status, err := doRequest(url, sessionKey)
|
||||
body, err := fetchWithFallback(url, sessionKey)
|
||||
if err != nil {
|
||||
if err.Error() == "auth_expired" {
|
||||
return &CacheData{Error: "auth_expired", Status: 403}, orgID, err
|
||||
}
|
||||
return &CacheData{Error: "fetch_failed", Message: err.Error()}, orgID, err
|
||||
}
|
||||
if status == 401 || status == 403 {
|
||||
return &CacheData{Error: "auth_expired", Status: status}, orgID, fmt.Errorf("auth_expired")
|
||||
}
|
||||
if status != 200 {
|
||||
return &CacheData{Error: "api_error", Status: status}, orgID, fmt.Errorf("HTTP %d", status)
|
||||
}
|
||||
|
||||
var data CacheData
|
||||
if err := json.Unmarshal(body, &data); err != nil {
|
||||
|
||||
Reference in New Issue
Block a user