circuits examples


Full script using circuits

A web scraper with rate limiting and failure handling.

"""
In-memory scraper with circuit breakers for rate limiting and failures.

Uses two circuits:
- Circuit for rate limiting (auto-recovers after cooldown)
- BreakingCircuit for failures (stops after too many errors)
"""

import asyncio
import json
import hashlib
from dataclasses import dataclass
from suitkaise import Circuit, BreakingCircuit


@dataclass
class ScrapeResult:
    """Result of scraping a single URL."""
    url: str
    status: str  # "success", "rate_limited", "server_error", "client_error", "skipped"
    data: dict | None = None
    error: str | None = None


class WebScraper:
    """Scraper with circuit breaker protection (in-memory data)."""
    
    def __init__(
        self,
        data_store: dict[str, tuple[int, dict]],
        max_rate_limits: int = 10,
        max_failures: int = 5,
        rate_limit_sleep: float = 2.0,
        failure_sleep: float = 1.0,
    ):
        self.data_store = data_store
        # CIRCUIT 1: rate limiting
        self.rate_limiter = Circuit(
            num_shorts_to_trip=max_rate_limits,
            sleep_time_after_trip=rate_limit_sleep,
            backoff_factor=1.5,
            max_sleep_time=30.0,
            jitter=0.2
        )
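        # (assumed semantics) each trip multiplies the sleep: 2.0s -> 3.0s -> 4.5s
        # -> ... up to max_sleep_time, with jitter=0.2 adding random variance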
        # CIRCUIT 2: failure handling
        self.failure_circuit = BreakingCircuit(
            num_shorts_to_trip=max_failures,
            sleep_time_after_trip=failure_sleep,
            backoff_factor=2.0,
            max_sleep_time=60.0,
            jitter=0.1
        )
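        # (assumed semantics) unlike Circuit, BreakingCircuit does not auto-recover:
        # after max_failures shorts, .broken stays True until reset() is called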
    
    async def scrape(self, urls: list[str]) -> list[ScrapeResult]:
        """Scrape multiple URLs with circuit breaker protection."""
        results = []
        for url in urls:
            if self.failure_circuit.broken:
                results.append(ScrapeResult(
                    url=url,
                    status="skipped",
                    error="Too many failures, circuit broken"
                ))
                continue
            result = await self._scrape_url(url)
            results.append(result)
        return results
    
    async def _scrape_url(self, url: str) -> ScrapeResult:
        """Scrape a single URL with error handling."""
        status, payload = self.data_store[url]
        
        # simulate real work: serialize and hash the payload regardless of status
        data_bytes = json.dumps(payload).encode()
        digest = hashlib.sha256(data_bytes).hexdigest()
        
        if status == 429:
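            # (assumed semantics) short.asynced() returns an awaitable version of
            # short(); awaiting it counts one hit and sleeps once the circuit trips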
            await self.rate_limiter.short.asynced()()
            return ScrapeResult(url=url, status="rate_limited")
        
        if status >= 500:
            await self.failure_circuit.short.asynced()()
            return ScrapeResult(url=url, status="server_error", error=f"HTTP {status}")
        
        if status >= 400:
            return ScrapeResult(url=url, status="client_error", error=f"HTTP {status}")
        
        # success: parse and return with hash
        data = json.loads(data_bytes)
        data["hash"] = digest[:8]
        return ScrapeResult(url=url, status="success", data=data)
    
    def get_stats(self) -> dict:
        """Get current circuit statistics."""
        return {
            "rate_limit_trips": self.rate_limiter.total_trips,
            "rate_limit_sleep": self.rate_limiter.current_sleep_time,
            "failure_trips": self.failure_circuit.total_trips,
            "failure_circuit_broken": self.failure_circuit.broken,
        }
    
    def reset(self):
        """Reset circuits for a new batch of URLs."""
        self.failure_circuit.reset()
        self.rate_limiter.reset_backoff()
        self.failure_circuit.reset_backoff()


async def main():
    # in-memory responses: url -> (status_code, payload)
    data_store = {
        "mem://users/1": (200, {"id": 1, "name": "Ada"}),
        "mem://users/2": (200, {"id": 2, "name": "Lin"}),
        "mem://users/3": (429, {"detail": "rate limited"}),
        "mem://users/4": (500, {"detail": "server error"}),
        "mem://users/5": (404, {"detail": "not found"}),
    }
    
    urls = list(data_store.keys())
    
    scraper = WebScraper(
        data_store=data_store,
        max_rate_limits=2,
        max_failures=2,
        rate_limit_sleep=2.0,
        failure_sleep=1.0,
    )
    
    results = await scraper.scrape(urls)
    
    success = sum(1 for r in results if r.status == "success")
    failed = sum(1 for r in results if r.status == "server_error")
    skipped = sum(1 for r in results if r.status == "skipped")
    
    print(f"Results: {success} success, {failed} failed, {skipped} skipped")
    print(f"Stats: {scraper.get_stats()}")
    
    if scraper.failure_circuit.broken:
        print("Circuit broke, will reset and retry later...")
        scraper.reset()


if __name__ == "__main__":
    asyncio.run(main())
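
If the failure circuit breaks partway through a batch, the remaining URLs come back as "skipped". A minimal sketch of one way to retry them after reset(), reusing WebScraper and ScrapeResult from the script above; the retry_skipped helper and its merge policy are illustrative, not part of suitkaise.

async def retry_skipped(
    scraper: WebScraper,
    results: list[ScrapeResult],
) -> list[ScrapeResult]:
    """Retry URLs that were skipped because the failure circuit broke."""
    skipped_urls = [r.url for r in results if r.status == "skipped"]
    if not skipped_urls:
        return results
    # clear the broken state and backoff before the second pass
    scraper.reset()
    retried = await scraper.scrape(skipped_urls)
    # replace each skipped result with its retried counterpart
    by_url = {r.url: r for r in retried}
    return [by_url.get(r.url, r) for r in results]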