|
| 1 | +"""Cachier S3 backend example. |
| 2 | +
|
| 3 | +Demonstrates persistent function caching backed by AWS S3 (or any S3-compatible |
| 4 | +service). Requires boto3 to be installed:: |
| 5 | +
|
| 6 | + pip install cachier[s3] |
| 7 | +
|
| 8 | +A real S3 bucket (or a local S3-compatible service such as MinIO / localstack) |
| 9 | +is needed to run this example. Adjust the configuration variables below to |
| 10 | +match your environment. |
| 11 | +
|
| 12 | +""" |
| 13 | + |
| 14 | +import time |
| 15 | +from datetime import timedelta |
| 16 | + |
| 17 | +try: |
| 18 | + import boto3 |
| 19 | + |
| 20 | + from cachier import cachier |
| 21 | +except ImportError as exc: |
| 22 | + print(f"Missing required package: {exc}") |
| 23 | + print("Install with: pip install cachier[s3]") |
| 24 | + raise SystemExit(1) from exc |
| 25 | + |
| 26 | +# --------------------------------------------------------------------------- |
| 27 | +# Configuration - adjust these to your environment |
| 28 | +# --------------------------------------------------------------------------- |
| 29 | +BUCKET_NAME = "my-cachier-bucket" |
| 30 | +REGION = "us-east-1" |
| 31 | + |
| 32 | +# Optional: point to a local S3-compatible service |
| 33 | +# ENDPOINT_URL = "http://localhost:9000" # MinIO default |
| 34 | +ENDPOINT_URL = None |
| 35 | + |
| 36 | + |
| 37 | +# --------------------------------------------------------------------------- |
| 38 | +# Helper: verify S3 connectivity |
| 39 | +# --------------------------------------------------------------------------- |
| 40 | + |
| 41 | + |
| 42 | +def _check_bucket(client, bucket: str) -> bool: |
| 43 | + """Return True if the bucket is accessible.""" |
| 44 | + try: |
| 45 | + client.head_bucket(Bucket=bucket) |
| 46 | + return True |
| 47 | + except Exception as exc: |
| 48 | + print(f"Cannot access bucket '{bucket}': {exc}") |
| 49 | + return False |
| 50 | + |
| 51 | + |
| 52 | +# --------------------------------------------------------------------------- |
| 53 | +# Demos |
| 54 | +# --------------------------------------------------------------------------- |
| 55 | + |
| 56 | + |
def demo_basic_caching():
    """Demonstrate that an S3-cached call computes once, then serves from cache."""
    print("\n=== Basic S3 caching ===")

    @cachier(
        backend="s3",
        s3_bucket=BUCKET_NAME,
        s3_region=REGION,
        s3_endpoint_url=ENDPOINT_URL,
    )
    def expensive(n: int) -> int:
        """Pretend to do heavy work by sleeping for a second."""
        print(f"  computing expensive({n})...")
        time.sleep(1)
        return n * n

    # Start from a clean slate so the first call is guaranteed to compute.
    expensive.clear_cache()

    t0 = time.time()
    first = expensive(5)
    elapsed_first = time.time() - t0
    print(f"First call: {first} ({elapsed_first:.2f}s)")

    t0 = time.time()
    second = expensive(5)
    elapsed_second = time.time() - t0
    print(f"Second call: {second} ({elapsed_second:.2f}s) - from cache")

    assert first == second
    # The cached read should skip the 1s sleep entirely.
    assert elapsed_second < elapsed_first
    print("Basic caching works correctly.")
| 88 | + |
| 89 | + |
def demo_stale_after():
    """Demonstrate cache expiry: entries older than stale_after are recomputed."""
    print("\n=== Stale-after demo ===")

    @cachier(
        backend="s3",
        s3_bucket=BUCKET_NAME,
        s3_region=REGION,
        s3_endpoint_url=ENDPOINT_URL,
        stale_after=timedelta(seconds=3),
    )
    def timed(n: int) -> float:
        # Returning the wall-clock time makes "recomputed" trivially observable.
        print(f"  computing timed({n})...")
        return time.time()

    timed.clear_cache()
    first = timed(1)
    cached = timed(1)
    assert first == cached, "Second call should hit cache"

    print("Sleeping 4 seconds so the entry becomes stale...")
    time.sleep(4)

    refreshed = timed(1)
    # A later timestamp proves the function body ran again after expiry.
    assert refreshed > first, "Should have recomputed after stale period"
    print("Stale-after works correctly.")
| 116 | + |
| 117 | + |
def demo_client_factory():
    """Demonstrate supplying a callable that builds the S3 client on demand."""
    print("\n=== Client factory demo ===")

    def make_client():
        """Construct a boto3 S3 client lazily, honoring the optional endpoint."""
        opts = {"region_name": REGION}
        if ENDPOINT_URL:
            opts["endpoint_url"] = ENDPOINT_URL
        return boto3.client("s3", **opts)

    @cachier(
        backend="s3",
        s3_bucket=BUCKET_NAME,
        s3_client_factory=make_client,
    )
    def compute(n: int) -> int:
        return n + 100

    compute.clear_cache()
    # Two identical calls must agree; the second comes from the cache.
    assert compute(7) == compute(7)
    print("Client factory works correctly.")
| 140 | + |
| 141 | + |
def demo_cache_management():
    """Demonstrate clear_cache() and the cachier__overwrite_cache keyword."""
    print("\n=== Cache management demo ===")
    # Single-element list so the nested function can mutate the counter.
    calls = [0]

    @cachier(
        backend="s3",
        s3_bucket=BUCKET_NAME,
        s3_region=REGION,
        s3_endpoint_url=ENDPOINT_URL,
    )
    def managed(n: int) -> int:
        calls[0] += 1
        return n * 3

    managed.clear_cache()
    managed(10)
    managed(10)
    assert calls[0] == 1, "Should have been called once (cached on second call)"

    managed.clear_cache()
    managed(10)
    assert calls[0] == 2, "Should have recomputed after cache clear"

    # overwrite_cache forces a recompute even though a fresh entry exists.
    managed(10, cachier__overwrite_cache=True)
    assert calls[0] == 3, "Should have recomputed due to overwrite_cache"
    print("Cache management works correctly.")
| 169 | + |
| 170 | + |
| 171 | +# --------------------------------------------------------------------------- |
| 172 | +# Entry point |
| 173 | +# --------------------------------------------------------------------------- |
| 174 | + |
| 175 | + |
def main():
    """Verify bucket connectivity, then run every S3 backend demo."""
    print("Cachier S3 Backend Demo")
    print("=" * 50)

    client_kwargs = {"region_name": REGION}
    if ENDPOINT_URL:
        client_kwargs["endpoint_url"] = ENDPOINT_URL
    client = boto3.client("s3", **client_kwargs)

    # Fail fast with a helpful hint if the bucket is missing or unreachable.
    if not _check_bucket(client, BUCKET_NAME):
        print(f"\nCreate the bucket first: aws s3 mb s3://{BUCKET_NAME} --region {REGION}")
        raise SystemExit(1)

    try:
        demo_basic_caching()
        demo_stale_after()
        demo_client_factory()
        demo_cache_management()

        print("\n" + "=" * 50)
        print("All S3 demos completed successfully.")
        print("\nKey benefits of the S3 backend:")
        print("- Persistent cache survives process restarts")
        print("- Shared across machines without a running service")
        print("- Works with any S3-compatible object storage")
    finally:
        # Always release the client's underlying connection pool.
        client.close()
| 205 | + |
| 206 | + |
| 207 | +if __name__ == "__main__": |
| 208 | + main() |
0 commit comments