Skip to content

Commit cf4dde5

Browse files
fix: resolve all Ty type checker errors
Fix 22 type checking errors by refactoring decorators with ParamSpec, implementing descriptor protocol for cache decorator, updating port interfaces, and adding TYPE_CHECKING imports. No cast() usage.
1 parent 287543f commit cf4dde5

22 files changed

Lines changed: 370 additions & 270 deletions

archipy/adapters/redis/mocks.py

Lines changed: 42 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,45 @@
1414
from archipy.configs.config_template import RedisConfig, RedisMode
1515

1616

17+
class FakeRedisClusterWrapper(fakeredis.FakeRedis):
    """Wrapper around FakeRedis that adds cluster-specific methods.

    The returned values mimic a healthy 3-master / 3-replica cluster so
    adapter code that inspects cluster state can be exercised in tests.
    """

    def cluster_info(self) -> dict[str, Any]:
        """Return fake cluster info for a fully-assigned, healthy cluster."""
        return {
            "cluster_state": "ok",
            "cluster_slots_assigned": 16384,
            "cluster_slots_ok": 16384,
            "cluster_slots_pfail": 0,
            "cluster_slots_fail": 0,
            "cluster_known_nodes": 6,
            "cluster_size": 3,
        }

    def cluster_nodes(self) -> str:
        """Return fake cluster nodes info."""
        return "fake cluster nodes info"

    def cluster_slots(self) -> list[tuple[int, int, list[str]]]:
        """Return fake cluster slot ranges covering all 16384 slots."""
        slot1: tuple[int, int, list[str]] = (0, 5460, ["127.0.0.1", "7000"])
        slot2: tuple[int, int, list[str]] = (5461, 10922, ["127.0.0.1", "7001"])
        slot3: tuple[int, int, list[str]] = (10923, 16383, ["127.0.0.1", "7002"])
        return [slot1, slot2, slot3]

    def cluster_keyslot(self, key: str) -> int:
        """Return the cluster slot for a key.

        Uses CRC16-CCITT (XModem) modulo 16384, the same algorithm real
        Redis uses for CLUSTER KEYSLOT. The builtin ``hash()`` must not be
        used here: it is salted per interpreter run (PYTHONHASHSEED), so
        slot assignments would differ between test processes.
        Hash-tag (``{...}``) handling is intentionally omitted for the mock.
        """
        import binascii  # local import: keeps module-level deps unchanged

        raw = key.encode() if isinstance(key, str) else bytes(key)
        return binascii.crc_hqx(raw, 0) % 16384

    def cluster_countkeysinslot(self, slot: int) -> int:
        """Return fake count of keys in a slot (always empty)."""
        return 0

    def cluster_get_keys_in_slot(self, slot: int, count: int) -> list[str]:
        """Return fake keys in a slot (always empty)."""
        return []
55+
1756
class RedisMock(RedisAdapter):
1857
"""A Redis adapter implementation using fakeredis for testing."""
1958

@@ -28,29 +67,10 @@ def __init__(self, redis_config: RedisConfig | None = None) -> None:
2867

2968
def _setup_fake_clients(self) -> None:
3069
"""Setup fake Redis clients that simulate different modes."""
31-
fake_client: Any = fakeredis.FakeRedis(decode_responses=True)
32-
33-
# For testing purposes, we simulate different modes
3470
if self.config.MODE == RedisMode.CLUSTER:
35-
# Add cluster-specific mock methods using setattr for dynamic attributes
36-
fake_client.cluster_info = lambda: {
37-
"cluster_state": "ok",
38-
"cluster_slots_assigned": 16384,
39-
"cluster_slots_ok": 16384,
40-
"cluster_slots_pfail": 0,
41-
"cluster_slots_fail": 0,
42-
"cluster_known_nodes": 6,
43-
"cluster_size": 3,
44-
}
45-
fake_client.cluster_nodes = lambda: "fake cluster nodes info"
46-
fake_client.cluster_slots = lambda: [
47-
(0, 5460, ["127.0.0.1", 7000]),
48-
(5461, 10922, ["127.0.0.1", 7001]),
49-
(10923, 16383, ["127.0.0.1", 7002]),
50-
]
51-
fake_client.cluster_keyslot = lambda key: hash(key) % 16384
52-
fake_client.cluster_countkeysinslot = lambda slot: 0
53-
fake_client.cluster_get_keys_in_slot = lambda slot, count: []
71+
fake_client: Redis = FakeRedisClusterWrapper(decode_responses=True)
72+
else:
73+
fake_client = fakeredis.FakeRedis(decode_responses=True)
5474

5575
self.client = fake_client
5676
self.read_only_client = fake_client

archipy/adapters/temporal/ports.py

Lines changed: 20 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55
the ArchiPy architecture.
66
"""
77

8-
from abc import abstractmethod
8+
from abc import ABC, abstractmethod
99
from collections.abc import Callable
1010
from typing import TYPE_CHECKING, Any, TypeVar
1111

@@ -283,7 +283,7 @@ async def start_worker(
283283
self,
284284
task_queue: str,
285285
workflows: list[type] | None = None,
286-
activities: list[object] | None = None,
286+
activities: list[Callable[..., Any]] | None = None,
287287
build_id: str | None = None,
288288
identity: str | None = None,
289289
max_concurrent_workflow_tasks: int | None = None,
@@ -295,7 +295,7 @@ async def start_worker(
295295
task_queue (str): The task queue this worker will poll from.
296296
workflows (list[type], optional): List of workflow classes to register.
297297
Defaults to None.
298-
activities (list[object], optional): List of activity instances to register.
298+
activities (list[Callable], optional): List of activity callables to register.
299299
Defaults to None.
300300
build_id (str, optional): Build identifier for worker versioning.
301301
Defaults to None.
@@ -358,7 +358,21 @@ class WorkflowDescription:
358358
pass
359359

360360

361-
class WorkerHandle(ABC):
    """Abstract base type for a handle to a running worker.

    Concrete adapter implementations extend this class; it defines the
    common interface for controlling a worker after it has been started.
    """

    # Unique identifier of the worker instance this handle controls.
    worker_id: str
    # Name of the task queue the worker polls from.
    task_queue: str

    @abstractmethod
    async def stop(self, grace_period: int = 30) -> None:
        """Stop the worker gracefully.

        Args:
            grace_period: Maximum time in seconds to wait for graceful shutdown.
        """
        ...

archipy/adapters/temporal/worker.py

Lines changed: 9 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@
77

88
import asyncio
99
import logging
10+
from collections.abc import Callable
1011
from typing import Any, override
1112
from uuid import uuid4
1213

@@ -31,7 +32,7 @@ class WorkerHandle(PortWorkerHandle):
3132
worker_id (str): Unique identifier for this worker instance.
3233
task_queue (str): The task queue this worker polls from.
3334
workflows (list[type]): List of workflow types registered with this worker.
34-
activities (list[object]): List of activity instances registered with this worker.
35+
activities (list[Callable]): List of activity callables registered with this worker.
3536
build_id (str | None): Build identifier for worker versioning.
3637
identity (str | None): Worker identity for debugging and monitoring.
3738
max_concurrent_workflow_tasks (int): Maximum concurrent workflow tasks.
@@ -44,7 +45,7 @@ def __init__(
4445
worker_id: str,
4546
task_queue: str,
4647
workflows: list[type] | None = None,
47-
activities: list[object] | None = None,
48+
activities: list[Callable[..., Any]] | None = None,
4849
build_id: str | None = None,
4950
identity: str | None = None,
5051
max_concurrent_workflow_tasks: int | None = None,
@@ -57,7 +58,7 @@ def __init__(
5758
worker_id (str): Unique identifier for this worker instance.
5859
task_queue (str): The task queue this worker polls from.
5960
workflows (list[type], optional): List of workflow types. Defaults to None.
60-
activities (list[object], optional): List of activity instances. Defaults to None.
61+
activities (list[Callable], optional): List of activity callables. Defaults to None.
6162
build_id (str, optional): Build identifier for worker versioning. Defaults to None.
6263
identity (str, optional): Worker identity. Defaults to None.
6364
max_concurrent_workflow_tasks (int, optional): Maximum concurrent workflow tasks.
@@ -253,7 +254,7 @@ async def start_worker(
253254
self,
254255
task_queue: str,
255256
workflows: list[type] | None = None,
256-
activities: list[object] | None = None,
257+
activities: list[Callable[..., Any]] | None = None,
257258
build_id: str | None = None,
258259
identity: str | None = None,
259260
max_concurrent_workflow_tasks: int | None = None,
@@ -265,7 +266,7 @@ async def start_worker(
265266
task_queue (str): The task queue this worker will poll from.
266267
workflows (list[type], optional): List of workflow classes to register.
267268
Defaults to None.
268-
activities (list[object], optional): List of activity instances to register.
269+
activities (list[Callable], optional): List of activity callables to register.
269270
Defaults to None.
270271
build_id (str, optional): Build identifier for worker versioning.
271272
Defaults to None.
@@ -288,13 +289,11 @@ async def start_worker(
288289

289290
try:
290291
# Create the Temporal worker
291-
# Prepare activities list - Worker accepts list of activity instances
292-
activities_list: list[Any] = activities if activities is not None else []
293292
worker = Worker(
294293
client,
295294
task_queue=task_queue,
296295
workflows=workflows or [],
297-
activities=activities_list,
296+
activities=activities or [],
298297
build_id=build_id,
299298
identity=worker_identity,
300299
max_concurrent_workflow_tasks=max_concurrent_workflow_tasks,
@@ -345,11 +344,11 @@ async def start_worker(
345344
) from error
346345

347346
@override
348-
async def stop_worker(self, worker_handle: WorkerHandle) -> None:
347+
async def stop_worker(self, worker_handle: PortWorkerHandle) -> None:
349348
"""Stop a running Temporal worker.
350349
351350
Args:
352-
worker_handle (WorkerHandle): Handle to the worker to stop.
351+
worker_handle (PortWorkerHandle): Handle to the worker to stop.
353352
354353
Raises:
355354
WorkerShutdownError: If the worker fails to stop gracefully.
Lines changed: 66 additions & 42 deletions
Original file line numberDiff line numberDiff line change
@@ -1,28 +1,76 @@
11
from collections.abc import Callable
22
from functools import wraps
3-
from typing import Any, Protocol, TypeVar
4-
5-
T = TypeVar("T")
6-
R = TypeVar("R")
7-
P_co = TypeVar("P_co", bound=Callable[..., Any], covariant=True)
8-
9-
10-
class ClearableFunction(Protocol[P_co]):
11-
"""Protocol for a function with a clear_cache method."""
12-
13-
def __call__(self, *args: object, **kwargs: object) -> object:
14-
"""Call the function."""
15-
...
3+
from typing import Any
4+
5+
6+
class CachedFunction[**P, R]:
7+
"""Wrapper class for a cached function with a clear_cache method."""
8+
9+
def __init__(self, func: Callable[P, R], cache: Any) -> None:
10+
"""Initialize the cached function wrapper.
11+
12+
Args:
13+
func: The function to wrap.
14+
cache: The cache instance to use.
15+
"""
16+
self._func = func
17+
self._cache = cache
18+
# Preserve function metadata
19+
wraps(func)(self)
20+
21+
def __get__(self, obj: object, objtype: type | None = None) -> CachedFunction[P, R]:
22+
"""Support instance methods by implementing descriptor protocol."""
23+
if obj is None:
24+
return self
25+
# Return a bound method-like callable
26+
from functools import partial
27+
28+
bound_call = partial(self.__call__, obj)
29+
# Create a new CachedFunction instance that wraps the bound method
30+
# This ensures clear_cache is available on the bound method
31+
bound_cached = CachedFunction(bound_call, self._cache)
32+
return bound_cached
33+
34+
def __call__(self, *args: P.args, **kwargs: P.kwargs) -> R:
35+
"""Call the cached function.
36+
37+
Args:
38+
*args: Positional arguments to pass to the function.
39+
**kwargs: Keyword arguments to pass to the function.
40+
41+
Returns:
42+
The result of the function call (from cache or fresh).
43+
"""
44+
# Create a key based on function name, args, and kwargs
45+
func_name = getattr(self._func, "__name__", "unknown")
46+
key_parts = [func_name]
47+
# Skip first arg if it looks like 'self' (for instance methods)
48+
# We check if args[0] has __dict__ which indicates it's likely an instance
49+
if args and hasattr(args[0], "__dict__"):
50+
key_parts.extend(str(arg) for arg in args[1:])
51+
else:
52+
key_parts.extend(str(arg) for arg in args)
53+
key_parts.extend(f"{k}:{v}" for k, v in sorted(kwargs.items()))
54+
key = ":".join(key_parts)
55+
56+
# Check if result is in cache
57+
if key in self._cache:
58+
return self._cache[key]
59+
60+
# Call the function and cache the result
61+
result = self._func(*args, **kwargs)
62+
self._cache[key] = result
63+
return result
1664

1765
def clear_cache(self) -> None:
1866
"""Clear the cache."""
19-
...
67+
self._cache.clear()
2068

2169

22-
def ttl_cache_decorator(
70+
def ttl_cache_decorator[**P, R](
2371
ttl_seconds: int = 300,
2472
maxsize: int = 100,
25-
) -> Callable[[Callable[..., Any]], ClearableFunction[Callable[..., Any]]]:
73+
) -> Callable[[Callable[P, R]], CachedFunction[P, R]]:
2674
"""Decorator that provides a TTL cache for methods.
2775
2876
Args:
@@ -36,31 +84,7 @@ def ttl_cache_decorator(
3684

3785
cache: TTLCache = TTLCache(maxsize=maxsize, ttl=ttl_seconds)
3886

39-
def decorator(func: Callable[..., Any]) -> ClearableFunction[Callable[..., Any]]:
40-
@wraps(func)
41-
def wrapper(*args: object, **kwargs: object) -> object:
42-
# Create a key based on function name, args, and kwargs
43-
func_name = getattr(func, "__name__", "unknown")
44-
key_parts = [func_name]
45-
key_parts.extend(str(arg) for arg in args[1:]) # Skip self
46-
key_parts.extend(f"{k}:{v}" for k, v in sorted(kwargs.items()))
47-
key = ":".join(key_parts)
48-
49-
# Check if result is in cache
50-
if key in cache:
51-
return cache[key]
52-
53-
# Call the function and cache the result
54-
result = func(*args, **kwargs)
55-
cache[key] = result
56-
return result
57-
58-
# Add a method to clear the cache
59-
def clear_cache() -> None:
60-
cache.clear()
61-
62-
# Add clear_cache method to wrapper using setattr for dynamic attribute
63-
wrapper.clear_cache = clear_cache
64-
return wrapper
87+
def decorator(func: Callable[P, R]) -> CachedFunction[P, R]:
88+
return CachedFunction(func, cache)
6589

6690
return decorator

archipy/helpers/decorators/deprecation_exception.py

Lines changed: 14 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,18 +1,22 @@
11
from collections.abc import Callable
22
from functools import wraps
3-
from typing import Any, TypeVar
3+
from typing import Any, ParamSpec, TypeVar
44

55
from archipy.models.errors import DeprecationError
66
from archipy.models.types.language_type import LanguageType
77

8-
# Define a type variable for the return type of the decorated function
9-
F = TypeVar("F", bound=Callable[..., Any])
8+
# Define type variables for the decorator
9+
P = ParamSpec("P")
10+
R = TypeVar("R")
1011

1112
# Define a type variable for the return type of the decorated class
1213
T = TypeVar("T", bound=type[Any])
1314

1415

15-
def method_deprecation_error(operation: str | None = None, lang: LanguageType = LanguageType.EN) -> Callable[[F], F]:
16+
def method_deprecation_error(
17+
operation: str | None = None,
18+
lang: LanguageType = LanguageType.EN,
19+
) -> Callable[[Callable[P, R]], Callable[P, R]]:
1620
"""Decorator that raises a DeprecationError when the decorated method is called.
1721
1822
This decorator is used to mark methods as deprecated and immediately prevent
@@ -48,10 +52,13 @@ def old_method(self):
4852
```
4953
"""
5054

51-
def decorator(func: F) -> F:
55+
def decorator(func: Callable[P, R]) -> Callable[P, R]:
56+
# Capture function name before wrapping - use getattr for type safety
57+
func_name = getattr(func, "__name__", "unknown")
58+
5259
@wraps(func)
53-
def wrapper(*_args: Any, **_kwargs: Any) -> Any:
54-
operation_name = operation if operation is not None else func.__name__
60+
def wrapper(*_args: P.args, **_kwargs: P.kwargs) -> R:
61+
operation_name = operation if operation is not None else func_name
5562
raise DeprecationError(deprecated_feature=operation_name, lang=lang)
5663

5764
return wrapper

0 commit comments

Comments
 (0)