diff --git a/my_limiter/algos/__init__.py b/my_limiter/algos/__init__.py
index 607d878..ffac238 100644
--- a/my_limiter/algos/__init__.py
+++ b/my_limiter/algos/__init__.py
@@ -4,7 +4,7 @@ These are implementations of different (in-application) rate limiting algorithms
 `identifier` is used as the first (usually only) argument for each implementation
 because it might refer to IP address, a session ID, or perhaps an API key or token.
 """
-from .token_bucket import token_bucket_in_memory_lazy_refill, TooManyRequests
+from .token_bucket_in_memory import token_bucket_in_memory_lazy_refill, TooManyRequests
 from .leaky_bucket import (
     leaking_bucket_dequeue,
     leaking_bucket_enqueue,
diff --git a/my_limiter/algos/token_bucket.py b/my_limiter/algos/token_bucket_in_memory.py
similarity index 95%
rename from my_limiter/algos/token_bucket.py
rename to my_limiter/algos/token_bucket_in_memory.py
index a7c80fa..8e5a981 100644
--- a/my_limiter/algos/token_bucket.py
+++ b/my_limiter/algos/token_bucket_in_memory.py
@@ -16,7 +16,7 @@ REFILL_EVERY_SECONDS = 15
 NUM_TOKENS_TO_REFILL = 4
 
 
-def get_entry_from_token_bucket(identifier: str) -> dict | None:
+def get_entry_from_token_bucket_in_memory(identifier: str) -> dict | None:
     """
     This is implemented independently in order to decouple it from its caller.
     Here it is initially implemented in-memory, but for scalability we'd