renamed token bucket module so it's only home to the in-memory, lazy refill version

2023-08-11 19:02:56 +03:00
parent 9bab993d15
commit e24d166cb2
2 changed files with 2 additions and 2 deletions


@@ -4,7 +4,7 @@ These are implementations of different (in-application) rate limiting algorithms
`identifier` is used as the first (usually only) argument for each implementation
because it might refer to IP address, a session ID, or perhaps an API key or token.
"""
-from .token_bucket import token_bucket_in_memory_lazy_refill, TooManyRequests
+from .token_bucket_in_memory import token_bucket_in_memory_lazy_refill, TooManyRequests
from .leaky_bucket import (
leaking_bucket_dequeue,
leaking_bucket_enqueue,


@@ -16,7 +16,7 @@ REFILL_EVERY_SECONDS = 15
NUM_TOKENS_TO_REFILL = 4
-def get_entry_from_token_bucket(identifier: str) -> dict | None:
+def get_entry_from_token_bucket_in_memory(identifier: str) -> dict | None:
"""
This is implemented independently in order to decouple it from its caller.
Here it is initially implemented in-memory, but for scalability we'd
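The diff only shows the renamed import and function signature, so here is a minimal sketch of what the lazy-refill, in-memory token bucket referenced above might look like. The constants `REFILL_EVERY_SECONDS` and `NUM_TOKENS_TO_REFILL` and the renamed function come from the diff context; the `MAX_TOKENS` capacity, the module-level `_buckets` store, and the use of `time.monotonic()` are assumptions made purely for illustration.

```python
import time

# Values visible in the diff context above.
REFILL_EVERY_SECONDS = 15
NUM_TOKENS_TO_REFILL = 4

# Assumed for this sketch: bucket capacity and an in-memory store mapping
# identifier -> {"tokens": int, "last_refill": float}. Neither appears in the diff.
MAX_TOKENS = 4
_buckets: dict[str, dict] = {}


class TooManyRequests(Exception):
    """Raised when the bucket for an identifier has no tokens left."""


def get_entry_from_token_bucket_in_memory(identifier: str) -> dict | None:
    """Return the stored bucket entry for this identifier, or None if unseen."""
    return _buckets.get(identifier)


def token_bucket_in_memory_lazy_refill(identifier: str) -> None:
    """Consume one token, refilling lazily based on elapsed time.

    Instead of a background job topping buckets up every interval, the refill
    is computed on demand from how long ago the last refill happened.
    """
    now = time.monotonic()
    entry = get_entry_from_token_bucket_in_memory(identifier)
    if entry is None:
        entry = {"tokens": MAX_TOKENS, "last_refill": now}
        _buckets[identifier] = entry

    # Lazy refill: credit every whole interval that has elapsed since the
    # last refill, capped at the bucket capacity.
    elapsed = now - entry["last_refill"]
    intervals = int(elapsed // REFILL_EVERY_SECONDS)
    if intervals > 0:
        entry["tokens"] = min(MAX_TOKENS, entry["tokens"] + intervals * NUM_TOKENS_TO_REFILL)
        entry["last_refill"] += intervals * REFILL_EVERY_SECONDS

    if entry["tokens"] <= 0:
        raise TooManyRequests(identifier)
    entry["tokens"] -= 1
```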