renamed token_bucket to reflect its lazy refill and in-memory storage, added a task to implement other versions

2023-08-11 18:51:51 +03:00
parent fe9e25b23d
commit ea808b1895
2 changed files with 3 additions and 5 deletions


@@ -10,8 +10,8 @@ Over a year ago (early 2022) I did a job search, and some of the interview proce
 - [ ] implement token bucket
   - [ ] in-app
-  - [x] in-memory
-  - [ ] redis
+  - [x] in-memory, lazy refill
+  - [ ] redis, process to refill
 - [ ] implement leaky bucket
   - in-app
   - [x] redis
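For context on the two refill strategies named in the checklist: the in-memory variant tops up tokens lazily on each request, while the redis variant would rely on a separate process that periodically refills every bucket. A minimal sketch of what such a refill process could look like, assuming buckets are stored as integer counters under an assumed `token_bucket:` key prefix; the interval, rate, and capacity below are illustrative assumptions, not the repo's actual code:

```python
import time

import redis

REFILL_INTERVAL_SECONDS = 1  # assumed refill cadence
REFILL_AMOUNT = 10           # assumed tokens added per interval
BUCKET_CAPACITY = 100        # assumed maximum bucket size

r = redis.Redis()


def refill_all_buckets() -> None:
    # Top up every bucket, never exceeding capacity. A Lua script or
    # WATCH/MULTI would make this atomic; a plain read-modify-write
    # keeps the sketch short.
    for key in r.scan_iter(match="token_bucket:*"):
        current = int(r.get(key) or 0)
        r.set(key, min(BUCKET_CAPACITY, current + REFILL_AMOUNT))


if __name__ == "__main__":
    while True:
        refill_all_buckets()
        time.sleep(REFILL_INTERVAL_SECONDS)
```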


@@ -33,8 +33,6 @@ def leaking_bucket_enqueue(identifier: str, data: str) -> None:
     Requests are pulled from the queue and processed at regular intervals in
     `leaking_bucket_dequeue`
-    TODO: implement `leaking_bucket_dequeue`
-    - [ ] done
     """
     store_name = f"{STORE_NAME_PREFIX_LEAKING_BUCKET}:{identifier}"
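The removed TODO referenced `leaking_bucket_dequeue`, which the docstring says pulls requests from the queue and processes them at regular intervals. A minimal sketch of such a dequeue loop, assuming the queue is a Redis list under the same `STORE_NAME_PREFIX_LEAKING_BUCKET` key layout as the enqueue side; the interval and the `process` handler are assumptions:

```python
import time

import redis

STORE_NAME_PREFIX_LEAKING_BUCKET = "leaking_bucket"  # assumed to match the enqueue side
DEQUEUE_INTERVAL_SECONDS = 1  # assumed processing interval

r = redis.Redis()


def process(data: bytes) -> None:
    # Placeholder for whatever work the dequeued request represents.
    print(f"processing {data!r}")


def leaking_bucket_dequeue(identifier: str) -> None:
    # Pull one request per interval from the head of the queue so that
    # requests drain at a constant rate regardless of arrival bursts.
    store_name = f"{STORE_NAME_PREFIX_LEAKING_BUCKET}:{identifier}"
    while True:
        data = r.lpop(store_name)
        if data is not None:
            process(data)
        time.sleep(DEQUEUE_INTERVAL_SECONDS)
```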
@@ -89,7 +87,7 @@ def get_entry_from_token_bucket(identifier: str) -> dict | None:
     return TOKEN_BUCKET.get(identifier)
 
-def token_bucket(identifier: str) -> str:
+def token_bucket_in_memory_lazy_refill(identifier: str) -> str:
     """
     Tokens are put in the bucket at preset rates periodically.
     Once the bucket is full, no more tokens are added.
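For reference, a lazy-refill token bucket like the renamed function typically has no background process at all: each call credits the tokens that would have accumulated since the last call, then tries to consume one. A minimal sketch of that pattern, with assumed rate/capacity constants and status strings; the repo's actual `TOKEN_BUCKET` entry layout and return values may differ:

```python
import time

REFILL_RATE = 10       # assumed tokens added per second
BUCKET_CAPACITY = 100  # assumed maximum bucket size

TOKEN_BUCKET: dict[str, dict] = {}


def token_bucket_in_memory_lazy_refill(identifier: str) -> str:
    now = time.monotonic()
    entry = TOKEN_BUCKET.setdefault(
        identifier, {"tokens": BUCKET_CAPACITY, "last_refill": now}
    )
    # Lazy refill: credit the tokens accumulated since the last call,
    # capped at the bucket's capacity.
    elapsed = now - entry["last_refill"]
    entry["tokens"] = min(BUCKET_CAPACITY, entry["tokens"] + elapsed * REFILL_RATE)
    entry["last_refill"] = now
    if entry["tokens"] >= 1:
        entry["tokens"] -= 1
        return "allowed"   # assumed status string
    return "rejected"      # assumed status string
```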