Switched the algorithm to the leaky bucket, fixed a couple of bugs to get it working, and added a default argument for 'data' to make testing easier.
@@ -4,7 +4,8 @@ These are implementations of different (in-application) rate limiting algorithms
 `identifier` is used as the first (usually only) argument for each implementation
 because it might refer to IP address, a session ID, or perhaps an API key or token.
 """
-from .token_bucket_in_memory import token_bucket_in_memory_lazy_refill, TooManyRequests
+from .exceptions import TooManyRequests
+from .token_bucket_in_memory import token_bucket_in_memory_lazy_refill
 from .leaky_bucket import (
     leaking_bucket_dequeue,
     leaking_bucket_enqueue,
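For orientation, here is a minimal usage sketch of the package-level exports after this change. It assumes leaking_bucket_enqueue raises TooManyRequests when the caller's queue is full (the raise itself is not visible in this diff), and the identifier and data values are purely illustrative:

from my_limiter.algos import TooManyRequests, leaking_bucket_enqueue

try:
    # The identifier could be an IP address, a session ID, or an API key.
    leaking_bucket_enqueue("203.0.113.7", data="GET /reports")
except TooManyRequests:
    # The caller decides how to respond, e.g. by returning HTTP 429.
    print("rate limited")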
my_limiter/algos/exceptions.py  (new file, +3)
@@ -0,0 +1,3 @@
+
+class TooManyRequests(Exception):
+    pass
@@ -2,13 +2,11 @@ import datetime as dt
 
 import redis
 
+from .exceptions import TooManyRequests
 
 r = redis.Redis()
 
 
-class TooManyRequests(Exception):
-    pass
-
 
 MAX_CAPACITY = 8
 STORE_NAME_PREFIX_LEAKING_BUCKET = "leaking_bucket:queue:tasks"
@@ -17,7 +15,7 @@ RUN_LEAKING_BUCKET_TASKS_EVERY_X_SECONDS = 15
 NUM_TASKS_TO_RUN_FOR_EACH_USER_AT_INTERVAL = 2
 
 
-def leaking_bucket_enqueue(identifier: str, data: str) -> None:
+def leaking_bucket_enqueue(identifier: str, data: str = "") -> None:
     """
     When a request arrives, the system checks if the queue for this particular
     `identifier` is full. If it is not full, the request is added to the queue.
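The docstring above describes the enqueue half of the leaky bucket. The function body is not shown in this diff, so the following is only a hedged sketch of an implementation along those lines, using one Redis list per identifier and the constants shown earlier in this file's diff; the key layout and the raise are assumptions, not the repo's actual code:

import redis

from my_limiter.algos.exceptions import TooManyRequests

r = redis.Redis()

MAX_CAPACITY = 8
STORE_NAME_PREFIX_LEAKING_BUCKET = "leaking_bucket:queue:tasks"


def leaking_bucket_enqueue_sketch(identifier: str, data: str = "") -> None:
    # One Redis list per identifier; this key scheme is an assumption.
    key = f"{STORE_NAME_PREFIX_LEAKING_BUCKET}:{identifier}"

    # If the queue for this identifier is already full, reject the request.
    if r.llen(key) >= MAX_CAPACITY:
        raise TooManyRequests(f"queue for {identifier!r} is full")

    # Otherwise add the request payload to the back of the queue; a worker
    # drains it at a fixed rate (the dequeue half of the algorithm).
    r.rpush(key, data)

Note that the check-then-push above is not atomic; a production version might wrap it in a Lua script or a transaction.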
@@ -2,13 +2,11 @@ import datetime as dt
 
 import redis
 
+from .exceptions import TooManyRequests
 
 r = redis.Redis()
 
 
-class TooManyRequests(Exception):
-    pass
-
 
 TOKEN_BUCKET = {}
 MAX_CAPACITY = 8
@@ -20,7 +18,7 @@ def get_entry_from_token_bucket_in_memory(identifier: str) -> dict | None:
     """
     This is implemented independently in order to decouple it from its caller.
     Here it is initially implemented in-memory, but for scalability we'd
-    want to use something more long-lived.
+    want to use something more durable.
     """
     return TOKEN_BUCKET.get(identifier)
 
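Because the lookup is decoupled from its caller, swapping in a durable store later only means replacing this one function. Purely as an illustration (not part of this commit), a Redis-backed variant could look like the sketch below; the function name and hash-key scheme are hypothetical:

import redis

r = redis.Redis()


def get_entry_from_token_bucket_in_redis(identifier: str) -> dict | None:
    # Store each bucket as a Redis hash; the key prefix is illustrative.
    entry = r.hgetall(f"token_bucket:{identifier}")
    if not entry:
        return None
    # redis-py returns bytes keys/values, so decode before handing back.
    return {k.decode(): v.decode() for k, v in entry.items()}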
@@ -34,7 +32,7 @@ def token_bucket_in_memory_lazy_refill(identifier: str) -> str:
     To be explicit, there is a token bucket for every `identifier`,
     aka every user/IP
     """
-    entry = get_entry_from_token_bucket(identifier)
+    entry = get_entry_from_token_bucket_in_memory(identifier)
 
     if entry is None:
         TOKEN_BUCKET[identifier] = {
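For readers unfamiliar with the technique, "lazy refill" means tokens are only topped up when a request arrives, based on how much time has passed, rather than by a background timer. A generic sketch of that logic follows; the field names, refill rate, and error handling are illustrative and not necessarily what this repo stores in its per-identifier dict:

import datetime as dt

from my_limiter.algos.exceptions import TooManyRequests

TOKEN_BUCKET: dict[str, dict] = {}
MAX_CAPACITY = 8
REFILL_TOKENS_PER_SECOND = 0.5  # illustrative rate


def lazy_refill_then_consume(identifier: str) -> None:
    now = dt.datetime.now()
    entry = TOKEN_BUCKET.get(identifier)

    if entry is None:
        # First request from this identifier: start with a full bucket.
        entry = {"tokens": MAX_CAPACITY, "last_refill": now}
        TOKEN_BUCKET[identifier] = entry

    # Top the bucket up in proportion to the time since the last refill.
    elapsed = (now - entry["last_refill"]).total_seconds()
    entry["tokens"] = min(MAX_CAPACITY, entry["tokens"] + elapsed * REFILL_TOKENS_PER_SECOND)
    entry["last_refill"] = now

    if entry["tokens"] < 1:
        raise TooManyRequests(f"no tokens left for {identifier!r}")

    # Spend one token for this request.
    entry["tokens"] -= 1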
@@ -6,7 +6,7 @@ from . import algos
 application = f.Flask(__name__)
 
 
-increment_requests_func = algos.token_bucket_in_memory_lazy_refill
+increment_requests_func = algos.leaking_bucket_enqueue
 
 
 @application.before_request
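The body of the before_request handler is cut off by this hunk, so as a hedged sketch only: a hook that applies whichever limiter increment_requests_func points at might look like the following, with the identifier choice and the 429 response being assumptions rather than what this app actually does:

import flask as f

from . import algos

application = f.Flask(__name__)

increment_requests_func = algos.leaking_bucket_enqueue


@application.before_request
def apply_rate_limit():
    try:
        # Use the client address as the identifier; a session ID or API key
        # would work just as well.
        increment_requests_func(f.request.remote_addr or "unknown")
    except algos.TooManyRequests:
        # Returning a response from before_request short-circuits the view.
        return "Too Many Requests", 429

Keeping the limiter behind a single increment_requests_func name is what makes swapping algorithms a one-line change, as this commit does.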