
Commit c1e8569

Comply with the new limits on request running time (#217)

* Comply with the new limits on request running time. Fixes #187
* Fixes for older Python versions
* More tests
1 parent 754de24 commit c1e8569

7 files changed (+207 -290 lines)

fast_bitrix24/__version__.py

Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-__version__ = "1.5.16"
+__version__ = "1.6a1"

fast_bitrix24/leaky_bucket.py

Lines changed: 60 additions & 0 deletions
@@ -0,0 +1,60 @@
+import asyncio
+import collections
+import contextlib
+import time
+
+
+RequestRecord = collections.namedtuple("RequestRecord", "when, duration")
+
+
+class LeakyBucketLimiter:
+    """The class emulates a leaky bucket where the consumer may only run requests
+    until he has used up X seconds of request running time in total
+    during a period of Y seconds.
+
+    When the consumer has hit the limit, he will have to wait.
+    """
+
+    def __init__(self, max_request_running_time: float, measurement_period: float):
+        # how much time fits into the bucket before it starts failing
+        self.max_request_running_time = max_request_running_time
+
+        # over what period of time should the max_request_running_time be measured
+        self.measurement_period = measurement_period
+
+        # request register. left - most recent, right - least recent
+        self.request_register = collections.deque()
+
+    @contextlib.asynccontextmanager
+    async def acquire(self):
+        """A context manager that will wait until it's safe to make the next request"""
+        await asyncio.sleep(self.get_needed_sleep_time())
+
+        try:
+            yield
+        finally:
+            self.clean_up()
+
+    def get_needed_sleep_time(self) -> float:
+        """How much time to sleep before it's safe to make a request"""
+        acc = 0
+        for record in self.request_register:
+            acc += record.duration
+            if acc >= self.max_request_running_time:
+                return record.when + self.measurement_period - time.monotonic()
+        return 0
+
+    def clean_up(self):
+        """Remove all stale records from the record register"""
+        if not self.request_register:
+            return
+
+        cut_off = time.monotonic() - self.measurement_period
+        while self.request_register[-1].when < cut_off:
+            self.request_register.pop()
+
+    def register(self, request_duration: float):
+        """Register how long the last request has taken"""
+        self.request_register.appendleft(
+            RequestRecord(time.monotonic(), request_duration)
+        )
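As a quick illustration of how this limiter is meant to be driven, here is a minimal usage sketch. It is not part of the commit: `fake_request`, `main` and the 3-second budget per 10-second window are invented for the example; the production values are BITRIX_MAX_REQUEST_RUNNING_TIME and BITRIX_MEASUREMENT_PERIOD in srh.py.

import asyncio

from fast_bitrix24.leaky_bucket import LeakyBucketLimiter


async def fake_request(duration: float) -> None:
    # stand-in for a real Bitrix24 call; only its running time matters here
    await asyncio.sleep(duration)


async def main():
    # allow at most 3 seconds of request running time per 10-second window
    limiter = LeakyBucketLimiter(max_request_running_time=3, measurement_period=10)

    for duration in (1.0, 1.5, 1.0, 1.0):
        async with limiter.acquire():  # sleeps here once the bucket is full
            await fake_request(duration)
            limiter.register(duration)  # record how much running time was consumed


asyncio.run(main())

The fourth iteration has to wait, because 3.5 seconds of running time have already accumulated inside the 10-second measurement window.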

fast_bitrix24/logger.py

Lines changed: 8 additions & 10 deletions
@@ -5,19 +5,17 @@
 logger.setLevel(DEBUG)
 logger.addHandler(NullHandler())
 
-logger.debug(f"fast_bitrix24 version: {__version__}")
-
-try:
-    from IPython import get_ipython
-
-    logger.debug(f"IPython: {get_ipython()}")
-except ImportError:
-    logger.debug("No IPython found")
-
-
 def log(func):
     async def wrapper(*args, **kwargs):
         logger.info(f"Starting {func.__name__}({args}, {kwargs})")
+        logger.debug(f"fast_bitrix24 version: {__version__}")
+        try:
+            from IPython import get_ipython
+
+            logger.debug(f"IPython: {get_ipython()}")
+        except ImportError:
+            logger.debug("No IPython found")
+
         return await func(*args, **kwargs)
 
     return wrapper
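Since the version and IPython diagnostics now run inside the wrapper, they are emitted on every decorated call rather than once at import time. A hypothetical sketch of the decorator in use (the coroutine `fetch_deals` and its body are made up, not part of the library):

import asyncio

from fast_bitrix24.logger import log


@log
async def fetch_deals():
    # "Starting fetch_deals((), {})" is logged at INFO before the body runs;
    # the version and IPython details are then logged at DEBUG
    await asyncio.sleep(0)
    return []


asyncio.run(fetch_deals())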

fast_bitrix24/srh.py

Lines changed: 35 additions & 51 deletions
@@ -1,24 +1,23 @@
-import time
-from asyncio import Event, sleep, TimeoutError
-from collections import deque
+from asyncio import Event, TimeoutError, sleep
 from contextlib import asynccontextmanager
 
 import aiohttp
 from aiohttp.client_exceptions import (
     ClientConnectionError,
     ClientPayloadError,
     ClientResponseError,
-    ServerTimeoutError,
 )
 
+from .leaky_bucket import LeakyBucketLimiter
 from .logger import logger
 from .utils import _url_valid
 
-BITRIX_POOL_SIZE = 50
-BITRIX_RPS = 2.0
 BITRIX_MAX_BATCH_SIZE = 50
 BITRIX_MAX_CONCURRENT_REQUESTS = 50
 
+BITRIX_MAX_REQUEST_RUNNING_TIME = 480
+BITRIX_MEASUREMENT_PERIOD = 10 * 60
+
 MAX_RETRIES = 10
 
 RESTORE_CONNECTIONS_FACTOR = 1.3  # скорость восстановления количества запросов

@@ -33,6 +32,14 @@ class ServerError(Exception):
     pass
 
 
+RETRIED_ERRORS = (
+    ClientPayloadError,
+    ClientConnectionError,
+    ServerError,
+    TimeoutError,
+)
+
+
 class ServerRequestHandler:
     """
     Используется для контроля скорости доступа к серверам Битрикс.

@@ -48,19 +55,13 @@ def __init__(self, webhook, respect_velocity_policy, client):
         self.webhook = self.standardize_webhook(webhook)
         self.respect_velocity_policy = respect_velocity_policy
 
-        self.requests_per_second = BITRIX_RPS
-        self.pool_size = BITRIX_POOL_SIZE
-
         self.active_runs = 0
 
         # если пользователь при инициализации передал клиента со своими настройками,
         # то будем использовать его клиента
         self.client_provided_by_user = bool(client)
         self.session = client
 
-        # rr - requests register - список отправленных запросов к серверу
-        self.rr = deque()
-
         # лимит количества одновременных запросов,
         # установленный конструктором или пользователем
         self.mcr_max = BITRIX_MAX_CONCURRENT_REQUESTS

@@ -76,6 +77,9 @@ def __init__(self, webhook, respect_velocity_policy, client):
         # если отрицательное - количество последовательно полученных ошибок
         self.successive_results = 0
 
+        # rate limiters by method
+        self.limiters = {}  # dict[str, LeakyBucketLimiter]
+
     @staticmethod
     def standardize_webhook(webhook):
         """Приводит `webhook` к стандартному виду."""

@@ -120,36 +124,37 @@ async def handle_sessions(self):
         if not self.active_runs and self.session and not self.session.closed:
             await self.session.close()
 
-    async def single_request(self, method, params=None) -> dict:
+    async def single_request(self, method: str, params=None) -> dict:
         """Делает единичный запрос к серверу,
         с повторными попытками при необходимости."""
 
         while True:
 
             try:
-                result = await self.request_attempt(method, params)
+                result = await self.request_attempt(method.strip().lower(), params)
                 self.success()
                 return result
 
-            except (
-                ClientPayloadError,
-                ClientConnectionError,
-                ServerError,
-                TimeoutError,
-            ) as err:
+            except RETRIED_ERRORS as err:  # all other exceptions will propagate
                 self.failure(err)
 
     async def request_attempt(self, method, params=None) -> dict:
         """Делает попытку запроса к серверу, ожидая при необходимости."""
 
         try:
-            async with self.acquire():
+            async with self.acquire(method):
                 logger.debug(f"Requesting {{'method': {method}, 'params': {params}}}")
+
                 async with self.session.post(
                     url=self.webhook + method, json=params
                 ) as response:
                     json = await response.json(encoding="utf-8")
+
                     logger.debug("Response: %s", json)
+
+                    request_run_time = json["time"]["operating"]
+                    self.limiters[method].register(request_run_time)
+
                     return json
 
         except ClientResponseError as error:

@@ -175,15 +180,21 @@ def failure(self, err: Exception):
             ) from err
 
     @asynccontextmanager
-    async def acquire(self):
+    async def acquire(self, method: str):
         """Ожидает, пока не станет безопасно делать запрос к серверу."""
 
         await self.autothrottle()
 
         async with self.limit_concurrent_requests():
             if self.respect_velocity_policy:
-                async with self.limit_request_velocity():
+                if method not in self.limiters:
+                    self.limiters[method] = LeakyBucketLimiter(
+                        BITRIX_MAX_REQUEST_RUNNING_TIME, BITRIX_MEASUREMENT_PERIOD
+                    )
+
+                async with self.limiters[method].acquire():
                     yield
+
             else:
                 yield
 

@@ -220,7 +231,7 @@ async def autothrottle(self):
 
     @asynccontextmanager
     async def limit_concurrent_requests(self):
-        """Не позволяет оновременно выполнять
+        """Не позволяет одновременно выполнять
         более `self.mcr_cur_limit` запросов."""
 
         while self.concurrent_requests > self.mcr_cur_limit:

@@ -235,30 +246,3 @@ async def limit_concurrent_requests(self):
         finally:
             self.concurrent_requests -= 1
             self.request_complete.set()
-
-    @asynccontextmanager
-    async def limit_request_velocity(self):
-        """Ограничивает скорость запросов к серверу."""
-
-        # если пул заполнен, ждать
-        while len(self.rr) >= self.pool_size:
-            time_from_last_request = time.monotonic() - self.rr[0]
-            time_to_wait = 1 / self.requests_per_second - time_from_last_request
-            if time_to_wait > 0:
-                await sleep(time_to_wait)
-            else:
-                break
-
-        # зарегистрировать запрос в очереди
-        start_time = time.monotonic()
-        self.rr.appendleft(start_time)
-
-        # отдать управление
-        try:
-            yield
-
-        # подчистить пул
-        finally:
-            trim_time = start_time - self.pool_size / self.requests_per_second
-            while self.rr[-1] < trim_time:
-                self.rr.pop()
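In isolation, the per-method throttling added above boils down to a lazily created LeakyBucketLimiter per REST method, fed with the running time the server reports in json["time"]["operating"]. A rough standalone sketch, assuming a placeholder `post` coroutine and a module-level `limiters` dict instead of the real aiohttp call inside ServerRequestHandler:

import asyncio

from fast_bitrix24.leaky_bucket import LeakyBucketLimiter

BITRIX_MAX_REQUEST_RUNNING_TIME = 480
BITRIX_MEASUREMENT_PERIOD = 10 * 60

limiters = {}  # dict[str, LeakyBucketLimiter], as in ServerRequestHandler


async def post(method: str, params=None) -> dict:
    # placeholder for session.post(); pretend the server reports that it
    # spent 1.2 seconds of "operating" time on this request
    await asyncio.sleep(0)
    return {"result": [], "time": {"operating": 1.2}}


async def throttled_request(method: str, params=None) -> dict:
    # one leaky bucket per REST method, created lazily, as in acquire()
    if method not in limiters:
        limiters[method] = LeakyBucketLimiter(
            BITRIX_MAX_REQUEST_RUNNING_TIME, BITRIX_MEASUREMENT_PERIOD
        )

    async with limiters[method].acquire():
        json = await post(method, params)
        # feed the reported running time back into the bucket,
        # mirroring request_attempt()
        limiters[method].register(json["time"]["operating"])

    return json


asyncio.run(throttled_request("crm.lead.list"))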
