This repository was archived by the owner on Apr 26, 2024. It is now read-only.

Commit 4ddffb4

make _EventPeristenceQueue a regular async function
1 parent 19aca5c commit 4ddffb4

1 file changed (+21, -27 lines)

synapse/storage/persist_events.py (+21, -27)
@@ -36,7 +36,6 @@
 from prometheus_client import Counter, Histogram
 
 from twisted.internet import defer
-from twisted.internet.defer import Deferred
 
 from synapse.api.constants import EventTypes, Membership
 from synapse.events import EventBase
@@ -52,7 +51,7 @@
     StateMap,
     get_domain_from_id,
 )
-from synapse.util.async_helpers import ObservableDeferred
+from synapse.util.async_helpers import ObservableDeferred, yieldable_gather_results
 from synapse.util.metrics import Measure
 
 logger = logging.getLogger(__name__)
@@ -135,25 +134,24 @@ def __init__(
         self._currently_persisting_rooms: Set[str] = set()
         self._per_item_callback = per_item_callback
 
-    def add_to_queue(self, room_id, events_and_contexts, backfilled) -> Deferred:
+    async def add_to_queue(
+        self,
+        room_id: str,
+        events_and_contexts: Iterable[Tuple[EventBase, EventContext]],
+        backfilled: bool,
+    ) -> _PersistResult:
         """Add events to the queue, with the given persist_event options.
 
         If we are not already processing events in this room, starts off a background
         process to to so, calling the per_item_callback for each item.
 
-        NB: due to the normal usage pattern of this method, it does *not*
-        follow the synapse logcontext rules, and leaves the logcontext in
-        place whether or not the returned deferred is ready.
-
         Args:
             room_id (str):
             events_and_contexts (list[(EventBase, EventContext)]):
             backfilled (bool):
 
         Returns:
-            defer.Deferred: a deferred which will resolve once the events are
-            persisted. Runs its callbacks in the sentinel logcontext. The result
-            is the same as that returned by the `_per_item_callback` passed to
+            the result returned by the `_per_item_callback` passed to
            `__init__`.
         """
         queue = self._event_persist_queues.setdefault(room_id, deque())
@@ -175,7 +173,7 @@ def add_to_queue(self, room_id, events_and_contexts, backfilled) -> Deferred:
 
         end_item.events_and_contexts.extend(events_and_contexts)
         self._handle_queue(room_id)
-        return end_item.deferred.observe()
+        return await make_deferred_yieldable(end_item.deferred.observe())
 
     def _handle_queue(self, room_id):
         """Attempts to handle the queue for a room if not already being handled.
@@ -278,22 +276,20 @@ async def persist_events(
         for event, ctx in events_and_contexts:
             partitioned.setdefault(event.room_id, []).append((event, ctx))
 
-        deferreds = []
-        for room_id, evs_ctxs in partitioned.items():
-            d = self._event_persist_queue.add_to_queue(
+        async def enqueue(item):
+            room_id, evs_ctxs = item
+            return await self._event_persist_queue.add_to_queue(
                 room_id, evs_ctxs, backfilled=backfilled
             )
-            deferreds.append(d)
 
-        # Each deferred returns a map from event ID to existing event ID if the
-        # event was deduplicated. (The dict may also include other entries if
+        ret_vals = await yieldable_gather_results(enqueue, partitioned.items())
+
+        # Each call to add_to_queue returns a map from event ID to existing event ID if
+        # the event was deduplicated. (The dict may also include other entries if
         # the event was persisted in a batch with other events).
         #
-        # Since we use `defer.gatherResults` we need to merge the returned list
+        # Since we use `concurrently_execute` we need to merge the returned list
         # of dicts into one.
-        ret_vals = await make_deferred_yieldable(
-            defer.gatherResults(deferreds, consumeErrors=True)
-        )
         replaced_events: Dict[str, str] = {}
         for d in ret_vals:
             replaced_events.update(d)
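
A self-contained sketch of the fan-out-and-merge shape used in this hunk: one add_to_queue() call per room, run concurrently, with the per-batch result dicts merged into a single map. asyncio.gather stands in for Synapse's logcontext-aware yieldable_gather_results; ToyPersistQueue and the event IDs are illustrative, not Synapse code.

import asyncio
from typing import Dict, List, Tuple

class ToyPersistQueue:
    async def add_to_queue(self, room_id: str, event_ids: List[str]) -> Dict[str, str]:
        # Pretend to persist the batch; map each event ID to itself
        # (a real result would map deduplicated events to existing ones).
        await asyncio.sleep(0)
        return {event_id: event_id for event_id in event_ids}

async def persist_events(
    queue: ToyPersistQueue, partitioned: Dict[str, List[str]]
) -> Dict[str, str]:
    async def enqueue(item: Tuple[str, List[str]]) -> Dict[str, str]:
        room_id, event_ids = item
        return await queue.add_to_queue(room_id, event_ids)

    # Run one enqueue per room concurrently, then merge the returned dicts.
    ret_vals = await asyncio.gather(*(enqueue(item) for item in partitioned.items()))
    replaced_events: Dict[str, str] = {}
    for d in ret_vals:
        replaced_events.update(d)
    return replaced_events

batches = {"!room1:example.org": ["$a", "$b"], "!room2:example.org": ["$c"]}
print(asyncio.run(persist_events(ToyPersistQueue(), batches)))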
@@ -321,14 +317,12 @@ async def persist_event(
             event if it was deduplicated due to an existing event matching the
             transaction ID.
         """
-        deferred = self._event_persist_queue.add_to_queue(
-            event.room_id, [(event, context)], backfilled=backfilled
-        )
-
-        # The deferred returns a map from event ID to existing event ID if the
+        # add_to_queue returns a map from event ID to existing event ID if the
         # event was deduplicated. (The dict may also include other entries if
         # the event was persisted in a batch with other events.)
-        replaced_events = await make_deferred_yieldable(deferred)
+        replaced_events = await self._event_persist_queue.add_to_queue(
+            event.room_id, [(event, context)], backfilled=backfilled
+        )
         replaced_event = replaced_events.get(event.event_id)
         if replaced_event:
             event = await self.main_store.get_event(replaced_event)
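
The single-event path in this last hunk simply awaits add_to_queue and then looks the event up in the returned map. A minimal standalone illustration of that lookup, using a toy stub rather than Synapse's _EventPeristenceQueue; the stub's behaviour and IDs are invented for the example.

import asyncio
from typing import Dict, List

async def add_to_queue(room_id: str, event_ids: List[str], backfilled: bool) -> Dict[str, str]:
    await asyncio.sleep(0)
    return {"$new": "$existing"}  # pretend "$new" duplicated an already-persisted event

async def persist_event(event_id: str, room_id: str) -> str:
    replaced_events = await add_to_queue(room_id, [event_id], backfilled=False)
    # Use the already-persisted event's ID if this event was deduplicated.
    return replaced_events.get(event_id, event_id)

print(asyncio.run(persist_event("$new", "!room:example.org")))  # prints $existing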

0 commit comments
