# synapse/storage/__init__.py
# -*- coding: utf-8 -*-
# Copyright 2014 matrix.org
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.internet import defer
from synapse.api.events.room import (
RoomMemberEvent, RoomTopicEvent, FeedbackEvent,
# RoomConfigEvent,
RoomNameEvent,
)
from synapse.util.logutils import log_function
from .directory import DirectoryStore
from .feedback import FeedbackStore
from .presence import PresenceStore
from .profile import ProfileStore
from .registration import RegistrationStore
from .room import RoomStore
from .roommember import RoomMemberStore
from .stream import StreamStore
from .pdu import StatePduStore, PduStore
from .transactions import TransactionStore
import json
import logging
import os
logger = logging.getLogger(__name__)
class DataStore(RoomMemberStore, RoomStore,
                RegistrationStore, StreamStore, ProfileStore, FeedbackStore,
                PresenceStore, PduStore, StatePduStore, TransactionStore,
                DirectoryStore):
    """Aggregates all per-table storage mixins into a single data store.

    Also owns ``min_token``, a strictly-negative stream-ordering counter
    handed out to backfilled events so they sort before every live event
    (live events get positive stream orderings elsewhere).
    """

    def __init__(self, hs):
        super(DataStore, self).__init__(hs)
        self.event_factory = hs.get_event_factory()
        self.hs = hs

        # min_token is loaded lazily from the DB; it stays None until
        # min_token_deferred fires (see _store_event's backfill path).
        self.min_token_deferred = self._get_min_token()
        self.min_token = None

    @defer.inlineCallbacks
    @log_function
    def persist_event(self, event, backfilled=False):
        """Persist an event, first dispatching to the type-specific store.

        Args:
            event: The event to persist.
            backfilled: True if the event arrived via backfill, in which
                case it is assigned a negative stream ordering.

        Returns:
            Deferred: resolves to the maximum room-events stream token
            after the event has been stored.
        """
        if event.type == RoomMemberEvent.TYPE:
            yield self._store_room_member(event)
        elif event.type == FeedbackEvent.TYPE:
            yield self._store_feedback(event)
        # elif event.type == RoomConfigEvent.TYPE:
        #     yield self._store_room_config(event)
        elif event.type == RoomNameEvent.TYPE:
            yield self._store_room_name(event)
        elif event.type == RoomTopicEvent.TYPE:
            yield self._store_room_topic(event)

        ret = yield self._store_event(event, backfilled)
        defer.returnValue(ret)

    @defer.inlineCallbacks
    def get_event(self, event_id):
        """Fetch a single event by its event_id.

        Returns:
            Deferred: resolves to the parsed event.
        """
        events_dict = yield self._simple_select_one(
            "events",
            {"event_id": event_id},
            [
                "event_id",
                "type",
                "room_id",
                "content",
                "unrecognized_keys"
            ],
        )

        event = self._parse_event_from_row(events_dict)
        defer.returnValue(event)

    @defer.inlineCallbacks
    @log_function
    def _store_event(self, event, backfilled):
        """Write the event row (and any state rows) to the database.

        Returns:
            Deferred: resolves to the latest room-events stream token,
            or to None if the insert failed (e.g. duplicate event).
        """
        # FIXME (erikj): This should be removed when we start amalgamating
        # event and pdu storage
        yield self.hs.get_federation().fill_out_prev_events(event)

        vals = {
            "topological_ordering": event.depth,
            "event_id": event.event_id,
            "type": event.type,
            "room_id": event.room_id,
            "content": json.dumps(event.content),
            "processed": True,
        }

        if hasattr(event, "outlier"):
            vals["outlier"] = event.outlier
        else:
            vals["outlier"] = False

        if backfilled:
            # Wait for min_token to be loaded from the DB before using it.
            if not self.min_token_deferred.called:
                yield self.min_token_deferred
            self.min_token -= 1
            vals["stream_ordering"] = self.min_token

        # Anything not covered by an explicit column is stashed as JSON.
        unrec = {
            k: v
            for k, v in event.get_full_dict().items()
            if k not in vals.keys()
        }
        vals["unrecognized_keys"] = json.dumps(unrec)

        try:
            yield self._simple_insert("events", vals)
        except Exception:
            # FIX: was a bare `except:`, which also swallows SystemExit/
            # KeyboardInterrupt/GeneratorExit. Insert failures here are
            # expected for duplicate events, so log and bail out.
            logger.exception(
                "Failed to persist, probably duplicate: %s",
                event.event_id
            )
            return

        if not backfilled and hasattr(event, "state_key"):
            vals = {
                "event_id": event.event_id,
                "room_id": event.room_id,
                "type": event.type,
                "state_key": event.state_key,
            }

            if hasattr(event, "prev_state"):
                vals["prev_state"] = event.prev_state

            yield self._simple_insert("state_events", vals)

            yield self._simple_insert(
                "current_state_events",
                {
                    "event_id": event.event_id,
                    "room_id": event.room_id,
                    "type": event.type,
                    "state_key": event.state_key,
                }
            )

        latest = yield self.get_room_events_max_id()
        defer.returnValue(latest)

    @defer.inlineCallbacks
    def get_current_state(self, room_id, event_type=None, state_key=""):
        """Fetch a room's current state events.

        Args:
            room_id: The room whose state to fetch.
            event_type: If given, restrict to this state event type.
            state_key: State key filter, only used with event_type.

        Returns:
            Deferred: resolves to a list of parsed events.
        """
        sql = (
            "SELECT e.* FROM events as e "
            "INNER JOIN current_state_events as c ON e.event_id = c.event_id "
            "INNER JOIN state_events as s ON e.event_id = s.event_id "
            "WHERE c.room_id = ? "
        )

        if event_type:
            sql += " AND s.type = ? AND s.state_key = ? "
            args = (room_id, event_type, state_key)
        else:
            args = (room_id, )

        results = yield self._execute_and_decode(sql, *args)

        defer.returnValue([self._parse_event_from_row(r) for r in results])

    @defer.inlineCallbacks
    def _get_min_token(self):
        """Load the lowest stream ordering from the DB, clamped to <= -1."""
        row = yield self._execute(
            None,
            "SELECT MIN(stream_ordering) FROM events"
        )

        # NOTE: a stored minimum of 0 is falsy and maps to -1 here; the
        # min() below clamps it to -1 regardless, so the result is the same.
        self.min_token = row[0][0] if row and row[0] and row[0][0] else -1
        self.min_token = min(self.min_token, -1)

        logger.debug("min_token is: %s", self.min_token)

        defer.returnValue(self.min_token)
def schema_path(schema):
    """Return the filesystem path of the named database schema.

    Args:
        schema: Name of the database schema.

    Returns:
        A filesystem path pointing at a ".sql" file.
    """
    return os.path.join(os.path.dirname(__file__), "schema", schema + ".sql")
def read_schema(schema):
    """Read and return the contents of the named database schema.

    Args:
        schema: Name of the database schema.

    Returns:
        A string containing the database schema.
    """
    path = schema_path(schema)
    with open(path) as fh:
        return fh.read()