Fix SQL so that it accepts that we may want to persist events twice.

pull/12/head
Erik Johnston 2014-10-30 10:11:06 +00:00
parent 53216a500d
commit aa80900a8e
2 changed files with 47 additions and 25 deletions

View File

@ -99,7 +99,8 @@ class EventFederationStore(SQLBaseStore):
"event_id": event_id, "event_id": event_id,
"prev_event_id": e_id, "prev_event_id": e_id,
"room_id": room_id, "room_id": room_id,
} },
or_ignore=True,
) )
# Update the extremities table if this is not an outlier. # Update the extremities table if this is not an outlier.
@ -120,7 +121,7 @@ class EventFederationStore(SQLBaseStore):
# We only insert as a forward extremity the new pdu if there are no # We only insert as a forward extremity the new pdu if there are no
# other pdus that reference it as a prev pdu # other pdus that reference it as a prev pdu
query = ( query = (
"INSERT INTO %(table)s (event_id, room_id) " "INSERT OR IGNORE INTO %(table)s (event_id, room_id) "
"SELECT ?, ? WHERE NOT EXISTS (" "SELECT ?, ? WHERE NOT EXISTS ("
"SELECT 1 FROM %(event_edges)s WHERE " "SELECT 1 FROM %(event_edges)s WHERE "
"prev_event_id = ? " "prev_event_id = ? "
@ -144,7 +145,8 @@ class EventFederationStore(SQLBaseStore):
values={ values={
"event_id": e_id, "event_id": e_id,
"room_id": room_id, "room_id": room_id,
} },
or_ignore=True,
) )
# Also delete from the backwards extremities table all ones that # Also delete from the backwards extremities table all ones that

View File

@ -181,11 +181,16 @@ class SignatureStore(SQLBaseStore):
algorithm (str): Hashing algorithm. algorithm (str): Hashing algorithm.
hash_bytes (bytes): Hash function output bytes. hash_bytes (bytes): Hash function output bytes.
""" """
self._simple_insert_txn(txn, "event_content_hashes", { self._simple_insert_txn(
"event_id": event_id, txn,
"algorithm": algorithm, "event_content_hashes",
"hash": buffer(hash_bytes), {
}) "event_id": event_id,
"algorithm": algorithm,
"hash": buffer(hash_bytes),
},
or_ignore=True,
)
def _get_event_reference_hashes_txn(self, txn, event_id): def _get_event_reference_hashes_txn(self, txn, event_id):
"""Get all the hashes for a given PDU. """Get all the hashes for a given PDU.
@ -212,11 +217,16 @@ class SignatureStore(SQLBaseStore):
algorithm (str): Hashing algorithm. algorithm (str): Hashing algorithm.
hash_bytes (bytes): Hash function output bytes. hash_bytes (bytes): Hash function output bytes.
""" """
self._simple_insert_txn(txn, "event_reference_hashes", { self._simple_insert_txn(
"event_id": event_id, txn,
"algorithm": algorithm, "event_reference_hashes",
"hash": buffer(hash_bytes), {
}) "event_id": event_id,
"algorithm": algorithm,
"hash": buffer(hash_bytes),
},
or_ignore=True,
)
def _get_event_origin_signatures_txn(self, txn, event_id): def _get_event_origin_signatures_txn(self, txn, event_id):
@ -245,12 +255,17 @@ class SignatureStore(SQLBaseStore):
key_id (str): Id for the signing key. key_id (str): Id for the signing key.
signature (bytes): The signature. signature (bytes): The signature.
""" """
self._simple_insert_txn(txn, "event_origin_signatures", { self._simple_insert_txn(
"event_id": event_id, txn,
"origin": origin, "event_origin_signatures",
"key_id": key_id, {
"signature": buffer(signature_bytes), "event_id": event_id,
}) "origin": origin,
"key_id": key_id,
"signature": buffer(signature_bytes),
},
or_ignore=True,
)
def _get_prev_event_hashes_txn(self, txn, event_id): def _get_prev_event_hashes_txn(self, txn, event_id):
"""Get all the hashes for previous PDUs of a PDU """Get all the hashes for previous PDUs of a PDU
@ -274,9 +289,14 @@ class SignatureStore(SQLBaseStore):
def _store_prev_event_hash_txn(self, txn, event_id, prev_event_id, def _store_prev_event_hash_txn(self, txn, event_id, prev_event_id,
algorithm, hash_bytes): algorithm, hash_bytes):
self._simple_insert_txn(txn, "event_edge_hashes", { self._simple_insert_txn(
"event_id": event_id, txn,
"prev_event_id": prev_event_id, "event_edge_hashes",
"algorithm": algorithm, {
"hash": buffer(hash_bytes), "event_id": event_id,
}) "prev_event_id": prev_event_id,
"algorithm": algorithm,
"hash": buffer(hash_bytes),
},
or_ignore=True,
)