From ce84dd9e207d9ae88e4cf9ca8a9731fcac043969 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Wed, 22 Jan 2020 15:09:57 +0000 Subject: [PATCH 01/67] Remove unnecessary abstractions in admin handler (#6751) --- changelog.d/6751.misc | 1 + synapse/handlers/admin.py | 62 ------------------- synapse/rest/admin/users.py | 19 +++--- .../storage/data_stores/main/registration.py | 2 +- 4 files changed, 11 insertions(+), 73 deletions(-) create mode 100644 changelog.d/6751.misc diff --git a/changelog.d/6751.misc b/changelog.d/6751.misc new file mode 100644 index 000000000000..722252052818 --- /dev/null +++ b/changelog.d/6751.misc @@ -0,0 +1 @@ +Remove some unnecessary admin handler abstraction methods. \ No newline at end of file diff --git a/synapse/handlers/admin.py b/synapse/handlers/admin.py index 60a7c938bc84..9205865231cf 100644 --- a/synapse/handlers/admin.py +++ b/synapse/handlers/admin.py @@ -62,68 +62,6 @@ async def get_user(self, user): ret["avatar_url"] = profile.avatar_url return ret - async def get_users(self): - """Function to retrieve a list of users in users table. - - Args: - Returns: - defer.Deferred: resolves to list[dict[str, Any]] - """ - ret = await self.store.get_users() - - return ret - - async def get_users_paginate(self, start, limit, name, guests, deactivated): - """Function to retrieve a paginated list of users from - users list. This will return a json list of users. - - Args: - start (int): start number to begin the query from - limit (int): number of rows to retrieve - name (string): filter for user names - guests (bool): whether to in include guest users - deactivated (bool): whether to include deactivated users - Returns: - defer.Deferred: resolves to json list[dict[str, Any]] - """ - ret = await self.store.get_users_paginate( - start, limit, name, guests, deactivated - ) - - return ret - - async def search_users(self, term): - """Function to search users list for one or more users with - the matched term. - - Args: - term (str): search term - Returns: - defer.Deferred: resolves to list[dict[str, Any]] - """ - ret = await self.store.search_users(term) - - return ret - - def get_user_server_admin(self, user): - """ - Get the admin bit on a user. - - Args: - user_id (UserID): the (necessarily local) user to manipulate - """ - return self.store.is_server_admin(user) - - def set_user_server_admin(self, user, admin): - """ - Set the admin bit on a user. - - Args: - user_id (UserID): the (necessarily local) user to manipulate - admin (bool): whether or not the user should be an admin of this server - """ - return self.store.set_server_admin(user, admin) - async def export_user_data(self, user_id, writer): """Write all data we have on the user to the given writer. 
diff --git a/synapse/rest/admin/users.py b/synapse/rest/admin/users.py index 52d27fa3e383..927e9ca9eed6 100644 --- a/synapse/rest/admin/users.py +++ b/synapse/rest/admin/users.py @@ -45,6 +45,7 @@ class UsersRestServlet(RestServlet): def __init__(self, hs): self.hs = hs + self.store = hs.get_datastore() self.auth = hs.get_auth() self.admin_handler = hs.get_handlers().admin_handler @@ -55,7 +56,7 @@ async def on_GET(self, request, user_id): if not self.hs.is_mine(target_user): raise SynapseError(400, "Can only users a local user") - ret = await self.admin_handler.get_users() + ret = await self.store.get_users() return 200, ret @@ -80,6 +81,7 @@ class UsersRestServletV2(RestServlet): def __init__(self, hs): self.hs = hs + self.store = hs.get_datastore() self.auth = hs.get_auth() self.admin_handler = hs.get_handlers().admin_handler @@ -92,7 +94,7 @@ async def on_GET(self, request): guests = parse_boolean(request, "guests", default=True) deactivated = parse_boolean(request, "deactivated", default=False) - users = await self.admin_handler.get_users_paginate( + users = await self.store.get_users_paginate( start, limit, user_id, guests, deactivated ) ret = {"users": users} @@ -516,8 +518,8 @@ class SearchUsersRestServlet(RestServlet): PATTERNS = historical_admin_path_patterns("/search_users/(?P[^/]*)") def __init__(self, hs): - self.store = hs.get_datastore() self.hs = hs + self.store = hs.get_datastore() self.auth = hs.get_auth() self.handlers = hs.get_handlers() @@ -540,7 +542,7 @@ async def on_GET(self, request, target_user_id): term = parse_string(request, "term", required=True) logger.info("term: %s ", term) - ret = await self.handlers.admin_handler.search_users(term) + ret = await self.handlers.store.search_users(term) return 200, ret @@ -574,8 +576,8 @@ class UserAdminServlet(RestServlet): def __init__(self, hs): self.hs = hs + self.store = hs.get_datastore() self.auth = hs.get_auth() - self.handlers = hs.get_handlers() async def on_GET(self, request, user_id): await assert_requester_is_admin(self.auth, request) @@ -585,8 +587,7 @@ async def on_GET(self, request, user_id): if not self.hs.is_mine(target_user): raise SynapseError(400, "Only local users can be admins of this homeserver") - is_admin = await self.handlers.admin_handler.get_user_server_admin(target_user) - is_admin = bool(is_admin) + is_admin = await self.store.is_server_admin(target_user) return 200, {"admin": is_admin} @@ -609,8 +610,6 @@ async def on_PUT(self, request, user_id): if target_user == auth_user and not set_admin_to: raise SynapseError(400, "You may not demote yourself.") - await self.handlers.admin_handler.set_user_server_admin( - target_user, set_admin_to - ) + await self.store.set_user_server_admin(target_user, set_admin_to) return 200, {} diff --git a/synapse/storage/data_stores/main/registration.py b/synapse/storage/data_stores/main/registration.py index cb4b2b39a0ae..49306642ed42 100644 --- a/synapse/storage/data_stores/main/registration.py +++ b/synapse/storage/data_stores/main/registration.py @@ -291,7 +291,7 @@ def is_server_admin(self, user): desc="is_server_admin", ) - return res if res else False + return bool(res) if res else False def set_server_admin(self, user, admin): """Sets whether a user is an admin of this homeserver. 
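The handler methods deleted above were thin pass-throughs: they added no behaviour over the datastore, so the servlets now hold a store reference and call it directly. A minimal, self-contained sketch of the shape being removed, using toy stand-in classes rather than real Synapse code:

    import asyncio

    class FakeStore:
        """Toy stand-in for the real DataStore."""

        async def get_users(self):
            return [{"name": "@alice:example.com", "admin": 0}]

    class AdminHandler:
        """Old shape: each method only awaited the same-named store method."""

        def __init__(self, store):
            self.store = store

        async def get_users(self):
            return await self.store.get_users()

    store = FakeStore()
    # Before: servlet -> handler -> store; after: servlet -> store.
    print(asyncio.run(AdminHandler(store).get_users()))
    print(asyncio.run(store.get_users()))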
From 5bd3cb7260984164c4c54eb2add1fa7821795360 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Thu, 23 Jan 2020 12:03:58 +0000 Subject: [PATCH 02/67] Minor fixes to user admin api (#6761) * don't insist on a password (this is valid if you have an SSO login) * fix reference to undefined `requester` --- changelog.d/6761.bugfix | 1 + synapse/rest/admin/users.py | 14 +++++--------- 2 files changed, 6 insertions(+), 9 deletions(-) create mode 100644 changelog.d/6761.bugfix diff --git a/changelog.d/6761.bugfix b/changelog.d/6761.bugfix new file mode 100644 index 000000000000..1c664c02dff3 --- /dev/null +++ b/changelog.d/6761.bugfix @@ -0,0 +1 @@ +Minor fixes to `PUT /_synapse/admin/v2/users` admin api. diff --git a/synapse/rest/admin/users.py b/synapse/rest/admin/users.py index 927e9ca9eed6..345574119514 100644 --- a/synapse/rest/admin/users.py +++ b/synapse/rest/admin/users.py @@ -153,7 +153,8 @@ async def on_GET(self, request, user_id): return 200, ret async def on_PUT(self, request, user_id): - await assert_requester_is_admin(self.auth, request) + requester = await self.auth.get_user_by_req(request) + await assert_user_is_admin(self.auth, requester.user) target_user = UserID.from_string(user_id) body = parse_json_object_from_request(request) @@ -164,8 +165,6 @@ async def on_PUT(self, request, user_id): user = await self.admin_handler.get_user(target_user) if user: # modify user - requester = await self.auth.get_user_by_req(request) - if "displayname" in body: await self.profile_handler.set_displayname( target_user, requester, body["displayname"], True @@ -212,11 +211,8 @@ async def on_PUT(self, request, user_id): return 200, user else: # create user - if "password" not in body: - raise SynapseError( - 400, "password must be specified", errcode=Codes.BAD_JSON - ) - elif ( + password = body.get("password") + if password is not None and ( not isinstance(body["password"], text_type) or len(body["password"]) > 512 ): @@ -231,7 +227,7 @@ async def on_PUT(self, request, user_id): user_id = await self.registration_handler.register_user( localpart=target_user.localpart, - password=body["password"], + password=password, admin=bool(admin), default_display_name=displayname, user_type=user_type, From 6b7462a13fff8f4e1dbad0bb4d81bbe0515af7c1 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Thu, 23 Jan 2020 12:11:44 +0000 Subject: [PATCH 03/67] a bit of debugging for media storage providers (#6757) * a bit of debugging for media storage providers * changelog --- changelog.d/6757.misc | 1 + synapse/rest/media/v1/media_storage.py | 1 + synapse/rest/media/v1/storage_provider.py | 6 ++++++ 3 files changed, 8 insertions(+) create mode 100644 changelog.d/6757.misc diff --git a/changelog.d/6757.misc b/changelog.d/6757.misc new file mode 100644 index 000000000000..a50c5e974a23 --- /dev/null +++ b/changelog.d/6757.misc @@ -0,0 +1 @@ +Add some debugging for media storage providers. 
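The two diffs that follow add a debug line naming the storage provider that served a path, plus `__str__` methods so that name is readable. A standalone sketch of why `__str__` matters for that log line; the directory and media path below are made up for illustration:

    import logging

    logging.basicConfig(level=logging.DEBUG)
    logger = logging.getLogger("media_storage_demo")

    class FileStorageProviderBackend:
        def __init__(self, base_directory):
            self.base_directory = base_directory

        def __str__(self):
            # Without this, "%s" would render the default repr:
            # <__main__.FileStorageProviderBackend object at 0x7f...>
            return "FileStorageProviderBackend[%s]" % (self.base_directory,)

    provider = FileStorageProviderBackend("/data/media_store_b")
    logger.debug("Streaming %s from %s", "local_content/ab/cd/ef0123", provider)
    # -> DEBUG:media_storage_demo:Streaming local_content/ab/cd/ef0123
    #    from FileStorageProviderBackend[/data/media_store_b]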
diff --git a/synapse/rest/media/v1/media_storage.py b/synapse/rest/media/v1/media_storage.py index 3b87717a5aa4..683a79c96641 100644 --- a/synapse/rest/media/v1/media_storage.py +++ b/synapse/rest/media/v1/media_storage.py @@ -148,6 +148,7 @@ def fetch_media(self, file_info): for provider in self.storage_providers: res = yield provider.fetch(path, file_info) if res: + logger.debug("Streaming %s from %s", path, provider) return res return None diff --git a/synapse/rest/media/v1/storage_provider.py b/synapse/rest/media/v1/storage_provider.py index 37687ea7f4ec..858680be266c 100644 --- a/synapse/rest/media/v1/storage_provider.py +++ b/synapse/rest/media/v1/storage_provider.py @@ -77,6 +77,9 @@ def __init__(self, backend, store_local, store_synchronous, store_remote): self.store_synchronous = store_synchronous self.store_remote = store_remote + def __str__(self): + return "StorageProviderWrapper[%s]" % (self.backend,) + def store_file(self, path, file_info): if not file_info.server_name and not self.store_local: return defer.succeed(None) @@ -114,6 +117,9 @@ def __init__(self, hs, config): self.cache_directory = hs.config.media_store_path self.base_directory = config + def __str__(self): + return "FileStorageProviderBackend[%s]" % (self.base_directory,) + def store_file(self, path, file_info): """See StorageProvider.store_file""" From fa4d609e20318821e2ffbeb35bfddbc86be81be0 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 23 Jan 2020 15:19:03 +0000 Subject: [PATCH 04/67] Make 'event.redacts' never raise. (#6771) There are quite a few places that we assume that a redaction event has a corresponding `redacts` key, which is not always the case. So lets cheekily make it so that event.redacts just returns None instead. --- changelog.d/6771.bugfix | 1 + synapse/events/__init__.py | 28 ++++++++++++--- synapse/storage/data_stores/main/events.py | 2 +- .../storage/data_stores/main/events_worker.py | 2 +- tests/storage/test_redaction.py | 35 +++++++++++++++++++ 5 files changed, 62 insertions(+), 6 deletions(-) create mode 100644 changelog.d/6771.bugfix diff --git a/changelog.d/6771.bugfix b/changelog.d/6771.bugfix new file mode 100644 index 000000000000..623ba24acb83 --- /dev/null +++ b/changelog.d/6771.bugfix @@ -0,0 +1 @@ +Fix persisting redaction events that have been redacted (or otherwise don't have a redacts key). diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py index 88ed6d764f34..72c09327f43a 100644 --- a/synapse/events/__init__.py +++ b/synapse/events/__init__.py @@ -116,16 +116,32 @@ def is_redacted(self): return getattr(self, "redacted", False) -def _event_dict_property(key): +_SENTINEL = object() + + +def _event_dict_property(key, default=_SENTINEL): + """Creates a new property for the given key that delegates access to + `self._event_dict`. + + The default is used if the key is missing from the `_event_dict`, if given, + otherwise an AttributeError will be raised. + + Note: If a default is given then `hasattr` will always return true. + """ + # We want to be able to use hasattr with the event dict properties. # However, (on python3) hasattr expects AttributeError to be raised. 
Hence, # we need to transform the KeyError into an AttributeError - def getter(self): + + def getter_raises(self): try: return self._event_dict[key] except KeyError: raise AttributeError(key) + def getter_default(self): + return self._event_dict.get(key, default) + def setter(self, v): try: self._event_dict[key] = v @@ -138,7 +154,11 @@ def delete(self): except KeyError: raise AttributeError(key) - return property(getter, setter, delete) + if default is _SENTINEL: + # No default given, so use the getter that raises + return property(getter_raises, setter, delete) + else: + return property(getter_default, setter, delete) class EventBase(object): @@ -165,7 +185,7 @@ def __init__( origin = _event_dict_property("origin") origin_server_ts = _event_dict_property("origin_server_ts") prev_events = _event_dict_property("prev_events") - redacts = _event_dict_property("redacts") + redacts = _event_dict_property("redacts", None) room_id = _event_dict_property("room_id") sender = _event_dict_property("sender") user_id = _event_dict_property("sender") diff --git a/synapse/storage/data_stores/main/events.py b/synapse/storage/data_stores/main/events.py index 596daf8909ea..ce553566a5a5 100644 --- a/synapse/storage/data_stores/main/events.py +++ b/synapse/storage/data_stores/main/events.py @@ -951,7 +951,7 @@ def _update_metadata_tables_txn( elif event.type == EventTypes.Message: # Insert into the event_search table. self._store_room_message_txn(txn, event) - elif event.type == EventTypes.Redaction: + elif event.type == EventTypes.Redaction and event.redacts is not None: # Insert into the redactions table. self._store_redaction(txn, event) elif event.type == EventTypes.Retention: diff --git a/synapse/storage/data_stores/main/events_worker.py b/synapse/storage/data_stores/main/events_worker.py index 3b93e0597a60..7251e819f5a3 100644 --- a/synapse/storage/data_stores/main/events_worker.py +++ b/synapse/storage/data_stores/main/events_worker.py @@ -287,7 +287,7 @@ def get_events_as_list( # we have to recheck auth now. if not allow_rejected and entry.event.type == EventTypes.Redaction: - if not hasattr(entry.event, "redacts"): + if entry.event.redacts is None: # A redacted redaction doesn't have a `redacts` key, in # which case lets just withhold the event. # diff --git a/tests/storage/test_redaction.py b/tests/storage/test_redaction.py index dc4517335520..feb1c07cb207 100644 --- a/tests/storage/test_redaction.py +++ b/tests/storage/test_redaction.py @@ -398,3 +398,38 @@ def test_redact_redaction(self): self.get_success( self.store.get_event(first_redact_event.event_id, allow_none=True) ) + + def test_store_redacted_redaction(self): + """Tests that we can store a redacted redaction. + """ + + self.get_success( + self.inject_room_member(self.room1, self.u_alice, Membership.JOIN) + ) + + builder = self.event_builder_factory.for_room_version( + RoomVersions.V1, + { + "type": EventTypes.Redaction, + "sender": self.u_alice.to_string(), + "room_id": self.room1.to_string(), + "content": {"reason": "foo"}, + }, + ) + + redaction_event, context = self.get_success( + self.event_creation_handler.create_new_client_event(builder) + ) + + self.get_success( + self.storage.persistence.persist_event(redaction_event, context) + ) + + # Now lets jump to the future where we have censored the redaction event + # in the DB. + self.reactor.advance(60 * 60 * 24 * 31) + + # We just want to check that fetching the event doesn't raise an exception. 
+ self.get_success( + self.store.get_event(redaction_event.event_id, allow_none=True) + ) From aa6ad288f16ed263b2a94b480259639d52ff6cad Mon Sep 17 00:00:00 2001 From: Jason Robinson Date: Fri, 24 Jan 2020 11:01:57 +0200 Subject: [PATCH 05/67] Clarifications to the workers documentation * Add note that user_dir requires disabling user dir updates from the main synapse process. * Add note that federation_reader should have the federation listener resource. Signed-off-by: Jason Robinson --- changelog.d/6775.doc | 1 + docs/workers.md | 7 +++++++ 2 files changed, 8 insertions(+) create mode 100644 changelog.d/6775.doc diff --git a/changelog.d/6775.doc b/changelog.d/6775.doc new file mode 100644 index 000000000000..9421250f8be1 --- /dev/null +++ b/changelog.d/6775.doc @@ -0,0 +1 @@ +Clarify documentation related to user_dir and federation_reader workers. diff --git a/docs/workers.md b/docs/workers.md index 0ab269fd96bb..a5d6d18f2313 100644 --- a/docs/workers.md +++ b/docs/workers.md @@ -185,6 +185,9 @@ reverse-proxy configuration. The `^/_matrix/federation/v1/send/` endpoint must only be handled by a single instance. +Note that the `worker_listeners.resources.name` needs to be set to `federation` +for this worker. + ### `synapse.app.federation_sender` Handles sending federation traffic to other servers. Doesn't handle any @@ -265,6 +268,10 @@ the following regular expressions: ^/_matrix/client/(api/v1|r0|unstable)/user_directory/search$ +When using this worker you must also set `update_user_directory: False` in the +shared configuration file to stop the main synapse running background +jobs related to updating the user directory. + ### `synapse.app.frontend_proxy` Proxies some frequently-requested client endpoints to add caching and remove From 9f7aaf90b5ef76416852f35201a851d45eccc0a1 Mon Sep 17 00:00:00 2001 From: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> Date: Fri, 24 Jan 2020 14:28:40 +0000 Subject: [PATCH 06/67] Validate client_secret parameter (#6767) --- changelog.d/6767.bugfix | 1 + synapse/handlers/identity.py | 4 +- synapse/rest/client/v2_alpha/account.py | 23 ++++++++--- synapse/rest/client/v2_alpha/register.py | 3 ++ synapse/util/stringutils.py | 17 ++++++++ tests/util/test_stringutils.py | 51 ++++++++++++++++++++++++ 6 files changed, 93 insertions(+), 6 deletions(-) create mode 100644 changelog.d/6767.bugfix create mode 100644 tests/util/test_stringutils.py diff --git a/changelog.d/6767.bugfix b/changelog.d/6767.bugfix new file mode 100644 index 000000000000..63c7c63315e9 --- /dev/null +++ b/changelog.d/6767.bugfix @@ -0,0 +1 @@ +Validate `client_secret` parameter using the regex provided by the Client-Server API, temporarily allowing `:` characters for older clients. The `:` character will be removed in a future release. 
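The changes that follow route every `client_secret` through a single validator. A self-contained sketch of that check: it reuses the regex the patch adds to `synapse/util/stringutils.py`, but raises a plain ValueError here instead of Synapse's SynapseError, and the sample secrets are taken from the new unit test:

    import re

    # Same pattern as the one added to synapse/util/stringutils.py; ':' is
    # temporarily tolerated for older clients.
    client_secret_regex = re.compile(r"^[0-9a-zA-Z\.\=\_\-\:]+$")

    def assert_valid_client_secret(client_secret):
        if client_secret_regex.match(client_secret) is None:
            raise ValueError("Invalid client_secret parameter")

    assert_valid_client_secret("8Dj2odd-e9asd.cd==_--ddas-secret-")  # accepted
    assert_valid_client_secret("SECRET:1234567890")  # ':' still allowed for now

    try:
        assert_valid_client_secret("--+-/secret")
    except ValueError as exc:
        print(exc)  # Invalid client_secret parameter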
diff --git a/synapse/handlers/identity.py b/synapse/handlers/identity.py index 000fbf090f83..23f07832e747 100644 --- a/synapse/handlers/identity.py +++ b/synapse/handlers/identity.py @@ -38,7 +38,7 @@ from synapse.config.emailconfig import ThreepidBehaviour from synapse.http.client import SimpleHttpClient from synapse.util.hash import sha256_and_url_safe_base64 -from synapse.util.stringutils import random_string +from synapse.util.stringutils import assert_valid_client_secret, random_string from ._base import BaseHandler @@ -84,6 +84,8 @@ def threepid_from_creds(self, id_server, creds): raise SynapseError( 400, "Missing param client_secret in creds", errcode=Codes.MISSING_PARAM ) + assert_valid_client_secret(client_secret) + session_id = creds.get("sid") if not session_id: raise SynapseError( diff --git a/synapse/rest/client/v2_alpha/account.py b/synapse/rest/client/v2_alpha/account.py index fc240f5cf818..dc837d6c7582 100644 --- a/synapse/rest/client/v2_alpha/account.py +++ b/synapse/rest/client/v2_alpha/account.py @@ -30,6 +30,7 @@ ) from synapse.push.mailer import Mailer, load_jinja2_templates from synapse.util.msisdn import phone_number_to_msisdn +from synapse.util.stringutils import assert_valid_client_secret from synapse.util.threepids import check_3pid_allowed from ._base import client_patterns, interactive_auth_handler @@ -81,6 +82,8 @@ async def on_POST(self, request): # Extract params from body client_secret = body["client_secret"] + assert_valid_client_secret(client_secret) + email = body["email"] send_attempt = body["send_attempt"] next_link = body.get("next_link") # Optional param @@ -166,8 +169,9 @@ async def on_GET(self, request, medium): ) sid = parse_string(request, "sid", required=True) - client_secret = parse_string(request, "client_secret", required=True) token = parse_string(request, "token", required=True) + client_secret = parse_string(request, "client_secret", required=True) + assert_valid_client_secret(client_secret) # Attempt to validate a 3PID session try: @@ -353,6 +357,8 @@ async def on_POST(self, request): body = parse_json_object_from_request(request) assert_params_in_dict(body, ["client_secret", "email", "send_attempt"]) client_secret = body["client_secret"] + assert_valid_client_secret(client_secret) + email = body["email"] send_attempt = body["send_attempt"] next_link = body.get("next_link") # Optional param @@ -413,6 +419,8 @@ async def on_POST(self, request): body, ["client_secret", "country", "phone_number", "send_attempt"] ) client_secret = body["client_secret"] + assert_valid_client_secret(client_secret) + country = body["country"] phone_number = body["phone_number"] send_attempt = body["send_attempt"] @@ -493,8 +501,9 @@ async def on_GET(self, request): ) sid = parse_string(request, "sid", required=True) - client_secret = parse_string(request, "client_secret", required=True) token = parse_string(request, "token", required=True) + client_secret = parse_string(request, "client_secret", required=True) + assert_valid_client_secret(client_secret) # Attempt to validate a 3PID session try: @@ -559,6 +568,7 @@ async def on_POST(self, request): body = parse_json_object_from_request(request) assert_params_in_dict(body, ["client_secret", "sid", "token"]) + assert_valid_client_secret(body["client_secret"]) # Proxy submit_token request to msisdn threepid delegate response = await self.identity_handler.proxy_msisdn_submit_token( @@ -600,8 +610,9 @@ async def on_POST(self, request): ) assert_params_in_dict(threepid_creds, ["client_secret", "sid"]) - client_secret = 
threepid_creds["client_secret"] sid = threepid_creds["sid"] + client_secret = threepid_creds["client_secret"] + assert_valid_client_secret(client_secret) validation_session = await self.identity_handler.validate_threepid_session( client_secret, sid @@ -637,8 +648,9 @@ async def on_POST(self, request): body = parse_json_object_from_request(request) assert_params_in_dict(body, ["client_secret", "sid"]) - client_secret = body["client_secret"] sid = body["sid"] + client_secret = body["client_secret"] + assert_valid_client_secret(client_secret) await self.auth_handler.validate_user_via_ui_auth( requester, body, self.hs.get_ip_from_request(request) @@ -676,8 +688,9 @@ async def on_POST(self, request): assert_params_in_dict(body, ["id_server", "sid", "client_secret"]) id_server = body["id_server"] sid = body["sid"] - client_secret = body["client_secret"] id_access_token = body.get("id_access_token") # optional + client_secret = body["client_secret"] + assert_valid_client_secret(client_secret) requester = await self.auth.get_user_by_req(request) user_id = requester.user.to_string() diff --git a/synapse/rest/client/v2_alpha/register.py b/synapse/rest/client/v2_alpha/register.py index 1bda9aec7ef6..a09189b1b469 100644 --- a/synapse/rest/client/v2_alpha/register.py +++ b/synapse/rest/client/v2_alpha/register.py @@ -49,6 +49,7 @@ from synapse.push.mailer import load_jinja2_templates from synapse.util.msisdn import phone_number_to_msisdn from synapse.util.ratelimitutils import FederationRateLimiter +from synapse.util.stringutils import assert_valid_client_secret from synapse.util.threepids import check_3pid_allowed from ._base import client_patterns, interactive_auth_handler @@ -116,6 +117,8 @@ async def on_POST(self, request): # Extract params from body client_secret = body["client_secret"] + assert_valid_client_secret(client_secret) + email = body["email"] send_attempt = body["send_attempt"] next_link = body.get("next_link") # Optional param diff --git a/synapse/util/stringutils.py b/synapse/util/stringutils.py index 982c6d81ca82..2c0dcb5208bd 100644 --- a/synapse/util/stringutils.py +++ b/synapse/util/stringutils.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd +# Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,14 +15,22 @@ # limitations under the License. import random +import re import string import six from six import PY2, PY3 from six.moves import range +from synapse.api.errors import Codes, SynapseError + _string_with_symbols = string.digits + string.ascii_letters + ".,;:^&*-_+=#~@" +# https://matrix.org/docs/spec/client_server/r0.6.0#post-matrix-client-r0-register-email-requesttoken +# Note: The : character is allowed here for older clients, but will be removed in a +# future release. Context: https://github.com/matrix-org/synapse/issues/6766 +client_secret_regex = re.compile(r"^[0-9a-zA-Z\.\=\_\-\:]+$") + # random_string and random_string_with_symbols are used for a range of things, # some cryptographically important, some less so. We use SystemRandom to make sure # we get cryptographically-secure randoms. 
@@ -109,3 +118,11 @@ def exception_to_unicode(e): return msg.decode("utf-8", errors="replace") else: return msg + + +def assert_valid_client_secret(client_secret): + """Validate that a given string matches the client_secret regex defined by the spec""" + if client_secret_regex.match(client_secret) is None: + raise SynapseError( + 400, "Invalid client_secret parameter", errcode=Codes.INVALID_PARAM + ) diff --git a/tests/util/test_stringutils.py b/tests/util/test_stringutils.py new file mode 100644 index 000000000000..4f4da29a98ba --- /dev/null +++ b/tests/util/test_stringutils.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 The Matrix.org Foundation C.I.C. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from synapse.api.errors import SynapseError +from synapse.util.stringutils import assert_valid_client_secret + +from .. import unittest + + +class StringUtilsTestCase(unittest.TestCase): + def test_client_secret_regex(self): + """Ensure that client_secret does not contain illegal characters""" + good = [ + "abcde12345", + "ABCabc123", + "_--something==_", + "...--==-18913", + "8Dj2odd-e9asd.cd==_--ddas-secret-", + # We temporarily allow : characters: https://github.com/matrix-org/synapse/issues/6766 + # To be removed in a future release + "SECRET:1234567890", + ] + + bad = [ + "--+-/secret", + "\\dx--dsa288", + "", + "AAS//", + "asdj**", + ">X> Date: Mon, 27 Jan 2020 10:20:48 +0200 Subject: [PATCH 07/67] Fix federation_reader listeners doc as per PR review Signed-off-by: Jason Robinson --- docs/workers.md | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/docs/workers.md b/docs/workers.md index a5d6d18f2313..09a9d8a7b85c 100644 --- a/docs/workers.md +++ b/docs/workers.md @@ -185,8 +185,18 @@ reverse-proxy configuration. The `^/_matrix/federation/v1/send/` endpoint must only be handled by a single instance. -Note that the `worker_listeners.resources.name` needs to be set to `federation` -for this worker. +Note that `federation` must be added to the listener resources in the worker config: + +```yaml +worker_app: synapse.app.federation_reader +... +worker_listeners: + - type: http + port: + resources: + - names: + - federation +``` ### `synapse.app.federation_sender` From cf9d56e5cfa93dbd5f52df89e1fdb088755d811a Mon Sep 17 00:00:00 2001 From: Jason Robinson Date: Mon, 27 Jan 2020 14:09:59 +0200 Subject: [PATCH 08/67] Formatting of changelog Co-Authored-By: Brendan Abolivier --- changelog.d/6775.doc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelog.d/6775.doc b/changelog.d/6775.doc index 9421250f8be1..c6078ef82d13 100644 --- a/changelog.d/6775.doc +++ b/changelog.d/6775.doc @@ -1 +1 @@ -Clarify documentation related to user_dir and federation_reader workers. +Clarify documentation related to `user_dir` and `federation_reader` workers. 
From d5275fc55f4edc42d1543825da2c13df63d96927 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 27 Jan 2020 13:47:50 +0000 Subject: [PATCH 09/67] Propagate cache invalidates from workers to other workers. (#6748) Currently if a worker invalidates a cache it will be streamed to master, which then didn't forward those to other workers. --- changelog.d/6748.misc | 1 + synapse/replication/tcp/protocol.py | 2 +- synapse/replication/tcp/resource.py | 9 ++++++--- synapse/storage/data_stores/main/cache.py | 22 +++++++++++++++++++++- 4 files changed, 29 insertions(+), 5 deletions(-) create mode 100644 changelog.d/6748.misc diff --git a/changelog.d/6748.misc b/changelog.d/6748.misc new file mode 100644 index 000000000000..de320d4cd9a7 --- /dev/null +++ b/changelog.d/6748.misc @@ -0,0 +1 @@ +Propagate cache invalidates from workers to other workers. diff --git a/synapse/replication/tcp/protocol.py b/synapse/replication/tcp/protocol.py index 131e5acb09ca..bc1482a9bbf2 100644 --- a/synapse/replication/tcp/protocol.py +++ b/synapse/replication/tcp/protocol.py @@ -459,7 +459,7 @@ async def on_REMOVE_PUSHER(self, cmd): await self.streamer.on_remove_pusher(cmd.app_id, cmd.push_key, cmd.user_id) async def on_INVALIDATE_CACHE(self, cmd): - self.streamer.on_invalidate_cache(cmd.cache_func, cmd.keys) + await self.streamer.on_invalidate_cache(cmd.cache_func, cmd.keys) async def on_REMOTE_SERVER_UP(self, cmd: RemoteServerUpCommand): self.streamer.on_remote_server_up(cmd.data) diff --git a/synapse/replication/tcp/resource.py b/synapse/replication/tcp/resource.py index 6ebf944f66a4..ce60ae2e07f1 100644 --- a/synapse/replication/tcp/resource.py +++ b/synapse/replication/tcp/resource.py @@ -17,7 +17,7 @@ import logging import random -from typing import List +from typing import Any, List from six import itervalues @@ -271,11 +271,14 @@ async def on_remove_pusher(self, app_id, push_key, user_id): self.notifier.on_new_replication_data() @measure_func("repl.on_invalidate_cache") - def on_invalidate_cache(self, cache_func, keys): + async def on_invalidate_cache(self, cache_func: str, keys: List[Any]): """The client has asked us to invalidate a cache """ invalidate_cache_counter.inc() - getattr(self.store, cache_func).invalidate(tuple(keys)) + + # We invalidate the cache locally, but then also stream that to other + # workers. + await self.store.invalidate_cache_and_stream(cache_func, tuple(keys)) @measure_func("repl.on_user_ip") async def on_user_ip( diff --git a/synapse/storage/data_stores/main/cache.py b/synapse/storage/data_stores/main/cache.py index afa2b41c989a..d4c44dcc7586 100644 --- a/synapse/storage/data_stores/main/cache.py +++ b/synapse/storage/data_stores/main/cache.py @@ -16,7 +16,7 @@ import itertools import logging -from typing import Any, Iterable, Optional +from typing import Any, Iterable, Optional, Tuple from twisted.internet import defer @@ -33,6 +33,26 @@ class CacheInvalidationStore(SQLBaseStore): + async def invalidate_cache_and_stream(self, cache_name: str, keys: Tuple[Any, ...]): + """Invalidates the cache and adds it to the cache stream so slaves + will know to invalidate their caches. + + This should only be used to invalidate caches where slaves won't + otherwise know from other replication streams that the cache should + be invalidated. 
+ """ + cache_func = getattr(self, cache_name, None) + if not cache_func: + return + + cache_func.invalidate(keys) + await self.runInteraction( + "invalidate_cache_and_stream", + self._send_invalidation_to_replication, + cache_func.__name__, + keys, + ) + def _invalidate_cache_and_stream(self, txn, cache_func, keys): """Invalidates the cache and adds it to the cache stream so slaves will know to invalidate their caches. From 8df862e45d9848c226399c8e39d31497461516ff Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Mon, 27 Jan 2020 14:30:57 +0000 Subject: [PATCH 10/67] Add `rooms.room_version` column (#6729) This is so that we don't have to rely on pulling it out from `current_state_events` table. --- changelog.d/6729.misc | 1 + synapse/federation/federation_client.py | 50 ++++++---- synapse/handlers/federation.py | 65 ++++++++++--- synapse/handlers/room.py | 52 ++++++---- .../v2_alpha/room_upgrade_rest_servlet.py | 3 +- synapse/storage/data_stores/main/room.py | 94 +++++++++++++++++-- .../schema/delta/57/rooms_version_column.sql | 24 +++++ synapse/storage/data_stores/main/state.py | 34 ++++--- tests/storage/test_room.py | 7 +- tests/storage/test_state.py | 5 +- tests/utils.py | 8 ++ 11 files changed, 270 insertions(+), 73 deletions(-) create mode 100644 changelog.d/6729.misc create mode 100644 synapse/storage/data_stores/main/schema/delta/57/rooms_version_column.sql diff --git a/changelog.d/6729.misc b/changelog.d/6729.misc new file mode 100644 index 000000000000..5537355bea8e --- /dev/null +++ b/changelog.d/6729.misc @@ -0,0 +1 @@ +Record room versions in the `rooms` table. diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index af652a76596c..d57e8ca7a280 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -17,6 +17,7 @@ import copy import itertools import logging +from typing import Dict, Iterable from prometheus_client import Counter @@ -29,6 +30,7 @@ FederationDeniedError, HttpResponseException, SynapseError, + UnsupportedRoomVersionError, ) from synapse.api.room_versions import ( KNOWN_ROOM_VERSIONS, @@ -385,6 +387,8 @@ def _try_destination_list(self, description, destinations, callback): return res except InvalidResponseError as e: logger.warning("Failed to %s via %s: %s", description, destination, e) + except UnsupportedRoomVersionError: + raise except HttpResponseException as e: if not 500 <= e.code < 600: raise e.to_synapse_error() @@ -404,7 +408,13 @@ def _try_destination_list(self, description, destinations, callback): raise SynapseError(502, "Failed to %s via any server" % (description,)) def make_membership_event( - self, destinations, room_id, user_id, membership, content, params + self, + destinations: Iterable[str], + room_id: str, + user_id: str, + membership: str, + content: dict, + params: Dict[str, str], ): """ Creates an m.room.member event, with context, without participating in the room. @@ -417,21 +427,23 @@ def make_membership_event( Note that this does not append any events to any graphs. Args: - destinations (Iterable[str]): Candidate homeservers which are probably + destinations: Candidate homeservers which are probably participating in the room. - room_id (str): The room in which the event will happen. - user_id (str): The user whose membership is being evented. - membership (str): The "membership" property of the event. Must be - one of "join" or "leave". - content (dict): Any additional data to put into the content field - of the event. 
- params (dict[str, str|Iterable[str]]): Query parameters to include in the - request. + room_id: The room in which the event will happen. + user_id: The user whose membership is being evented. + membership: The "membership" property of the event. Must be one of + "join" or "leave". + content: Any additional data to put into the content field of the + event. + params: Query parameters to include in the request. Return: - Deferred[tuple[str, FrozenEvent, int]]: resolves to a tuple of - `(origin, event, event_format)` where origin is the remote - homeserver which generated the event, and event_format is one of - `synapse.api.room_versions.EventFormatVersions`. + Deferred[Tuple[str, FrozenEvent, RoomVersion]]: resolves to a tuple of + `(origin, event, room_version)` where origin is the remote + homeserver which generated the event, and room_version is the + version of the room. + + Fails with a `UnsupportedRoomVersionError` if remote responds with + a room version we don't understand. Fails with a ``SynapseError`` if the chosen remote server returns a 300/400 code. @@ -453,8 +465,12 @@ def send_request(destination): # Note: If not supplied, the room version may be either v1 or v2, # however either way the event format version will be v1. - room_version = ret.get("room_version", RoomVersions.V1.identifier) - event_format = room_version_to_event_format(room_version) + room_version_id = ret.get("room_version", RoomVersions.V1.identifier) + room_version = KNOWN_ROOM_VERSIONS.get(room_version_id) + if not room_version: + raise UnsupportedRoomVersionError() + + event_format = room_version_to_event_format(room_version_id) pdu_dict = ret.get("event", None) if not isinstance(pdu_dict, dict): @@ -478,7 +494,7 @@ def send_request(destination): event_dict=pdu_dict, ) - return (destination, ev, event_format) + return (destination, ev, room_version) return self._try_destination_list( "make_" + membership, destinations, send_request diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index d4f9a792fce4..f824ee79a0ba 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -44,10 +44,10 @@ StoreError, SynapseError, ) -from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersions +from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion, RoomVersions from synapse.crypto.event_signing import compute_event_signature from synapse.event_auth import auth_types_for_event -from synapse.events import EventBase +from synapse.events import EventBase, room_version_to_event_format from synapse.events.snapshot import EventContext from synapse.events.validator import EventValidator from synapse.logging.context import ( @@ -703,8 +703,20 @@ async def _process_received_pdu( if not room: try: + prev_state_ids = await context.get_prev_state_ids() + create_event = await self.store.get_event( + prev_state_ids[(EventTypes.Create, "")] + ) + + room_version_id = create_event.content.get( + "room_version", RoomVersions.V1.identifier + ) + await self.store.store_room( - room_id=room_id, room_creator_user_id="", is_public=False + room_id=room_id, + room_creator_user_id="", + is_public=False, + room_version=KNOWN_ROOM_VERSIONS[room_version_id], ) except StoreError: logger.exception("Failed to store room.") @@ -1186,7 +1198,7 @@ def do_invite_join(self, target_hosts, room_id, joinee, content): """ logger.debug("Joining %s to %s", joinee, room_id) - origin, event, event_format_version = yield self._make_and_verify_event( + origin, event, room_version = yield 
self._make_and_verify_event( target_hosts, room_id, joinee, @@ -1214,6 +1226,8 @@ def do_invite_join(self, target_hosts, room_id, joinee, content): target_hosts.insert(0, origin) except ValueError: pass + + event_format_version = room_version_to_event_format(room_version.identifier) ret = yield self.federation_client.send_join( target_hosts, event, event_format_version ) @@ -1234,13 +1248,18 @@ def do_invite_join(self, target_hosts, room_id, joinee, content): try: yield self.store.store_room( - room_id=room_id, room_creator_user_id="", is_public=False + room_id=room_id, + room_creator_user_id="", + is_public=False, + room_version=room_version, ) except Exception: # FIXME pass - yield self._persist_auth_tree(origin, auth_chain, state, event) + yield self._persist_auth_tree( + origin, auth_chain, state, event, room_version + ) # Check whether this room is the result of an upgrade of a room we already know # about. If so, migrate over user information @@ -1486,7 +1505,7 @@ def on_invite_request(self, origin, pdu): @defer.inlineCallbacks def do_remotely_reject_invite(self, target_hosts, room_id, user_id, content): - origin, event, event_format_version = yield self._make_and_verify_event( + origin, event, room_version = yield self._make_and_verify_event( target_hosts, room_id, user_id, "leave", content=content ) # Mark as outlier as we don't have any state for this event; we're not @@ -1513,7 +1532,11 @@ def do_remotely_reject_invite(self, target_hosts, room_id, user_id, content): def _make_and_verify_event( self, target_hosts, room_id, user_id, membership, content={}, params=None ): - origin, event, format_ver = yield self.federation_client.make_membership_event( + ( + origin, + event, + room_version, + ) = yield self.federation_client.make_membership_event( target_hosts, room_id, user_id, membership, content, params=params ) @@ -1525,7 +1548,7 @@ def _make_and_verify_event( assert event.user_id == user_id assert event.state_key == user_id assert event.room_id == room_id - return origin, event, format_ver + return origin, event, room_version @defer.inlineCallbacks @log_function @@ -1810,7 +1833,14 @@ def prep(ev_info: _NewEventInfo): ) @defer.inlineCallbacks - def _persist_auth_tree(self, origin, auth_events, state, event): + def _persist_auth_tree( + self, + origin: str, + auth_events: List[EventBase], + state: List[EventBase], + event: EventBase, + room_version: RoomVersion, + ): """Checks the auth chain is valid (and passes auth checks) for the state and event. Then persists the auth chain and state atomically. Persists the event separately. Notifies about the persisted events @@ -1819,10 +1849,12 @@ def _persist_auth_tree(self, origin, auth_events, state, event): Will attempt to fetch missing auth events. Args: - origin (str): Where the events came from - auth_events (list) - state (list) - event (Event) + origin: Where the events came from + auth_events + state + event + room_version: The room version we expect this room to have, and + will raise if it doesn't match the version in the create event. Returns: Deferred @@ -1848,10 +1880,13 @@ def _persist_auth_tree(self, origin, auth_events, state, event): # invalid, and it would fail auth checks anyway. 
raise SynapseError(400, "No create event in state") - room_version = create_event.content.get( + room_version_id = create_event.content.get( "room_version", RoomVersions.V1.identifier ) + if room_version.identifier != room_version_id: + raise SynapseError(400, "Room version mismatch") + missing_auth_events = set() for e in itertools.chain(auth_events, state, [event]): for e_id in e.auth_event_ids(): diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 9f50196ea76a..a9490782b7e6 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -29,7 +29,7 @@ from synapse.api.constants import EventTypes, JoinRules, RoomCreationPreset from synapse.api.errors import AuthError, Codes, NotFoundError, StoreError, SynapseError -from synapse.api.room_versions import KNOWN_ROOM_VERSIONS +from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion from synapse.http.endpoint import parse_and_validate_server_name from synapse.storage.state import StateFilter from synapse.types import ( @@ -100,13 +100,15 @@ def __init__(self, hs): self.third_party_event_rules = hs.get_third_party_event_rules() @defer.inlineCallbacks - def upgrade_room(self, requester, old_room_id, new_version): + def upgrade_room( + self, requester: Requester, old_room_id: str, new_version: RoomVersion + ): """Replace a room with a new room with a different version Args: - requester (synapse.types.Requester): the user requesting the upgrade - old_room_id (unicode): the id of the room to be replaced - new_version (unicode): the new room version to use + requester: the user requesting the upgrade + old_room_id: the id of the room to be replaced + new_version: the new room version to use Returns: Deferred[unicode]: the new room id @@ -151,7 +153,7 @@ def _upgrade_room(self, requester, old_room_id, new_version): if r is None: raise NotFoundError("Unknown room id %s" % (old_room_id,)) new_room_id = yield self._generate_room_id( - creator_id=user_id, is_public=r["is_public"] + creator_id=user_id, is_public=r["is_public"], room_version=new_version, ) logger.info("Creating new room %s to replace %s", new_room_id, old_room_id) @@ -299,18 +301,22 @@ def _update_upgraded_room_pls( @defer.inlineCallbacks def clone_existing_room( - self, requester, old_room_id, new_room_id, new_room_version, tombstone_event_id + self, + requester: Requester, + old_room_id: str, + new_room_id: str, + new_room_version: RoomVersion, + tombstone_event_id: str, ): """Populate a new room based on an old room Args: - requester (synapse.types.Requester): the user requesting the upgrade - old_room_id (unicode): the id of the room to be replaced - new_room_id (unicode): the id to give the new room (should already have been + requester: the user requesting the upgrade + old_room_id : the id of the room to be replaced + new_room_id: the id to give the new room (should already have been created with _gemerate_room_id()) - new_room_version (unicode): the new room version to use - tombstone_event_id (unicode|str): the ID of the tombstone event in the old - room. + new_room_version: the new room version to use + tombstone_event_id: the ID of the tombstone event in the old room. 
Returns: Deferred """ @@ -320,7 +326,7 @@ def clone_existing_room( raise SynapseError(403, "You are not permitted to create rooms") creation_content = { - "room_version": new_room_version, + "room_version": new_room_version.identifier, "predecessor": {"room_id": old_room_id, "event_id": tombstone_event_id}, } @@ -577,14 +583,15 @@ def create_room(self, requester, config, ratelimit=True, creator_join_profile=No if ratelimit: yield self.ratelimit(requester) - room_version = config.get( + room_version_id = config.get( "room_version", self.config.default_room_version.identifier ) - if not isinstance(room_version, string_types): + if not isinstance(room_version_id, string_types): raise SynapseError(400, "room_version must be a string", Codes.BAD_JSON) - if room_version not in KNOWN_ROOM_VERSIONS: + room_version = KNOWN_ROOM_VERSIONS.get(room_version_id) + if room_version is None: raise SynapseError( 400, "Your homeserver does not support this room version", @@ -631,7 +638,9 @@ def create_room(self, requester, config, ratelimit=True, creator_join_profile=No visibility = config.get("visibility", None) is_public = visibility == "public" - room_id = yield self._generate_room_id(creator_id=user_id, is_public=is_public) + room_id = yield self._generate_room_id( + creator_id=user_id, is_public=is_public, room_version=room_version, + ) directory_handler = self.hs.get_handlers().directory_handler if room_alias: @@ -660,7 +669,7 @@ def create_room(self, requester, config, ratelimit=True, creator_join_profile=No creation_content = config.get("creation_content", {}) # override any attempt to set room versions via the creation_content - creation_content["room_version"] = room_version + creation_content["room_version"] = room_version.identifier yield self._send_events_for_new_room( requester, @@ -849,7 +858,9 @@ def send(etype, content, **kwargs): yield send(etype=etype, state_key=state_key, content=content) @defer.inlineCallbacks - def _generate_room_id(self, creator_id, is_public): + def _generate_room_id( + self, creator_id: str, is_public: str, room_version: RoomVersion, + ): # autogen room IDs and try to create it. We may clash, so just # try a few times till one goes through, giving up eventually. 
attempts = 0 @@ -863,6 +874,7 @@ def _generate_room_id(self, creator_id, is_public): room_id=gen_room_id, room_creator_user_id=creator_id, is_public=is_public, + room_version=room_version, ) return gen_room_id except StoreError: diff --git a/synapse/rest/client/v2_alpha/room_upgrade_rest_servlet.py b/synapse/rest/client/v2_alpha/room_upgrade_rest_servlet.py index ca97330797ca..f357015a7001 100644 --- a/synapse/rest/client/v2_alpha/room_upgrade_rest_servlet.py +++ b/synapse/rest/client/v2_alpha/room_upgrade_rest_servlet.py @@ -64,7 +64,8 @@ async def on_POST(self, request, room_id): assert_params_in_dict(content, ("new_version",)) new_version = content["new_version"] - if new_version not in KNOWN_ROOM_VERSIONS: + new_version = KNOWN_ROOM_VERSIONS.get(content["new_version"]) + if new_version is None: raise SynapseError( 400, "Your homeserver does not support this room version", diff --git a/synapse/storage/data_stores/main/room.py b/synapse/storage/data_stores/main/room.py index d968803ad2fb..9a17e336ba16 100644 --- a/synapse/storage/data_stores/main/room.py +++ b/synapse/storage/data_stores/main/room.py @@ -29,9 +29,10 @@ from synapse.api.constants import EventTypes from synapse.api.errors import StoreError +from synapse.api.room_versions import RoomVersion, RoomVersions from synapse.storage._base import SQLBaseStore from synapse.storage.data_stores.main.search import SearchStore -from synapse.storage.database import Database +from synapse.storage.database import Database, LoggingTransaction from synapse.types import ThirdPartyInstanceID from synapse.util.caches.descriptors import cached, cachedInlineCallbacks @@ -734,6 +735,7 @@ def _quarantine_media_txn( class RoomBackgroundUpdateStore(SQLBaseStore): REMOVE_TOMESTONED_ROOMS_BG_UPDATE = "remove_tombstoned_rooms_from_directory" + ADD_ROOMS_ROOM_VERSION_COLUMN = "add_rooms_room_version_column" def __init__(self, database: Database, db_conn, hs): super(RoomBackgroundUpdateStore, self).__init__(database, db_conn, hs) @@ -749,6 +751,11 @@ def __init__(self, database: Database, db_conn, hs): self._remove_tombstoned_rooms_from_directory, ) + self.db.updates.register_background_update_handler( + self.ADD_ROOMS_ROOM_VERSION_COLUMN, + self._background_add_rooms_room_version_column, + ) + @defer.inlineCallbacks def _background_insert_retention(self, progress, batch_size): """Retrieves a list of all rooms within a range and inserts an entry for each of @@ -817,6 +824,73 @@ def _background_insert_retention_txn(txn): defer.returnValue(batch_size) + async def _background_add_rooms_room_version_column( + self, progress: dict, batch_size: int + ): + """Background update to go and add room version inforamtion to `rooms` + table from `current_state_events` table. + """ + + last_room_id = progress.get("room_id", "") + + def _background_add_rooms_room_version_column_txn(txn: LoggingTransaction): + sql = """ + SELECT room_id, json FROM current_state_events + INNER JOIN event_json USING (room_id, event_id) + WHERE room_id > ? AND type = 'm.room.create' AND state_key = '' + ORDER BY room_id + LIMIT ? 
+ """ + + txn.execute(sql, (last_room_id, batch_size)) + + updates = [] + for room_id, event_json in txn: + event_dict = json.loads(event_json) + room_version_id = event_dict.get("content", {}).get( + "room_version", RoomVersions.V1.identifier + ) + + creator = event_dict.get("content").get("creator") + + updates.append((room_id, creator, room_version_id)) + + if not updates: + return True + + new_last_room_id = "" + for room_id, creator, room_version_id in updates: + # We upsert here just in case we don't already have a row, + # mainly for paranoia as much badness would happen if we don't + # insert the row and then try and get the room version for the + # room. + self.db.simple_upsert_txn( + txn, + table="rooms", + keyvalues={"room_id": room_id}, + values={"room_version": room_version_id}, + insertion_values={"is_public": False, "creator": creator}, + ) + new_last_room_id = room_id + + self.db.updates._background_update_progress_txn( + txn, self.ADD_ROOMS_ROOM_VERSION_COLUMN, {"room_id": new_last_room_id} + ) + + return False + + end = await self.db.runInteraction( + "_background_add_rooms_room_version_column", + _background_add_rooms_room_version_column_txn, + ) + + if end: + await self.db.updates._end_background_update( + self.ADD_ROOMS_ROOM_VERSION_COLUMN + ) + + return batch_size + async def _remove_tombstoned_rooms_from_directory( self, progress, batch_size ) -> int: @@ -881,14 +955,21 @@ def __init__(self, database: Database, db_conn, hs): self.config = hs.config @defer.inlineCallbacks - def store_room(self, room_id, room_creator_user_id, is_public): + def store_room( + self, + room_id: str, + room_creator_user_id: str, + is_public: bool, + room_version: RoomVersion, + ): """Stores a room. Args: - room_id (str): The desired room ID, can be None. - room_creator_user_id (str): The user ID of the room creator. - is_public (bool): True to indicate that this room should appear in - public room lists. + room_id: The desired room ID, can be None. + room_creator_user_id: The user ID of the room creator. + is_public: True to indicate that this room should appear in + public room lists. + room_version: The version of the room Raises: StoreError if the room could not be stored. """ @@ -902,6 +983,7 @@ def store_room_txn(txn, next_id): "room_id": room_id, "creator": room_creator_user_id, "is_public": is_public, + "room_version": room_version.identifier, }, ) if is_public: diff --git a/synapse/storage/data_stores/main/schema/delta/57/rooms_version_column.sql b/synapse/storage/data_stores/main/schema/delta/57/rooms_version_column.sql new file mode 100644 index 000000000000..352a66f5b0b0 --- /dev/null +++ b/synapse/storage/data_stores/main/schema/delta/57/rooms_version_column.sql @@ -0,0 +1,24 @@ +/* Copyright 2020 The Matrix.org Foundation C.I.C + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +-- We want to start storing the room version independently of +-- `current_state_events` so that we can delete stale entries from it without +-- losing the information. 
+ALTER TABLE rooms ADD COLUMN room_version TEXT; + + +INSERT into background_updates (update_name, progress_json) + VALUES ('add_rooms_room_version_column', '{}'); diff --git a/synapse/storage/data_stores/main/state.py b/synapse/storage/data_stores/main/state.py index 33bebd1c485a..bd7b0276f14d 100644 --- a/synapse/storage/data_stores/main/state.py +++ b/synapse/storage/data_stores/main/state.py @@ -60,24 +60,34 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): def __init__(self, database: Database, db_conn, hs): super(StateGroupWorkerStore, self).__init__(database, db_conn, hs) - @defer.inlineCallbacks - def get_room_version(self, room_id): + @cached(max_entries=10000) + async def get_room_version(self, room_id: str) -> str: """Get the room_version of a given room - Args: - room_id (str) - - Returns: - Deferred[str] - Raises: - NotFoundError if the room is unknown + NotFoundError: if the room is unknown """ - # for now we do this by looking at the create event. We may want to cache this - # more intelligently in future. + + # First we try looking up room version from the database, but for old + # rooms we might not have added the room version to it yet so we fall + # back to previous behaviour and look in current state events. + + # We really should have an entry in the rooms table for every room we + # care about, but let's be a bit paranoid (at least while the background + # update is happening) to avoid breaking existing rooms. + version = await self.db.simple_select_one_onecol( + table="rooms", + keyvalues={"room_id": room_id}, + retcol="room_version", + desc="get_room_version", + allow_none=True, + ) + + if version is not None: + return version # Retrieve the room's create event - create_event = yield self.get_create_event_for_room(room_id) + create_event = await self.get_create_event_for_room(room_id) return create_event.content.get("room_version", "1") @defer.inlineCallbacks diff --git a/tests/storage/test_room.py b/tests/storage/test_room.py index 3ddaa151fefc..086adeb8fd39 100644 --- a/tests/storage/test_room.py +++ b/tests/storage/test_room.py @@ -17,6 +17,7 @@ from twisted.internet import defer from synapse.api.constants import EventTypes +from synapse.api.room_versions import RoomVersions from synapse.types import RoomAlias, RoomID, UserID from tests import unittest @@ -40,6 +41,7 @@ def setUp(self): self.room.to_string(), room_creator_user_id=self.u_creator.to_string(), is_public=True, + room_version=RoomVersions.V1, ) @defer.inlineCallbacks @@ -68,7 +70,10 @@ def setUp(self): self.room = RoomID.from_string("!abcde:test") yield self.store.store_room( - self.room.to_string(), room_creator_user_id="@creator:text", is_public=True + self.room.to_string(), + room_creator_user_id="@creator:text", + is_public=True, + room_version=RoomVersions.V1, ) @defer.inlineCallbacks diff --git a/tests/storage/test_state.py b/tests/storage/test_state.py index d6ecf102f894..04d58fbf2479 100644 --- a/tests/storage/test_state.py +++ b/tests/storage/test_state.py @@ -45,7 +45,10 @@ def setUp(self): self.room = RoomID.from_string("!abc123:test") yield self.store.store_room( - self.room.to_string(), room_creator_user_id="@creator:text", is_public=True + self.room.to_string(), + room_creator_user_id="@creator:text", + is_public=True, + room_version=RoomVersions.V1, ) @defer.inlineCallbacks diff --git a/tests/utils.py b/tests/utils.py index e2e9cafd791a..513f358f4fd0 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -639,9 +639,17 @@ def create_room(hs, room_id, creator_id): """ 
persistence_store = hs.get_storage().persistence + store = hs.get_datastore() event_builder_factory = hs.get_event_builder_factory() event_creation_handler = hs.get_event_creation_handler() + yield store.store_room( + room_id=room_id, + room_creator_user_id=creator_id, + is_public=False, + room_version=RoomVersions.V1, + ) + builder = event_builder_factory.for_room_version( RoomVersions.V1, { From 02b44db922f01a35787d2535a834c9774b68020b Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 28 Jan 2020 13:44:21 +0000 Subject: [PATCH 11/67] Warn if postgres database has non-C locale. (#6734) As using non-C locale can cause issues on upgrading OS. --- UPGRADE.rst | 9 +++++++ changelog.d/6734.bugfix | 1 + docs/postgres.md | 20 +++++++++++++- synapse/storage/engines/postgres.py | 42 +++++++++++++++++++++++++++++ synapse/storage/engines/sqlite.py | 5 ++++ synapse/storage/prepare_database.py | 5 ++++ 6 files changed, 81 insertions(+), 1 deletion(-) create mode 100644 changelog.d/6734.bugfix diff --git a/UPGRADE.rst b/UPGRADE.rst index a0202932b1cb..470246f12803 100644 --- a/UPGRADE.rst +++ b/UPGRADE.rst @@ -76,6 +76,15 @@ for example: dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb +Upgrading to **** +=============================== + +Synapse will now log a warning on start up if used with a PostgreSQL database +that has a non-recommended locale set. + +See [docs/postgres.md](docs/postgres.md) for details. + + Upgrading to v1.8.0 =================== diff --git a/changelog.d/6734.bugfix b/changelog.d/6734.bugfix new file mode 100644 index 000000000000..79c6bab4d1c3 --- /dev/null +++ b/changelog.d/6734.bugfix @@ -0,0 +1 @@ +Warn if postgres database has a non-C locale, as that can cause issues when upgrading locales (e.g. due to upgrading OS). diff --git a/docs/postgres.md b/docs/postgres.md index 7cb1ad18d4ec..e0793ecee86b 100644 --- a/docs/postgres.md +++ b/docs/postgres.md @@ -32,7 +32,7 @@ Assuming your PostgreSQL database user is called `postgres`, first authenticate su - postgres # Or, if your system uses sudo to get administrative rights sudo -u postgres bash - + Then, create a user ``synapse_user`` with: createuser --pwprompt synapse_user @@ -63,6 +63,24 @@ You may need to enable password authentication so `synapse_user` can connect to the database. See . +### Fixing incorrect `COLLATE` or `CTYPE` + +Synapse will refuse to set up a new database if it has the wrong values of +`COLLATE` and `CTYPE` set, and will log warnings on existing databases. Using +different locales can cause issues if the locale library is updated from +underneath the database, or if a different version of the locale is used on any +replicas. + +The safest way to fix the issue is to take a dump and recreate the database with +the correct `COLLATE` and `CTYPE` parameters (as per +[docs/postgres.md](docs/postgres.md)). It is also possible to change the +parameters on a live database and run a `REINDEX` on the entire database, +however extreme care must be taken to avoid database corruption. + +Note that the above may fail with an error about duplicate rows if corruption +has already occurred, and such duplicate rows will need to be manually removed. + + ## Tuning Postgres The default settings should be fine for most deployments. 
For larger diff --git a/synapse/storage/engines/postgres.py b/synapse/storage/engines/postgres.py index c84cb452b0c6..a077345960a9 100644 --- a/synapse/storage/engines/postgres.py +++ b/synapse/storage/engines/postgres.py @@ -13,8 +13,12 @@ # See the License for the specific language governing permissions and # limitations under the License. +import logging + from ._base import IncorrectDatabaseSetup +logger = logging.getLogger(__name__) + class PostgresEngine(object): single_threaded = False @@ -52,6 +56,44 @@ def check_database(self, db_conn, allow_outdated_version: bool = False): "See docs/postgres.rst for more information." % (rows[0][0],) ) + txn.execute( + "SELECT datcollate, datctype FROM pg_database WHERE datname = current_database()" + ) + collation, ctype = txn.fetchone() + if collation != "C": + logger.warning( + "Database has incorrect collation of %r. Should be 'C'", collation + ) + + if ctype != "C": + logger.warning( + "Database has incorrect ctype of %r. Should be 'C'", ctype + ) + + def check_new_database(self, txn): + """Gets called when setting up a brand new database. This allows us to + apply stricter checks on new databases versus existing database. + """ + + txn.execute( + "SELECT datcollate, datctype FROM pg_database WHERE datname = current_database()" + ) + collation, ctype = txn.fetchone() + + errors = [] + + if collation != "C": + errors.append(" - 'COLLATE' is set to %r. Should be 'C'" % (collation,)) + + if ctype != "C": + errors.append(" - 'CTYPE' is set to %r. Should be 'C'" % (collation,)) + + if errors: + raise IncorrectDatabaseSetup( + "Database is incorrectly configured:\n\n%s\n\n" + "See docs/postgres.md for more information." % ("\n".join(errors)) + ) + def convert_param_style(self, sql): return sql.replace("?", "%s") diff --git a/synapse/storage/engines/sqlite.py b/synapse/storage/engines/sqlite.py index cbf52f51913e..641e49069758 100644 --- a/synapse/storage/engines/sqlite.py +++ b/synapse/storage/engines/sqlite.py @@ -59,6 +59,11 @@ def check_database(self, db_conn, allow_outdated_version: bool = False): if version < (3, 11, 0): raise RuntimeError("Synapse requires sqlite 3.11 or above.") + def check_new_database(self, txn): + """Gets called when setting up a brand new database. This allows us to + apply stricter checks on new databases versus existing database. + """ + def convert_param_style(self, sql): return sql diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py index e86984cd50b5..c285ef52a0ba 100644 --- a/synapse/storage/prepare_database.py +++ b/synapse/storage/prepare_database.py @@ -136,6 +136,11 @@ def _setup_new_database(cur, database_engine, data_stores): data_stores (list[str]): The names of the data stores to instantiate on the given database. """ + + # We're about to set up a brand new database so we check that its + # configured to our liking. + database_engine.check_new_database(cur) + current_dir = os.path.join(dir_path, "schema", "full_schemas") directory_entries = os.listdir(current_dir) From a8ce7aeb433e08f46306797a1252668c178a7825 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Tue, 28 Jan 2020 14:18:29 +0000 Subject: [PATCH 12/67] Pass room version object into event_auth.check and check_redaction (#6788) These are easier to work with than the strings and we normally have one around. This fixes `FederationHander._persist_auth_tree` which was passing a RoomVersion object into event_auth.check instead of a string. 
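
In practice this means call sites resolve the version string to a RoomVersion object once (via KNOWN_ROOM_VERSIONS) and pass the object through, as the hunks below do. A minimal sketch of the new calling convention — the wrapper function here is illustrative, not part of the patch:

    from synapse import event_auth
    from synapse.api.room_versions import KNOWN_ROOM_VERSIONS

    def check_event_allowed(room_version: str, event, auth_events):
        # Resolve the version string (e.g. "5") to its RoomVersion object once;
        # event_auth.check now takes the object rather than the string.
        room_version_obj = KNOWN_ROOM_VERSIONS[room_version]
        event_auth.check(
            room_version_obj, event, auth_events=auth_events, do_sig_check=False
        )
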
--- changelog.d/6788.misc | 1 + synapse/api/auth.py | 7 +++++-- synapse/event_auth.py | 34 +++++++++++++++++++++------------- synapse/handlers/federation.py | 18 +++++++++++------- synapse/handlers/message.py | 8 ++++++-- synapse/state/v1.py | 4 ++-- synapse/state/v2.py | 4 +++- tests/test_event_auth.py | 11 ++++------- 8 files changed, 53 insertions(+), 34 deletions(-) create mode 100644 changelog.d/6788.misc diff --git a/changelog.d/6788.misc b/changelog.d/6788.misc new file mode 100644 index 000000000000..5537355bea8e --- /dev/null +++ b/changelog.d/6788.misc @@ -0,0 +1 @@ +Record room versions in the `rooms` table. diff --git a/synapse/api/auth.py b/synapse/api/auth.py index 2cbfab25696e..8b1277ad028e 100644 --- a/synapse/api/auth.py +++ b/synapse/api/auth.py @@ -33,6 +33,7 @@ MissingClientTokenError, ResourceLimitError, ) +from synapse.api.room_versions import KNOWN_ROOM_VERSIONS from synapse.config.server import is_threepid_reserved from synapse.types import StateMap, UserID from synapse.util.caches import CACHE_SIZE_FACTOR, register_cache @@ -77,15 +78,17 @@ def __init__(self, hs): self._account_validity = hs.config.account_validity @defer.inlineCallbacks - def check_from_context(self, room_version, event, context, do_sig_check=True): + def check_from_context(self, room_version: str, event, context, do_sig_check=True): prev_state_ids = yield context.get_prev_state_ids() auth_events_ids = yield self.compute_auth_events( event, prev_state_ids, for_verification=True ) auth_events = yield self.store.get_events(auth_events_ids) auth_events = {(e.type, e.state_key): e for e in itervalues(auth_events)} + + room_version_obj = KNOWN_ROOM_VERSIONS[room_version] event_auth.check( - room_version, event, auth_events=auth_events, do_sig_check=do_sig_check + room_version_obj, event, auth_events=auth_events, do_sig_check=do_sig_check ) @defer.inlineCallbacks diff --git a/synapse/event_auth.py b/synapse/event_auth.py index e3a1ba47a005..016d5678e5f6 100644 --- a/synapse/event_auth.py +++ b/synapse/event_auth.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- # Copyright 2014 - 2016 OpenMarket Ltd +# Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -23,17 +24,27 @@ from synapse.api.constants import EventTypes, JoinRules, Membership from synapse.api.errors import AuthError, EventSizeError, SynapseError -from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, EventFormatVersions +from synapse.api.room_versions import ( + KNOWN_ROOM_VERSIONS, + EventFormatVersions, + RoomVersion, +) from synapse.types import UserID, get_domain_from_id logger = logging.getLogger(__name__) -def check(room_version, event, auth_events, do_sig_check=True, do_size_check=True): +def check( + room_version_obj: RoomVersion, + event, + auth_events, + do_sig_check=True, + do_size_check=True, +): """ Checks if this event is correctly authed. Args: - room_version (str): the version of the room + room_version_obj: the version of the room event: the event being checked. auth_events (dict: event-key -> event): the existing room state. 
@@ -97,10 +108,11 @@ def check(room_version, event, auth_events, do_sig_check=True, do_size_check=Tru 403, "Creation event's room_id domain does not match sender's" ) - room_version = event.content.get("room_version", "1") - if room_version not in KNOWN_ROOM_VERSIONS: + room_version_prop = event.content.get("room_version", "1") + if room_version_prop not in KNOWN_ROOM_VERSIONS: raise AuthError( - 403, "room appears to have unsupported version %s" % (room_version,) + 403, + "room appears to have unsupported version %s" % (room_version_prop,), ) # FIXME logger.debug("Allowing! %s", event) @@ -160,7 +172,7 @@ def check(room_version, event, auth_events, do_sig_check=True, do_size_check=Tru _check_power_levels(event, auth_events) if event.type == EventTypes.Redaction: - check_redaction(room_version, event, auth_events) + check_redaction(room_version_obj, event, auth_events) logger.debug("Allowing! %s", event) @@ -386,7 +398,7 @@ def _can_send_event(event, auth_events): return True -def check_redaction(room_version, event, auth_events): +def check_redaction(room_version_obj: RoomVersion, event, auth_events): """Check whether the event sender is allowed to redact the target event. Returns: @@ -406,11 +418,7 @@ def check_redaction(room_version, event, auth_events): if user_level >= redact_level: return False - v = KNOWN_ROOM_VERSIONS.get(room_version) - if not v: - raise RuntimeError("Unrecognized room version %r" % (room_version,)) - - if v.event_format == EventFormatVersions.V1: + if room_version_obj.event_format == EventFormatVersions.V1: redacter_domain = get_domain_from_id(event.event_id) redactee_domain = get_domain_from_id(event.redacts) if redacter_domain == redactee_domain: diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index f824ee79a0ba..180f165a7a8c 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -47,7 +47,7 @@ from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion, RoomVersions from synapse.crypto.event_signing import compute_event_signature from synapse.event_auth import auth_types_for_event -from synapse.events import EventBase, room_version_to_event_format +from synapse.events import EventBase from synapse.events.snapshot import EventContext from synapse.events.validator import EventValidator from synapse.logging.context import ( @@ -1198,7 +1198,7 @@ def do_invite_join(self, target_hosts, room_id, joinee, content): """ logger.debug("Joining %s to %s", joinee, room_id) - origin, event, room_version = yield self._make_and_verify_event( + origin, event, room_version_obj = yield self._make_and_verify_event( target_hosts, room_id, joinee, @@ -1227,7 +1227,7 @@ def do_invite_join(self, target_hosts, room_id, joinee, content): except ValueError: pass - event_format_version = room_version_to_event_format(room_version.identifier) + event_format_version = room_version_obj.event_format ret = yield self.federation_client.send_join( target_hosts, event, event_format_version ) @@ -1251,14 +1251,14 @@ def do_invite_join(self, target_hosts, room_id, joinee, content): room_id=room_id, room_creator_user_id="", is_public=False, - room_version=room_version, + room_version=room_version_obj, ) except Exception: # FIXME pass yield self._persist_auth_tree( - origin, auth_chain, state, event, room_version + origin, auth_chain, state, event, room_version_obj ) # Check whether this room is the result of an upgrade of a room we already know @@ -2022,6 +2022,7 @@ def _check_for_soft_fail( if do_soft_fail_check: room_version = 
yield self.store.get_room_version(event.room_id) + room_version_obj = KNOWN_ROOM_VERSIONS[room_version] # Calculate the "current state". if state is not None: @@ -2071,7 +2072,9 @@ def _check_for_soft_fail( } try: - event_auth.check(room_version, event, auth_events=current_auth_events) + event_auth.check( + room_version_obj, event, auth_events=current_auth_events + ) except AuthError as e: logger.warning("Soft-failing %r because %s", event, e) event.internal_metadata.soft_failed = True @@ -2155,6 +2158,7 @@ def do_auth(self, origin, event, context, auth_events): defer.Deferred[EventContext]: updated context object """ room_version = yield self.store.get_room_version(event.room_id) + room_version_obj = KNOWN_ROOM_VERSIONS[room_version] try: context = yield self._update_auth_events_and_context_for_auth( @@ -2172,7 +2176,7 @@ def do_auth(self, origin, event, context, auth_events): ) try: - event_auth.check(room_version, event, auth_events=auth_events) + event_auth.check(room_version_obj, event, auth_events=auth_events) except AuthError as e: logger.warning("Failed auth resolution for %r because %s", event, e) context.rejected = RejectedReason.AUTH_ERROR diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 8ea3aca2f405..9a0f661b9b0c 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -40,7 +40,7 @@ NotFoundError, SynapseError, ) -from synapse.api.room_versions import RoomVersions +from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersions from synapse.api.urls import ConsentURIBuilder from synapse.events.validator import EventValidator from synapse.logging.context import run_in_background @@ -962,9 +962,13 @@ def is_inviter_member_event(e): ) auth_events = yield self.store.get_events(auth_events_ids) auth_events = {(e.type, e.state_key): e for e in auth_events.values()} + room_version = yield self.store.get_room_version(event.room_id) + room_version_obj = KNOWN_ROOM_VERSIONS[room_version] - if event_auth.check_redaction(room_version, event, auth_events=auth_events): + if event_auth.check_redaction( + room_version_obj, event, auth_events=auth_events + ): # this user doesn't have 'redact' rights, so we need to do some more # checks on the original event. Let's start by checking the original # event exists. 
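
With the object in hand, callers can also read per-version behaviour straight off it (for example room_version_obj.event_format above) instead of consulting KNOWN_ROOM_VERSIONS again. For reference, assuming the constants defined in synapse.api.room_versions:

    from synapse.api.room_versions import EventFormatVersions, RoomVersions

    room_version_obj = RoomVersions.V5
    assert room_version_obj.identifier == "5"
    assert room_version_obj.event_format == EventFormatVersions.V3
    assert room_version_obj.enforce_key_validity is True
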
diff --git a/synapse/state/v1.py b/synapse/state/v1.py index d6c34ce3b739..24b7c0faef54 100644 --- a/synapse/state/v1.py +++ b/synapse/state/v1.py @@ -281,7 +281,7 @@ def _resolve_auth_events(events, auth_events): try: # The signatures have already been checked at this point event_auth.check( - RoomVersions.V1.identifier, + RoomVersions.V1, event, auth_events, do_sig_check=False, @@ -299,7 +299,7 @@ def _resolve_normal_events(events, auth_events): try: # The signatures have already been checked at this point event_auth.check( - RoomVersions.V1.identifier, + RoomVersions.V1, event, auth_events, do_sig_check=False, diff --git a/synapse/state/v2.py b/synapse/state/v2.py index 6216fdd20459..531018c6a5fa 100644 --- a/synapse/state/v2.py +++ b/synapse/state/v2.py @@ -26,6 +26,7 @@ from synapse import event_auth from synapse.api.constants import EventTypes from synapse.api.errors import AuthError +from synapse.api.room_versions import KNOWN_ROOM_VERSIONS from synapse.events import EventBase from synapse.types import StateMap @@ -402,6 +403,7 @@ def _iterative_auth_checks( Deferred[StateMap[str]]: Returns the final updated state """ resolved_state = base_state.copy() + room_version_obj = KNOWN_ROOM_VERSIONS[room_version] for event_id in event_ids: event = event_map[event_id] @@ -430,7 +432,7 @@ def _iterative_auth_checks( try: event_auth.check( - room_version, + room_version_obj, event, auth_events, do_sig_check=False, diff --git a/tests/test_event_auth.py b/tests/test_event_auth.py index 8b2741d27704..ca20b085a2de 100644 --- a/tests/test_event_auth.py +++ b/tests/test_event_auth.py @@ -37,7 +37,7 @@ def test_random_users_cannot_send_state_before_first_pl(self): # creator should be able to send state event_auth.check( - RoomVersions.V1.identifier, + RoomVersions.V1, _random_state_event(creator), auth_events, do_sig_check=False, @@ -47,7 +47,7 @@ def test_random_users_cannot_send_state_before_first_pl(self): self.assertRaises( AuthError, event_auth.check, - RoomVersions.V1.identifier, + RoomVersions.V1, _random_state_event(joiner), auth_events, do_sig_check=False, @@ -76,7 +76,7 @@ def test_state_default_level(self): self.assertRaises( AuthError, event_auth.check, - RoomVersions.V1.identifier, + RoomVersions.V1, _random_state_event(pleb), auth_events, do_sig_check=False, @@ -84,10 +84,7 @@ def test_state_default_level(self): # king should be able to send state event_auth.check( - RoomVersions.V1.identifier, - _random_state_event(king), - auth_events, - do_sig_check=False, + RoomVersions.V1, _random_state_event(king), auth_events, do_sig_check=False, ) From 49d3bca37b91fa092e13fd28c42dcf970fb86bb7 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Mon, 27 Jan 2020 16:14:54 +0000 Subject: [PATCH 13/67] Implement updated auth rules from MSC2260 --- synapse/api/room_versions.py | 16 ++++++++++++++++ synapse/event_auth.py | 24 +++++++++++++++++++----- 2 files changed, 35 insertions(+), 5 deletions(-) diff --git a/synapse/api/room_versions.py b/synapse/api/room_versions.py index c6f50fd7b9ab..cf7ee60d3ad4 100644 --- a/synapse/api/room_versions.py +++ b/synapse/api/room_versions.py @@ -57,6 +57,9 @@ class RoomVersion(object): state_res = attr.ib() # int; one of the StateResolutionVersions enforce_key_validity = attr.ib() # bool + # bool: before MSC2260, anyone was allowed to send an aliases event + special_case_aliases_auth = attr.ib(type=bool, default=False) + class RoomVersions(object): V1 = RoomVersion( @@ -65,6 +68,7 @@ class RoomVersions(object): EventFormatVersions.V1, 
StateResolutionVersions.V1, enforce_key_validity=False, + special_case_aliases_auth=True, ) V2 = RoomVersion( "2", @@ -72,6 +76,7 @@ class RoomVersions(object): EventFormatVersions.V1, StateResolutionVersions.V2, enforce_key_validity=False, + special_case_aliases_auth=True, ) V3 = RoomVersion( "3", @@ -79,6 +84,7 @@ class RoomVersions(object): EventFormatVersions.V2, StateResolutionVersions.V2, enforce_key_validity=False, + special_case_aliases_auth=True, ) V4 = RoomVersion( "4", @@ -86,6 +92,7 @@ class RoomVersions(object): EventFormatVersions.V3, StateResolutionVersions.V2, enforce_key_validity=False, + special_case_aliases_auth=True, ) V5 = RoomVersion( "5", @@ -93,6 +100,14 @@ class RoomVersions(object): EventFormatVersions.V3, StateResolutionVersions.V2, enforce_key_validity=True, + special_case_aliases_auth=True, + ) + MSC2260_DEV = RoomVersion( + "org.matrix.msc2260", + RoomDisposition.UNSTABLE, + EventFormatVersions.V3, + StateResolutionVersions.V2, + enforce_key_validity=True, ) @@ -104,5 +119,6 @@ class RoomVersions(object): RoomVersions.V3, RoomVersions.V4, RoomVersions.V5, + RoomVersions.MSC2260_DEV, ) } # type: Dict[str, RoomVersion] diff --git a/synapse/event_auth.py b/synapse/event_auth.py index 016d5678e5f6..3240e8a7b243 100644 --- a/synapse/event_auth.py +++ b/synapse/event_auth.py @@ -100,7 +100,12 @@ def check( if not event.signatures.get(event_id_domain): raise AuthError(403, "Event not signed by sending server") + # Implementation of https://matrix.org/docs/spec/rooms/v1#authorization-rules + # + # 1. If type is m.room.create: if event.type == EventTypes.Create: + # 1b. If the domain of the room_id does not match the domain of the sender, + # reject. sender_domain = get_domain_from_id(event.sender) room_id_domain = get_domain_from_id(event.room_id) if room_id_domain != sender_domain: @@ -108,40 +113,49 @@ def check( 403, "Creation event's room_id domain does not match sender's" ) + # 1c. If content.room_version is present and is not a recognised version, reject room_version_prop = event.content.get("room_version", "1") if room_version_prop not in KNOWN_ROOM_VERSIONS: raise AuthError( 403, "room appears to have unsupported version %s" % (room_version_prop,), ) - # FIXME + logger.debug("Allowing! %s", event) return + # 3. If event does not have a m.room.create in its auth_events, reject. creation_event = auth_events.get((EventTypes.Create, ""), None) - if not creation_event: raise AuthError(403, "No create event in auth events") + # additional check for m.federate creating_domain = get_domain_from_id(event.room_id) originating_domain = get_domain_from_id(event.sender) if creating_domain != originating_domain: if not _can_federate(event, auth_events): raise AuthError(403, "This room has been marked as unfederatable.") - # FIXME: Temp hack + # 4. If type is m.room.aliases if event.type == EventTypes.Aliases: + # 4a. If event has no state_key, reject if not event.is_state(): raise AuthError(403, "Alias event must be a state event") if not event.state_key: raise AuthError(403, "Alias event must have non-empty state_key") + + # 4b. If sender's domain doesn't matches [sic] state_key, reject sender_domain = get_domain_from_id(event.sender) if event.state_key != sender_domain: raise AuthError( 403, "Alias event's state_key does not match sender's domain" ) - logger.debug("Allowing! %s", event) - return + + # 4c. Otherwise, allow. + # This is removed by https://github.com/matrix-org/matrix-doc/pull/2260 + if room_version.special_case_aliases_auth: + logger.debug("Allowing! 
%s", event) + return if logger.isEnabledFor(logging.DEBUG): logger.debug("Auth events: %s", [a.event_id for a in auth_events.values()]) From 99e205fc214a65d307d4f5484321bcbb32a60b5f Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Mon, 27 Jan 2020 16:16:16 +0000 Subject: [PATCH 14/67] changelog --- changelog.d/6787.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/6787.misc diff --git a/changelog.d/6787.misc b/changelog.d/6787.misc new file mode 100644 index 000000000000..82fe63617302 --- /dev/null +++ b/changelog.d/6787.misc @@ -0,0 +1 @@ +Implement updated auth rules from MSC2260. From fbe0a82c0d603b12d8c1d9a2a1121dafb5616213 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 28 Jan 2020 09:43:57 +0000 Subject: [PATCH 15/67] update changelog --- changelog.d/6787.feature | 1 + changelog.d/6787.misc | 1 - 2 files changed, 1 insertion(+), 1 deletion(-) create mode 100644 changelog.d/6787.feature delete mode 100644 changelog.d/6787.misc diff --git a/changelog.d/6787.feature b/changelog.d/6787.feature new file mode 100644 index 000000000000..df9e4b77abde --- /dev/null +++ b/changelog.d/6787.feature @@ -0,0 +1 @@ +Implement updated authorization rules for aliases events, from [MSC2260](https://github.com/matrix-org/matrix-doc/pull/2260). diff --git a/changelog.d/6787.misc b/changelog.d/6787.misc deleted file mode 100644 index 82fe63617302..000000000000 --- a/changelog.d/6787.misc +++ /dev/null @@ -1 +0,0 @@ -Implement updated auth rules from MSC2260. From e17a11066192354f6c6144135a14e7abe524f44c Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 28 Jan 2020 14:43:21 +0000 Subject: [PATCH 16/67] Detect unknown remote devices and mark cache as stale (#6776) We just mark the fact that the cache may be stale in the database for now. --- changelog.d/6776.misc | 1 + synapse/handlers/devicemessage.py | 57 ++++++++++++++++++- synapse/handlers/federation.py | 20 +++++++ synapse/replication/slave/storage/devices.py | 2 +- synapse/storage/data_stores/main/devices.py | 29 ++++++++-- .../57/device_list_remote_cache_stale.sql | 25 ++++++++ 6 files changed, 126 insertions(+), 8 deletions(-) create mode 100644 changelog.d/6776.misc create mode 100644 synapse/storage/data_stores/main/schema/delta/57/device_list_remote_cache_stale.sql diff --git a/changelog.d/6776.misc b/changelog.d/6776.misc new file mode 100644 index 000000000000..4f9a4ac7a593 --- /dev/null +++ b/changelog.d/6776.misc @@ -0,0 +1 @@ +Detect unknown remote devices and mark cache as stale. diff --git a/synapse/handlers/devicemessage.py b/synapse/handlers/devicemessage.py index 73b9e120f5bb..5c5fe77be2e1 100644 --- a/synapse/handlers/devicemessage.py +++ b/synapse/handlers/devicemessage.py @@ -14,6 +14,7 @@ # limitations under the License. 
import logging +from typing import Any, Dict from canonicaljson import json @@ -65,6 +66,9 @@ def on_direct_to_device_edu(self, origin, content): logger.warning("Request for keys for non-local user %s", user_id) raise SynapseError(400, "Not a user here") + if not by_device: + continue + messages_by_device = { device_id: { "content": message_content, @@ -73,8 +77,11 @@ def on_direct_to_device_edu(self, origin, content): } for device_id, message_content in by_device.items() } - if messages_by_device: - local_messages[user_id] = messages_by_device + local_messages[user_id] = messages_by_device + + yield self._check_for_unknown_devices( + message_type, sender_user_id, by_device + ) stream_id = yield self.store.add_messages_from_remote_to_device_inbox( origin, message_id, local_messages @@ -84,6 +91,52 @@ def on_direct_to_device_edu(self, origin, content): "to_device_key", stream_id, users=local_messages.keys() ) + @defer.inlineCallbacks + def _check_for_unknown_devices( + self, + message_type: str, + sender_user_id: str, + by_device: Dict[str, Dict[str, Any]], + ): + """Checks inbound device messages for unkown remote devices, and if + found marks the remote cache for the user as stale. + """ + + if message_type != "m.room_key_request": + return + + # Get the sending device IDs + requesting_device_ids = set() + for message_content in by_device.values(): + device_id = message_content.get("requesting_device_id") + requesting_device_ids.add(device_id) + + # Check if we are tracking the devices of the remote user. + room_ids = yield self.store.get_rooms_for_user(sender_user_id) + if not room_ids: + logger.info( + "Received device message from remote device we don't" + " share a room with: %s %s", + sender_user_id, + requesting_device_ids, + ) + return + + # If we are tracking check that we know about the sending + # devices. + cached_devices = yield self.store.get_cached_devices_for_user(sender_user_id) + + unknown_devices = requesting_device_ids - set(cached_devices) + if unknown_devices: + logger.info( + "Received device message from remote device not in our cache: %s %s", + sender_user_id, + unknown_devices, + ) + yield self.store.mark_remote_user_device_cache_as_stale(sender_user_id) + # TODO: Poke something to start trying to refetch user's + # keys. + @defer.inlineCallbacks def send_device_message(self, sender_user_id, message_type, messages): set_tag("number_of_messages", len(messages)) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 180f165a7a8c..a67020a25903 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -742,6 +742,26 @@ async def _process_received_pdu( user = UserID.from_string(event.state_key) await self.user_joined_room(user, room_id) + # For encrypted messages we check that we know about the sending device, + # if we don't then we mark the device cache for that user as stale. + if event.type == EventTypes.Encryption: + device_id = event.content.get("device_id") + if device_id is not None: + cached_devices = await self.store.get_cached_devices_for_user( + event.sender + ) + if device_id not in cached_devices: + logger.info( + "Received event from remote device not in our cache: %s %s", + event.sender, + device_id, + ) + await self.store.mark_remote_user_device_cache_as_stale( + event.sender + ) + # TODO: Poke something to start trying to refetch user's + # keys. 
+ @log_function async def backfill(self, dest, room_id, limit, extremities): """ Trigger a backfill request to `dest` for the given `room_id` diff --git a/synapse/replication/slave/storage/devices.py b/synapse/replication/slave/storage/devices.py index dc625e0d7a06..1c77687eea9b 100644 --- a/synapse/replication/slave/storage/devices.py +++ b/synapse/replication/slave/storage/devices.py @@ -72,6 +72,6 @@ def _invalidate_caches_for_devices(self, token, user_id, destination): destination, token ) - self._get_cached_devices_for_user.invalidate((user_id,)) + self.get_cached_devices_for_user.invalidate((user_id,)) self._get_cached_user_device.invalidate_many((user_id,)) self.get_device_list_last_stream_id_for_remote.invalidate((user_id,)) diff --git a/synapse/storage/data_stores/main/devices.py b/synapse/storage/data_stores/main/devices.py index f0a7962dd071..30bf66b2b69f 100644 --- a/synapse/storage/data_stores/main/devices.py +++ b/synapse/storage/data_stores/main/devices.py @@ -457,7 +457,7 @@ def get_user_devices_from_cache(self, query_list): device = yield self._get_cached_user_device(user_id, device_id) results.setdefault(user_id, {})[device_id] = device else: - results[user_id] = yield self._get_cached_devices_for_user(user_id) + results[user_id] = yield self.get_cached_devices_for_user(user_id) set_tag("in_cache", results) set_tag("not_in_cache", user_ids_not_in_cache) @@ -475,12 +475,12 @@ def _get_cached_user_device(self, user_id, device_id): return db_to_json(content) @cachedInlineCallbacks() - def _get_cached_devices_for_user(self, user_id): + def get_cached_devices_for_user(self, user_id): devices = yield self.db.simple_select_list( table="device_lists_remote_cache", keyvalues={"user_id": user_id}, retcols=("device_id", "content"), - desc="_get_cached_devices_for_user", + desc="get_cached_devices_for_user", ) return { device["device_id"]: db_to_json(device["content"]) for device in devices @@ -641,6 +641,18 @@ def get_device_list_last_stream_id_for_remotes(self, user_ids): return results + def mark_remote_user_device_cache_as_stale(self, user_id: str): + """Records that the server has reason to believe the cache of the devices + for the remote users is out of date. + """ + return self.db.simple_upsert( + table="device_lists_remote_resync", + keyvalues={"user_id": user_id}, + values={}, + insertion_values={"added_ts": self._clock.time_msec()}, + desc="make_remote_user_device_cache_as_stale", + ) + class DeviceBackgroundUpdateStore(SQLBaseStore): def __init__(self, database: Database, db_conn, hs): @@ -887,7 +899,7 @@ def _update_remote_device_list_cache_entry_txn( ) txn.call_after(self._get_cached_user_device.invalidate, (user_id, device_id)) - txn.call_after(self._get_cached_devices_for_user.invalidate, (user_id,)) + txn.call_after(self.get_cached_devices_for_user.invalidate, (user_id,)) txn.call_after( self.get_device_list_last_stream_id_for_remote.invalidate, (user_id,) ) @@ -902,6 +914,13 @@ def _update_remote_device_list_cache_entry_txn( lock=False, ) + # If we're replacing the remote user's device list cache presumably + # we've done a full resync, so we remove the entry that says we need + # to resync + self.db.simple_delete_txn( + txn, table="device_lists_remote_resync", keyvalues={"user_id": user_id}, + ) + def update_remote_device_list_cache(self, user_id, devices, stream_id): """Replace the entire cache of the remote user's devices. 
@@ -942,7 +961,7 @@ def _update_remote_device_list_cache_txn(self, txn, user_id, devices, stream_id) ], ) - txn.call_after(self._get_cached_devices_for_user.invalidate, (user_id,)) + txn.call_after(self.get_cached_devices_for_user.invalidate, (user_id,)) txn.call_after(self._get_cached_user_device.invalidate_many, (user_id,)) txn.call_after( self.get_device_list_last_stream_id_for_remote.invalidate, (user_id,) diff --git a/synapse/storage/data_stores/main/schema/delta/57/device_list_remote_cache_stale.sql b/synapse/storage/data_stores/main/schema/delta/57/device_list_remote_cache_stale.sql new file mode 100644 index 000000000000..c3b6de20999b --- /dev/null +++ b/synapse/storage/data_stores/main/schema/delta/57/device_list_remote_cache_stale.sql @@ -0,0 +1,25 @@ +/* Copyright 2020 The Matrix.org Foundation C.I.C + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +-- Records whether the server thinks that the remote users cached device lists +-- may be out of date (e.g. if we have received a to device message from a +-- device we don't know about). +CREATE TABLE IF NOT EXISTS device_lists_remote_resync ( + user_id TEXT NOT NULL, + added_ts BIGINT NOT NULL +); + +CREATE UNIQUE INDEX device_lists_remote_resync_idx ON device_lists_remote_resync (user_id); +CREATE INDEX device_lists_remote_resync_ts_idx ON device_lists_remote_resync (added_ts); From a1f307f7d1ca6d4f83f9f43272a4b152cfdee299 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 28 Jan 2020 14:55:22 +0000 Subject: [PATCH 17/67] fix bad variable ref --- synapse/event_auth.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/synapse/event_auth.py b/synapse/event_auth.py index 3240e8a7b243..472f16504403 100644 --- a/synapse/event_auth.py +++ b/synapse/event_auth.py @@ -153,7 +153,7 @@ def check( # 4c. Otherwise, allow. # This is removed by https://github.com/matrix-org/matrix-doc/pull/2260 - if room_version.special_case_aliases_auth: + if room_version_obj.special_case_aliases_auth: logger.debug("Allowing! %s", event) return From fcfb591b312d6ec124c67aef2136a2d5948cadbe Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 28 Jan 2020 18:59:48 +0000 Subject: [PATCH 18/67] Fix outbound federation request metrics (#6795) --- changelog.d/6795.bugfix | 1 + synapse/http/matrixfederationclient.py | 4 ++++ 2 files changed, 5 insertions(+) create mode 100644 changelog.d/6795.bugfix diff --git a/changelog.d/6795.bugfix b/changelog.d/6795.bugfix new file mode 100644 index 000000000000..d1585653b149 --- /dev/null +++ b/changelog.d/6795.bugfix @@ -0,0 +1 @@ +Fix outbound federation request metrics. 
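
The bug here was that the request/response counters were never incremented; the hunk below restores the .labels(...).inc() calls. For context, this is the standard prometheus_client labelled-counter pattern — the metric names below are placeholders, the real counters are defined elsewhere in matrixfederationclient.py:

    from prometheus_client import Counter

    # Placeholder definitions; the real counters live in matrixfederationclient.py.
    outgoing_requests_counter = Counter(
        "federation_client_sent_requests", "Outbound federation requests", ["method"]
    )
    incoming_responses_counter = Counter(
        "federation_client_responses", "Responses to outbound requests", ["method", "code"]
    )

    # Incremented once per request / response, keyed by HTTP method (and status code):
    outgoing_requests_counter.labels("GET").inc()
    incoming_responses_counter.labels("GET", "200").inc()
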
diff --git a/synapse/http/matrixfederationclient.py b/synapse/http/matrixfederationclient.py index 16765d54e009..6f1bb04d8b50 100644 --- a/synapse/http/matrixfederationclient.py +++ b/synapse/http/matrixfederationclient.py @@ -408,6 +408,8 @@ def _send_request( _sec_timeout, ) + outgoing_requests_counter.labels(method_bytes).inc() + try: with Measure(self.clock, "outbound_request"): # we don't want all the fancy cookie and redirect handling @@ -440,6 +442,8 @@ def _send_request( response.phrase.decode("ascii", errors="replace"), ) + incoming_responses_counter.labels(method_bytes, response.code).inc() + set_tag(tags.HTTP_STATUS_CODE, response.code) if 200 <= response.code < 300: From 2cad8baa7030a86efc103599d79412741654dc15 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 29 Jan 2020 09:56:41 +0000 Subject: [PATCH 19/67] Fix bug when querying remote user keys that require a resync. (#6796) We ended up only returning a single device, rather than all of them. --- changelog.d/6796.bugfix | 1 + synapse/handlers/e2e_keys.py | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 changelog.d/6796.bugfix diff --git a/changelog.d/6796.bugfix b/changelog.d/6796.bugfix new file mode 100644 index 000000000000..206a15731151 --- /dev/null +++ b/changelog.d/6796.bugfix @@ -0,0 +1 @@ +Fix bug where querying a remote user's device keys that weren't cached resulted in only returning a single device. diff --git a/synapse/handlers/e2e_keys.py b/synapse/handlers/e2e_keys.py index 2d889364d4bc..95a9d71f4112 100644 --- a/synapse/handlers/e2e_keys.py +++ b/synapse/handlers/e2e_keys.py @@ -208,8 +208,9 @@ def do_remote_query(destination): ) user_devices = user_devices["devices"] + user_results = results.setdefault(user_id, {}) for device in user_devices: - results[user_id] = {device["device_id"]: device["keys"]} + user_results[device["device_id"]] = device["keys"] user_ids_updated.append(user_id) except Exception as e: failures[destination] = _exception_to_failure(e) From 611215a49cedf8d5f63c53168173763731d02260 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 29 Jan 2020 11:01:32 +0000 Subject: [PATCH 20/67] Delete current state when server leaves a room (#6792) Otherwise its just stale data, which may get deleted later anyway so can't be relied on. It's also a bit of a shotgun if we're trying to get the current state of a room we're not in. --- changelog.d/6792.misc | 1 + synapse/storage/data_stores/main/events.py | 183 +++++++++++++-------- synapse/storage/persist_events.py | 89 +++++++++- 3 files changed, 198 insertions(+), 75 deletions(-) create mode 100644 changelog.d/6792.misc diff --git a/changelog.d/6792.misc b/changelog.d/6792.misc new file mode 100644 index 000000000000..fa31d509b39f --- /dev/null +++ b/changelog.d/6792.misc @@ -0,0 +1 @@ +Delete current state from the database when server leaves a room. 
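
The approach below is to carry a "we have left the room" flag alongside the state delta, and to have the writer wipe the room's current_state_events rows wholesale when it is set. A self-contained stand-in for the helper added to synapse/storage/persist_events.py further down (the real class lives there):

    from typing import Dict, List, Tuple

    import attr

    @attr.s(slots=True)
    class DeltaState:
        # (event type, state key) pairs removed from current state
        to_delete = attr.ib(type=List[Tuple[str, str]])
        # (event type, state key) -> event_id to upsert into current state
        to_insert = attr.ib(type=Dict[Tuple[str, str], str])
        # set when the last local user has left, telling the writer to delete
        # the room's current_state_events rows instead of applying the delta
        no_longer_in_room = attr.ib(type=bool, default=False)

    delta = DeltaState(to_delete=[], to_insert={})
    delta.no_longer_in_room = True  # e.g. after the last local member leaves
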
diff --git a/synapse/storage/data_stores/main/events.py b/synapse/storage/data_stores/main/events.py index ce553566a5a5..c9d0d68c3a86 100644 --- a/synapse/storage/data_stores/main/events.py +++ b/synapse/storage/data_stores/main/events.py @@ -32,6 +32,7 @@ import synapse.metrics from synapse.api.constants import EventContentFields, EventTypes from synapse.api.errors import SynapseError +from synapse.api.room_versions import RoomVersions from synapse.events import EventBase # noqa: F401 from synapse.events.snapshot import EventContext # noqa: F401 from synapse.events.utils import prune_event_dict @@ -468,84 +469,93 @@ def _update_current_state_txn( to_delete = delta_state.to_delete to_insert = delta_state.to_insert - # First we add entries to the current_state_delta_stream. We - # do this before updating the current_state_events table so - # that we can use it to calculate the `prev_event_id`. (This - # allows us to not have to pull out the existing state - # unnecessarily). - # - # The stream_id for the update is chosen to be the minimum of the stream_ids - # for the batch of the events that we are persisting; that means we do not - # end up in a situation where workers see events before the - # current_state_delta updates. - # - sql = """ - INSERT INTO current_state_delta_stream - (stream_id, room_id, type, state_key, event_id, prev_event_id) - SELECT ?, ?, ?, ?, ?, ( - SELECT event_id FROM current_state_events - WHERE room_id = ? AND type = ? AND state_key = ? + if delta_state.no_longer_in_room: + # Server is no longer in the room so we delete the room from + # current_state_events, being careful we've already updated the + # rooms.room_version column (which gets populated in a + # background task). + self._upsert_room_version_txn(txn, room_id) + + # Before deleting we populate the current_state_delta_stream + # so that async background tasks get told what happened. + sql = """ + INSERT INTO current_state_delta_stream + (stream_id, room_id, type, state_key, event_id, prev_event_id) + SELECT ?, room_id, type, state_key, null, event_id + FROM current_state_events + WHERE room_id = ? + """ + txn.execute(sql, (stream_id, room_id)) + + self.db.simple_delete_txn( + txn, table="current_state_events", keyvalues={"room_id": room_id}, ) - """ - txn.executemany( - sql, - ( - ( - stream_id, - room_id, - etype, - state_key, - None, - room_id, - etype, - state_key, + else: + # We're still in the room, so we update the current state as normal. + + # First we add entries to the current_state_delta_stream. We + # do this before updating the current_state_events table so + # that we can use it to calculate the `prev_event_id`. (This + # allows us to not have to pull out the existing state + # unnecessarily). + # + # The stream_id for the update is chosen to be the minimum of the stream_ids + # for the batch of the events that we are persisting; that means we do not + # end up in a situation where workers see events before the + # current_state_delta updates. + # + sql = """ + INSERT INTO current_state_delta_stream + (stream_id, room_id, type, state_key, event_id, prev_event_id) + SELECT ?, ?, ?, ?, ?, ( + SELECT event_id FROM current_state_events + WHERE room_id = ? AND type = ? AND state_key = ? 
) - for etype, state_key in to_delete - # We sanity check that we're deleting rather than updating - if (etype, state_key) not in to_insert - ), - ) - txn.executemany( - sql, - ( + """ + txn.executemany( + sql, ( - stream_id, - room_id, - etype, - state_key, - ev_id, - room_id, - etype, - state_key, - ) - for (etype, state_key), ev_id in iteritems(to_insert) - ), - ) + ( + stream_id, + room_id, + etype, + state_key, + to_insert.get((etype, state_key)), + room_id, + etype, + state_key, + ) + for etype, state_key in itertools.chain(to_delete, to_insert) + ), + ) + # Now we actually update the current_state_events table - # Now we actually update the current_state_events table + txn.executemany( + "DELETE FROM current_state_events" + " WHERE room_id = ? AND type = ? AND state_key = ?", + ( + (room_id, etype, state_key) + for etype, state_key in itertools.chain(to_delete, to_insert) + ), + ) - txn.executemany( - "DELETE FROM current_state_events" - " WHERE room_id = ? AND type = ? AND state_key = ?", - ( - (room_id, etype, state_key) - for etype, state_key in itertools.chain(to_delete, to_insert) - ), - ) + # We include the membership in the current state table, hence we do + # a lookup when we insert. This assumes that all events have already + # been inserted into room_memberships. + txn.executemany( + """INSERT INTO current_state_events + (room_id, type, state_key, event_id, membership) + VALUES (?, ?, ?, ?, (SELECT membership FROM room_memberships WHERE event_id = ?)) + """, + [ + (room_id, key[0], key[1], ev_id, ev_id) + for key, ev_id in iteritems(to_insert) + ], + ) - # We include the membership in the current state table, hence we do - # a lookup when we insert. This assumes that all events have already - # been inserted into room_memberships. - txn.executemany( - """INSERT INTO current_state_events - (room_id, type, state_key, event_id, membership) - VALUES (?, ?, ?, ?, (SELECT membership FROM room_memberships WHERE event_id = ?)) - """, - [ - (room_id, key[0], key[1], ev_id, ev_id) - for key, ev_id in iteritems(to_insert) - ], - ) + # We now update `local_current_membership`. We do this regardless + # of whether we're still in the room or not to handle the case where + # e.g. we just got banned (where we need to record that fact here). # Note: Do we really want to delete rows here (that we do not # subsequently reinsert below)? While technically correct it means @@ -601,6 +611,35 @@ def _update_current_state_txn( self._invalidate_state_caches_and_stream(txn, room_id, members_changed) + def _upsert_room_version_txn(self, txn: LoggingTransaction, room_id: str): + """Update the room version in the database based off current state + events. + + This is used when we're about to delete current state and we want to + ensure that the `rooms.room_version` column is up to date. + """ + + sql = """ + SELECT json FROM event_json + INNER JOIN current_state_events USING (room_id, event_id) + WHERE room_id = ? AND type = ? AND state_key = ? 
+ """ + txn.execute(sql, (room_id, EventTypes.Create, "")) + row = txn.fetchone() + if row: + event_json = json.loads(row[0]) + content = event_json.get("content", {}) + creator = content.get("creator") + room_version_id = content.get("room_version", RoomVersions.V1.identifier) + + self.db.simple_upsert_txn( + txn, + table="rooms", + keyvalues={"room_id": room_id}, + values={"room_version": room_version_id}, + insertion_values={"is_public": False, "creator": creator}, + ) + def _update_forward_extremities_txn( self, txn, new_forward_extremities, max_stream_order ): diff --git a/synapse/storage/persist_events.py b/synapse/storage/persist_events.py index 368c457321e7..d060c8b99267 100644 --- a/synapse/storage/persist_events.py +++ b/synapse/storage/persist_events.py @@ -15,6 +15,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import itertools import logging from collections import deque, namedtuple from typing import Iterable, List, Optional, Tuple @@ -27,7 +28,7 @@ from twisted.internet import defer -from synapse.api.constants import EventTypes +from synapse.api.constants import EventTypes, Membership from synapse.events import FrozenEvent from synapse.events.snapshot import EventContext from synapse.logging.context import PreserveLoggingContext, make_deferred_yieldable @@ -72,17 +73,20 @@ ) -@attr.s(slots=True, frozen=True) +@attr.s(slots=True) class DeltaState: """Deltas to use to update the `current_state_events` table. Attributes: to_delete: List of type/state_keys to delete from current state to_insert: Map of state to upsert into current state + no_longer_in_room: The server is not longer in the room, so the room + should e.g. be removed from `current_state_events` table. """ to_delete = attr.ib(type=List[Tuple[str, str]]) to_insert = attr.ib(type=StateMap[str]) + no_longer_in_room = attr.ib(type=bool, default=False) class _EventPeristenceQueue(object): @@ -396,11 +400,12 @@ async def _persist_events( # If either are not None then there has been a change, # and we need to work out the delta (or use that # given) + delta = None if delta_ids is not None: # If there is a delta we know that we've # only added or replaced state, never # removed keys entirely. - state_delta_for_room[room_id] = DeltaState([], delta_ids) + delta = DeltaState([], delta_ids) elif current_state is not None: with Measure( self._clock, "persist_events.calculate_state_delta" @@ -408,6 +413,22 @@ async def _persist_events( delta = await self._calculate_state_delta( room_id, current_state ) + + if delta: + # If we have a change of state then lets check + # whether we're actually still a member of the room, + # or if our last user left. If we're no longer in + # the room then we delete the current state and + # extremities. 
+ is_still_joined = await self._is_server_still_joined( + room_id, ev_ctx_rm, delta, current_state + ) + if not is_still_joined: + logger.info("Server no longer in room %s", room_id) + latest_event_ids = [] + current_state = {} + delta.no_longer_in_room = True + state_delta_for_room[room_id] = delta # If we have the current_state then lets prefill @@ -660,3 +681,65 @@ async def _calculate_state_delta( } return DeltaState(to_delete=to_delete, to_insert=to_insert) + + async def _is_server_still_joined( + self, + room_id: str, + ev_ctx_rm: List[Tuple[FrozenEvent, EventContext]], + delta: DeltaState, + current_state: Optional[StateMap[str]], + ) -> bool: + """Check if the server will still be joined after the given events have + been persised. + + Args: + room_id + ev_ctx_rm + delta: The delta of current state between what is in the database + and what the new current state will be. + current_state: The new current state if it already been calculated, + otherwise None. + """ + + if not any( + self.is_mine_id(state_key) + for typ, state_key in itertools.chain(delta.to_delete, delta.to_insert) + if typ == EventTypes.Member + ): + # There have been no changes to membership of our users, so nothing + # has changed and we assume we're still in the room. + return True + + # Check if any of the given events are a local join that appear in the + # current state + for (typ, state_key), event_id in delta.to_insert.items(): + if typ != EventTypes.Member or not self.is_mine_id(state_key): + continue + + for event, _ in ev_ctx_rm: + if event_id == event.event_id: + if event.membership == Membership.JOIN: + return True + + # There's been a change of membership but we don't have a local join + # event in the new events, so we need to check the full state. + if current_state is None: + current_state = await self.main_store.get_current_state_ids(room_id) + current_state = dict(current_state) + for key in delta.to_delete: + current_state.pop(key, None) + + current_state.update(delta.to_insert) + + event_ids = [ + event_id + for (typ, state_key,), event_id in current_state.items() + if typ == EventTypes.Member and self.is_mine_id(state_key) + ] + + rows = await self.main_store.get_membership_from_event_ids(event_ids) + is_still_joined = any(row["membership"] == Membership.JOIN for row in rows) + if is_still_joined: + return True + else: + return False From 6b9e1014cf9c107f3198999159fbc935376fdcc9 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 29 Jan 2020 11:23:01 +0000 Subject: [PATCH 21/67] Fix race in federation sender that delayed device updates. (#6799) We were sending device updates down both the federation stream and device streams. This mean there was a race if the federation sender worker processed the federation stream first, as when the sender checked if there were new device updates the slaved ID generator hadn't been updated with the new stream IDs and so returned nothing. This situation is correctly handled by events/receipts/etc by not sending updates down the federation stream and instead having the federation sender worker listen on the other streams and poke the transaction queues as appropriate. 
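
Concretely, the federation sender worker now watches the device-lists and to-device replication streams and pokes the per-destination queues itself, roughly as in the sketch below (a simplified standalone version of the hook added to synapse/app/federation_sender.py; the function name is illustrative):

    from synapse.replication.tcp.streams._base import DeviceListsStream, ToDeviceStream

    def poke_device_destinations(federation_sender, stream_name, rows):
        if stream_name == DeviceListsStream.NAME:
            # each row carries a destination server that needs to be woken up
            hosts = {row.destination for row in rows}
        elif stream_name == ToDeviceStream.NAME:
            # entities starting with '@' are local users, not remote servers
            hosts = {row.entity for row in rows if not row.entity.startswith("@")}
        else:
            return
        for host in hosts:
            federation_sender.send_device_messages(host)
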
--- changelog.d/6799.bugfix | 1 + synapse/app/federation_sender.py | 20 +++++++++++++++++++- synapse/federation/send_queue.py | 32 +++----------------------------- 3 files changed, 23 insertions(+), 30 deletions(-) create mode 100644 changelog.d/6799.bugfix diff --git a/changelog.d/6799.bugfix b/changelog.d/6799.bugfix new file mode 100644 index 000000000000..322a2758affa --- /dev/null +++ b/changelog.d/6799.bugfix @@ -0,0 +1 @@ +Fix race in federation sender worker that delayed sending of device updates. diff --git a/synapse/app/federation_sender.py b/synapse/app/federation_sender.py index 38d11fdd0ff7..63a91f1177f5 100644 --- a/synapse/app/federation_sender.py +++ b/synapse/app/federation_sender.py @@ -38,7 +38,11 @@ from synapse.replication.slave.storage.registration import SlavedRegistrationStore from synapse.replication.slave.storage.transactions import SlavedTransactionStore from synapse.replication.tcp.client import ReplicationClientHandler -from synapse.replication.tcp.streams._base import ReceiptsStream +from synapse.replication.tcp.streams._base import ( + DeviceListsStream, + ReceiptsStream, + ToDeviceStream, +) from synapse.server import HomeServer from synapse.storage.database import Database from synapse.types import ReadReceipt @@ -256,6 +260,20 @@ def process_replication_rows(self, stream_name, token, rows): "process_receipts_for_federation", self._on_new_receipts, rows ) + # ... as well as device updates and messages + elif stream_name == DeviceListsStream.NAME: + hosts = set(row.destination for row in rows) + for host in hosts: + self.federation_sender.send_device_messages(host) + + elif stream_name == ToDeviceStream.NAME: + # The to_device stream includes stuff to be pushed to both local + # clients and remote servers, so we ignore entities that start with + # '@' (since they'll be local users rather than destinations). + hosts = set(row.entity for row in rows if not row.entity.startswith("@")) + for host in hosts: + self.federation_sender.send_device_messages(host) + @defer.inlineCallbacks def _on_new_receipts(self, rows): """ diff --git a/synapse/federation/send_queue.py b/synapse/federation/send_queue.py index 174f6e42becc..0bb82a6bb3e5 100644 --- a/synapse/federation/send_queue.py +++ b/synapse/federation/send_queue.py @@ -69,8 +69,6 @@ def __init__(self, hs): self.edus = SortedDict() # stream position -> Edu - self.device_messages = SortedDict() # stream position -> destination - self.pos = 1 self.pos_time = SortedDict() @@ -92,7 +90,6 @@ def register(name, queue): "keyed_edu", "keyed_edu_changed", "edus", - "device_messages", "pos_time", "presence_destinations", ]: @@ -171,12 +168,6 @@ def _clear_queue_before_pos(self, position_to_delete): for key in keys[:i]: del self.edus[key] - # Delete things out of device map - keys = self.device_messages.keys() - i = self.device_messages.bisect_left(position_to_delete) - for key in keys[:i]: - del self.device_messages[key] - def notify_new_events(self, current_id): """As per FederationSender""" # We don't need to replicate this as it gets sent down a different @@ -249,9 +240,8 @@ def send_presence_to_destinations(self, states, destinations): def send_device_messages(self, destination): """As per FederationSender""" - pos = self._next_pos() - self.device_messages[pos] = destination - self.notifier.on_new_replication_data() + # We don't need to replicate this as it gets sent down a different + # stream. 
def get_current_token(self): return self.pos - 1 @@ -339,14 +329,6 @@ async def get_replication_rows( for (pos, edu) in edus: rows.append((pos, EduRow(edu))) - # Fetch changed device messages - i = self.device_messages.bisect_right(from_token) - j = self.device_messages.bisect_right(to_token) + 1 - device_messages = {v: k for k, v in self.device_messages.items()[i:j]} - - for (destination, pos) in iteritems(device_messages): - rows.append((pos, DeviceRow(destination=destination))) - # Sort rows based on pos rows.sort() @@ -504,7 +486,6 @@ def add_to_buffer(self, buff): "presence_destinations", # list of tuples of UserPresenceState and destinations "keyed_edus", # dict of destination -> { key -> Edu } "edus", # dict of destination -> [Edu] - "device_destinations", # set of destinations ), ) @@ -523,11 +504,7 @@ def process_rows_for_federation(transaction_queue, rows): # them into the appropriate collection and then send them off. buff = ParsedFederationStreamData( - presence=[], - presence_destinations=[], - keyed_edus={}, - edus={}, - device_destinations=set(), + presence=[], presence_destinations=[], keyed_edus={}, edus={}, ) # Parse the rows in the stream and add to the buffer @@ -555,6 +532,3 @@ def process_rows_for_federation(transaction_queue, rows): for destination, edu_list in iteritems(buff.edus): for edu in edu_list: transaction_queue.send_edu(edu, None) - - for destination in buff.device_destinations: - transaction_queue.send_device_messages(destination) From ee42a5513e020424daa736962fee7fb69bd2373a Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 28 Jan 2020 11:02:55 +0000 Subject: [PATCH 22/67] Factor out a `copy_power_levels_contents` method I'm going to need another copy (hah!) of this. --- synapse/events/utils.py | 37 ++++++++++++++++++++++++++++++- synapse/handlers/room.py | 23 ++++++++++--------- tests/events/test_utils.py | 45 ++++++++++++++++++++++++++++++++++++-- 3 files changed, 90 insertions(+), 15 deletions(-) diff --git a/synapse/events/utils.py b/synapse/events/utils.py index 07d1c5bcf045..be57c6d9be43 100644 --- a/synapse/events/utils.py +++ b/synapse/events/utils.py @@ -12,8 +12,9 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
- +import collections import re +from typing import Mapping, Union from six import string_types @@ -422,3 +423,37 @@ def serialize_events(self, events, time_now, **kwargs): return yieldable_gather_results( self.serialize_event, events, time_now=time_now, **kwargs ) + + +def copy_power_levels_contents( + old_power_levels: Mapping[str, Union[int, Mapping[str, int]]] +): + """Copy the content of a power_levels event, unfreezing frozendicts along the way + + Raises: + TypeError if the input does not look like a valid power levels event content + """ + if not isinstance(old_power_levels, collections.Mapping): + raise TypeError("Not a valid power-levels content: %r" % (old_power_levels,)) + + power_levels = {} + for k, v in old_power_levels.items(): + + if isinstance(v, int): + power_levels[k] = v + continue + + if isinstance(v, collections.Mapping): + power_levels[k] = h = {} + for k1, v1 in v.items(): + # we should only have one level of nesting + if not isinstance(v1, int): + raise TypeError( + "Invalid power_levels value for %s.%s: %r" % (k, k1, v) + ) + h[k1] = v1 + continue + + raise TypeError("Invalid power_levels value for %s: %r" % (k, v)) + + return power_levels diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index a9490782b7e6..532ee22fa4c8 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -30,6 +30,7 @@ from synapse.api.constants import EventTypes, JoinRules, RoomCreationPreset from synapse.api.errors import AuthError, Codes, NotFoundError, StoreError, SynapseError from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion +from synapse.events.utils import copy_power_levels_contents from synapse.http.endpoint import parse_and_validate_server_name from synapse.storage.state import StateFilter from synapse.types import ( @@ -367,6 +368,15 @@ def clone_existing_room( if old_event: initial_state[k] = old_event.content + # deep-copy the power-levels event before we start modifying it + # note that if frozen_dicts are enabled, `power_levels` will be a frozen + # dict so we can't just copy.deepcopy it. + initial_state[ + (EventTypes.PowerLevels, "") + ] = power_levels = copy_power_levels_contents( + initial_state[(EventTypes.PowerLevels, "")] + ) + # Resolve the minimum power level required to send any state event # We will give the upgrading user this power level temporarily (if necessary) such that # they are able to copy all of the state events over, then revert them back to their @@ -375,8 +385,6 @@ def clone_existing_room( # Copy over user power levels now as this will not be possible with >100PL users once # the room has been created - power_levels = initial_state[(EventTypes.PowerLevels, "")] - # Calculate the minimum power level needed to clone the room event_power_levels = power_levels.get("events", {}) state_default = power_levels.get("state_default", 0) @@ -386,16 +394,7 @@ def clone_existing_room( # Raise the requester's power level in the new room if necessary current_power_level = power_levels["users"][user_id] if current_power_level < needed_power_level: - # make sure we copy the event content rather than overwriting it. - # note that if frozen_dicts are enabled, `power_levels` will be a frozen - # dict so we can't just copy.deepcopy it. 
- - new_power_levels = {k: v for k, v in power_levels.items() if k != "users"} - new_power_levels["users"] = { - k: v for k, v in power_levels.get("users", {}).items() if k != user_id - } - new_power_levels["users"][user_id] = needed_power_level - initial_state[(EventTypes.PowerLevels, "")] = new_power_levels + power_levels["users"][user_id] = needed_power_level yield self._send_events_for_new_room( requester, diff --git a/tests/events/test_utils.py b/tests/events/test_utils.py index 9e3d4d0f4756..2b13980dfdc6 100644 --- a/tests/events/test_utils.py +++ b/tests/events/test_utils.py @@ -15,9 +15,14 @@ from synapse.events import FrozenEvent -from synapse.events.utils import prune_event, serialize_event +from synapse.events.utils import ( + copy_power_levels_contents, + prune_event, + serialize_event, +) +from synapse.util.frozenutils import freeze -from .. import unittest +from tests import unittest def MockEvent(**kwargs): @@ -241,3 +246,39 @@ def test_event_fields_fail_if_fields_not_str(self): self.serialize( MockEvent(room_id="!foo:bar", content={"foo": "bar"}), ["room_id", 4] ) + + +class CopyPowerLevelsContentTestCase(unittest.TestCase): + def setUp(self) -> None: + self.test_content = { + "ban": 50, + "events": {"m.room.name": 100, "m.room.power_levels": 100}, + "events_default": 0, + "invite": 50, + "kick": 50, + "notifications": {"room": 20}, + "redact": 50, + "state_default": 50, + "users": {"@example:localhost": 100}, + "users_default": 0, + } + + def _test(self, input): + a = copy_power_levels_contents(input) + + self.assertEqual(a["ban"], 50) + self.assertEqual(a["events"]["m.room.name"], 100) + + # make sure that changing the copy changes the copy and not the orig + a["ban"] = 10 + a["events"]["m.room.power_levels"] = 20 + + self.assertEqual(input["ban"], 50) + self.assertEqual(input["events"]["m.room.power_levels"], 100) + + def test_unfrozen(self): + self._test(self.test_content) + + def test_frozen(self): + input = freeze(self.test_content) + self._test(input) From b36095ae5cd88d95802ecf01f8b0f541593ea773 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 28 Jan 2020 11:08:38 +0000 Subject: [PATCH 23/67] Set the PL for aliases events to 0. --- synapse/events/utils.py | 2 +- synapse/handlers/room.py | 17 +++++++++++++++-- 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/synapse/events/utils.py b/synapse/events/utils.py index be57c6d9be43..f70f5032fb84 100644 --- a/synapse/events/utils.py +++ b/synapse/events/utils.py @@ -449,7 +449,7 @@ def copy_power_levels_contents( # we should only have one level of nesting if not isinstance(v1, int): raise TypeError( - "Invalid power_levels value for %s.%s: %r" % (k, k1, v) + "Invalid power_levels value for %s.%s: %r" % (k, k1, v1) ) h[k1] = v1 continue diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 532ee22fa4c8..a95b45d7914b 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -287,7 +287,16 @@ def _update_upgraded_room_pls( except AuthError as e: logger.warning("Unable to update PLs in old room: %s", e) - logger.info("Setting correct PLs in new room to %s", old_room_pl_state.content) + new_pl_content = copy_power_levels_contents(old_room_pl_state.content) + + # pre-msc2260 rooms may not have the right setting for aliases. If no other + # value is set, set it now. 
+ events_default = new_pl_content.get("events_default", 0) + new_pl_content.setdefault("events", {}).setdefault( + EventTypes.Aliases, events_default + ) + + logger.info("Setting correct PLs in new room to %s", new_pl_content) yield self.event_creation_handler.create_and_send_nonmember_event( requester, { @@ -295,7 +304,7 @@ def _update_upgraded_room_pls( "state_key": "", "room_id": new_room_id, "sender": requester.user.to_string(), - "content": old_room_pl_state.content, + "content": new_pl_content, }, ratelimit=False, ) @@ -812,6 +821,10 @@ def send(etype, content, **kwargs): EventTypes.RoomHistoryVisibility: 100, EventTypes.CanonicalAlias: 50, EventTypes.RoomAvatar: 50, + # MSC2260: Allow everybody to send alias events by default + # This will be reudundant on pre-MSC2260 rooms, since the + # aliases event is special-cased. + EventTypes.Aliases: 0, }, "events_default": 0, "state_default": 50, From dcd85b976dc93851d7f5246fc0684d5c5f7d7b5b Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 28 Jan 2020 17:46:34 +0000 Subject: [PATCH 24/67] Make /directory/room/ handle restrictive power levels Fixes a bug where the alias would be added, but `PUT /directory/room/` would return a 403. --- synapse/handlers/directory.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/synapse/handlers/directory.py b/synapse/handlers/directory.py index a07d2f1a17e8..8c5980cb0cb0 100644 --- a/synapse/handlers/directory.py +++ b/synapse/handlers/directory.py @@ -151,7 +151,12 @@ def create_association( yield self._create_association(room_alias, room_id, servers, creator=user_id) if send_event: - yield self.send_room_alias_update_event(requester, room_id) + try: + yield self.send_room_alias_update_event(requester, room_id) + except AuthError as e: + # sending the aliases event may fail due to the user not having + # permission in the room; this is permitted. + logger.info("Skipping updating aliases event due to auth error %s", e) @defer.inlineCallbacks def delete_association(self, requester, room_alias, send_event=True): From 750d4d7599d1985bc262853494b21e9fee34c637 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 28 Jan 2020 11:09:14 +0000 Subject: [PATCH 25/67] changelog --- changelog.d/6790.feature | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/6790.feature diff --git a/changelog.d/6790.feature b/changelog.d/6790.feature new file mode 100644 index 000000000000..df9e4b77abde --- /dev/null +++ b/changelog.d/6790.feature @@ -0,0 +1 @@ +Implement updated authorization rules for aliases events, from [MSC2260](https://github.com/matrix-org/matrix-doc/pull/2260). From a855b7c3a82458602bd62ed00bffed269f2acfec Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 29 Jan 2020 12:06:31 +0000 Subject: [PATCH 26/67] Remove unused DeviceRow class (#6800) --- changelog.d/6800.bugfix | 1 + synapse/federation/send_queue.py | 21 +-------------------- 2 files changed, 2 insertions(+), 20 deletions(-) create mode 100644 changelog.d/6800.bugfix diff --git a/changelog.d/6800.bugfix b/changelog.d/6800.bugfix new file mode 100644 index 000000000000..322a2758affa --- /dev/null +++ b/changelog.d/6800.bugfix @@ -0,0 +1 @@ +Fix race in federation sender worker that delayed sending of device updates. 
diff --git a/synapse/federation/send_queue.py b/synapse/federation/send_queue.py index 0bb82a6bb3e5..001bb304ae62 100644 --- a/synapse/federation/send_queue.py +++ b/synapse/federation/send_queue.py @@ -454,28 +454,9 @@ def add_to_buffer(self, buff): buff.edus.setdefault(self.edu.destination, []).append(self.edu) -class DeviceRow(BaseFederationRow, namedtuple("DeviceRow", ("destination",))): # str - """Streams the fact that either a) there is pending to device messages for - users on the remote, or b) a local users device has changed and needs to - be sent to the remote. - """ - - TypeId = "d" - - @staticmethod - def from_data(data): - return DeviceRow(destination=data["destination"]) - - def to_data(self): - return {"destination": self.destination} - - def add_to_buffer(self, buff): - buff.device_destinations.add(self.destination) - - TypeToRow = { Row.TypeId: Row - for Row in (PresenceRow, PresenceDestinationsRow, KeyedEduRow, EduRow, DeviceRow) + for Row in (PresenceRow, PresenceDestinationsRow, KeyedEduRow, EduRow,) } From 5a246611e3cbf27cf1dd7e4453adc8040cddd6a2 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Thu, 30 Jan 2020 11:25:59 +0000 Subject: [PATCH 27/67] Type defintions for use in refactoring for redaction changes (#6803) * Bump signedjson to 1.1 ... so that we can use the type definitions * Fix breakage caused by upgrade to signedjson 1.1 Thanks, @illicitonion... --- changelog.d/6803.misc | 1 + synapse/events/__init__.py | 5 +++-- synapse/python_dependencies.py | 4 +++- synapse/types.py | 7 ++++++- tests/storage/test_keys.py | 15 +++++++++++---- 5 files changed, 24 insertions(+), 8 deletions(-) create mode 100644 changelog.d/6803.misc diff --git a/changelog.d/6803.misc b/changelog.d/6803.misc new file mode 100644 index 000000000000..08aa80bcd91f --- /dev/null +++ b/changelog.d/6803.misc @@ -0,0 +1 @@ +Refactoring work in preparation for changing the event redaction algorithm. diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py index 72c09327f43a..f813fa2fe7cb 100644 --- a/synapse/events/__init__.py +++ b/synapse/events/__init__.py @@ -23,6 +23,7 @@ from synapse.api.errors import UnsupportedRoomVersionError from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, EventFormatVersions +from synapse.types import JsonDict from synapse.util.caches import intern_dict from synapse.util.frozenutils import freeze @@ -197,7 +198,7 @@ def membership(self): def is_state(self): return hasattr(self, "state_key") and self.state_key is not None - def get_dict(self): + def get_dict(self) -> JsonDict: d = dict(self._event_dict) d.update({"signatures": self.signatures, "unsigned": dict(self.unsigned)}) @@ -209,7 +210,7 @@ def get(self, key, default=None): def get_internal_metadata_dict(self): return self.internal_metadata.get_dict() - def get_pdu_json(self, time_now=None): + def get_pdu_json(self, time_now=None) -> JsonDict: pdu_json = self.get_dict() if time_now is not None and "age_ts" in pdu_json["unsigned"]: diff --git a/synapse/python_dependencies.py b/synapse/python_dependencies.py index 5871feaafdf9..8de8cb2c1287 100644 --- a/synapse/python_dependencies.py +++ b/synapse/python_dependencies.py @@ -1,6 +1,7 @@ # Copyright 2015, 2016 OpenMarket Ltd # Copyright 2017 Vector Creations Ltd # Copyright 2018 New Vector Ltd +# Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -43,7 +44,8 @@ "frozendict>=1", "unpaddedbase64>=1.1.0", "canonicaljson>=1.1.3", - "signedjson>=1.0.0", + # we use the type definitions added in signedjson 1.1. + "signedjson>=1.1.0", "pynacl>=1.2.1", "idna>=2.5", # validating SSL certs for IP addresses requires service_identity 18.1. diff --git a/synapse/types.py b/synapse/types.py index 65e4d8c18167..f3cd465735fd 100644 --- a/synapse/types.py +++ b/synapse/types.py @@ -17,7 +17,7 @@ import string import sys from collections import namedtuple -from typing import Dict, Tuple, TypeVar +from typing import Any, Dict, Tuple, TypeVar import attr from signedjson.key import decode_verify_key_bytes @@ -43,6 +43,11 @@ class Collection(Iterable[T_co], Container[T_co], Sized): StateMap = Dict[Tuple[str, str], T] +# the type of a JSON-serialisable dict. This could be made stronger, but it will +# do for now. +JsonDict = Dict[str, Any] + + class Requester( namedtuple( "Requester", ["user", "access_token_id", "is_guest", "device_id", "app_service"] diff --git a/tests/storage/test_keys.py b/tests/storage/test_keys.py index e07ff0120173..95f309fbbc41 100644 --- a/tests/storage/test_keys.py +++ b/tests/storage/test_keys.py @@ -14,6 +14,7 @@ # limitations under the License. import signedjson.key +import unpaddedbase64 from twisted.internet.defer import Deferred @@ -21,11 +22,17 @@ import tests.unittest -KEY_1 = signedjson.key.decode_verify_key_base64( - "ed25519", "key1", "fP5l4JzpZPq/zdbBg5xx6lQGAAOM9/3w94cqiJ5jPrw" + +def decode_verify_key_base64(key_id: str, key_base64: str): + key_bytes = unpaddedbase64.decode_base64(key_base64) + return signedjson.key.decode_verify_key_bytes(key_id, key_bytes) + + +KEY_1 = decode_verify_key_base64( + "ed25519:key1", "fP5l4JzpZPq/zdbBg5xx6lQGAAOM9/3w94cqiJ5jPrw" ) -KEY_2 = signedjson.key.decode_verify_key_base64( - "ed25519", "key2", "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw" +KEY_2 = decode_verify_key_base64( + "ed25519:key2", "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw" ) From c80a9fe13dc098037a37bb0920a00b2c8cb53174 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 30 Jan 2020 15:06:58 +0000 Subject: [PATCH 28/67] When a client asks for remote keys check if should resync. (#6797) If we detect that the remote users' keys may have changed then we should attempt to resync against the remote server rather than using the (potentially) stale local cache. --- changelog.d/6797.misc | 1 + synapse/storage/data_stores/main/devices.py | 32 +++++++++++++++++++-- 2 files changed, 30 insertions(+), 3 deletions(-) create mode 100644 changelog.d/6797.misc diff --git a/changelog.d/6797.misc b/changelog.d/6797.misc new file mode 100644 index 000000000000..e9127bac517c --- /dev/null +++ b/changelog.d/6797.misc @@ -0,0 +1 @@ +When a client asks for a remote user's device keys check if the local cache for that user has been marked as potentially stale. 
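The storage change below filters the set of users whose device keys can be answered from the local cache by the set of users flagged as needing a device-list resync. A rough standalone sketch of that filtering idea (the function name and data here are illustrative, not the actual storage API):

    def split_cached_and_stale(query_users, cache_stream_ids, users_needing_resync):
        # A user's keys may be served from the local cache only if we have
        # previously cached their device list (a stream id is recorded) and
        # the cache has not been flagged as potentially stale.
        cached = {
            user
            for user in query_users
            if cache_stream_ids.get(user) is not None
        } - set(users_needing_resync)
        return cached, set(query_users) - cached

    # alice is safely cached; bob is flagged for resync; carol was never cached.
    cached, must_fetch = split_cached_and_stale(
        ["@alice:remote", "@bob:remote", "@carol:remote"],
        {"@alice:remote": 5, "@bob:remote": 7},
        {"@bob:remote"},
    )
    assert cached == {"@alice:remote"}
    assert must_fetch == {"@bob:remote", "@carol:remote"}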
diff --git a/synapse/storage/data_stores/main/devices.py b/synapse/storage/data_stores/main/devices.py index 30bf66b2b69f..a34415ff14ca 100644 --- a/synapse/storage/data_stores/main/devices.py +++ b/synapse/storage/data_stores/main/devices.py @@ -32,7 +32,7 @@ from synapse.metrics.background_process_metrics import run_as_background_process from synapse.storage._base import SQLBaseStore, db_to_json, make_in_list_sql_clause from synapse.storage.database import Database -from synapse.types import get_verify_key_from_cross_signing_key +from synapse.types import Collection, get_verify_key_from_cross_signing_key from synapse.util.caches.descriptors import ( Cache, cached, @@ -443,8 +443,15 @@ def get_user_devices_from_cache(self, query_list): """ user_ids = set(user_id for user_id, _ in query_list) user_map = yield self.get_device_list_last_stream_id_for_remotes(list(user_ids)) - user_ids_in_cache = set( - user_id for user_id, stream_id in user_map.items() if stream_id + + # We go and check if any of the users need to have their device lists + # resynced. If they do then we remove them from the cached list. + users_needing_resync = yield self.get_user_ids_requiring_device_list_resync( + user_ids + ) + user_ids_in_cache = ( + set(user_id for user_id, stream_id in user_map.items() if stream_id) + - users_needing_resync ) user_ids_not_in_cache = user_ids - user_ids_in_cache @@ -641,6 +648,25 @@ def get_device_list_last_stream_id_for_remotes(self, user_ids): return results + @defer.inlineCallbacks + def get_user_ids_requiring_device_list_resync(self, user_ids: Collection[str]): + """Given a list of remote users return the list of users that we + should resync the device lists for. + + Returns: + Deferred[Set[str]] + """ + + rows = yield self.db.simple_select_many_batch( + table="device_lists_remote_resync", + column="user_id", + iterable=user_ids, + retcols=("user_id",), + desc="get_user_ids_requiring_device_list_resync", + ) + + return {row["user_id"] for row in rows} + def mark_remote_user_device_cache_as_stale(self, user_id: str): """Records that the server has reason to believe the cache of the devices for the remote users is out of date. From a5bab2d058747eb7165b20808b34c970e34a4b11 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 30 Jan 2020 16:10:30 +0000 Subject: [PATCH 29/67] When server leaves room check for stale device lists. (#6801) When a server leaves a room it may stop sharing a room with remote users, and thus not get any updates to their device lists. So we need to check for this case and delete those device lists from the cache. We don't need to do this if we stop sharing a room because the remote user leaves the room, because we track that case via looking at membership changes. --- changelog.d/6801.bugfix | 1 + .../storage/data_stores/main/roommember.py | 37 +++++++++++++- synapse/storage/persist_events.py | 51 +++++++++++++++++-- 3 files changed, 83 insertions(+), 6 deletions(-) create mode 100644 changelog.d/6801.bugfix diff --git a/changelog.d/6801.bugfix b/changelog.d/6801.bugfix new file mode 100644 index 000000000000..f401fa5d6930 --- /dev/null +++ b/changelog.d/6801.bugfix @@ -0,0 +1 @@ +Fix bug where Synapse didn't invalidate cache of remote users' devices when Synapse left a room. 
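The gist of the change below: when this server leaves a room, collect the remote users who were joined to it and drop the cached device lists of any with whom we no longer share any room. A simplified, self-contained sketch of that decision (names are illustrative, not the real storage methods):

    def users_to_mark_stale(remote_members_of_left_room, shared_rooms_by_user):
        # Once this server has left the room, we can only keep a remote user's
        # device list up to date if we still share at least one other room
        # with them; everyone else's cached device list must be marked stale.
        return {
            user
            for user in remote_members_of_left_room
            if not shared_rooms_by_user.get(user)
        }

    # bob still shares another room with us, alice does not.
    assert users_to_mark_stale(
        {"@alice:remote", "@bob:remote"},
        {"@bob:remote": {"!other:example.org"}},
    ) == {"@alice:remote"}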
diff --git a/synapse/storage/data_stores/main/roommember.py b/synapse/storage/data_stores/main/roommember.py index 9acef7c95040..042289f0e00d 100644 --- a/synapse/storage/data_stores/main/roommember.py +++ b/synapse/storage/data_stores/main/roommember.py @@ -15,7 +15,7 @@ # limitations under the License. import logging -from typing import Iterable, List +from typing import Iterable, List, Set from six import iteritems, itervalues @@ -40,7 +40,7 @@ ProfileInfo, RoomsForUser, ) -from synapse.types import get_domain_from_id +from synapse.types import Collection, get_domain_from_id from synapse.util.async_helpers import Linearizer from synapse.util.caches import intern_string from synapse.util.caches.descriptors import cached, cachedInlineCallbacks, cachedList @@ -439,6 +439,39 @@ def _get_rooms_for_user_with_stream_ordering_txn(self, txn, user_id): return results + async def get_users_server_still_shares_room_with( + self, user_ids: Collection[str] + ) -> Set[str]: + """Given a list of users return the set that the server still share a + room with. + """ + + if not user_ids: + return set() + + def _get_users_server_still_shares_room_with_txn(txn): + sql = """ + SELECT state_key FROM current_state_events + WHERE + type = 'm.room.member' + AND membership = 'join' + AND %s + GROUP BY state_key + """ + + clause, args = make_in_list_sql_clause( + self.database_engine, "state_key", user_ids + ) + + txn.execute(sql % (clause,), args) + + return set(row[0] for row in txn) + + return await self.db.runInteraction( + "get_users_server_still_shares_room_with", + _get_users_server_still_shares_room_with_txn, + ) + @defer.inlineCallbacks def get_rooms_for_user(self, user_id, on_invalidate=None): """Returns a set of room_ids the user is currently joined to. diff --git a/synapse/storage/persist_events.py b/synapse/storage/persist_events.py index d060c8b99267..86166fd4c17b 100644 --- a/synapse/storage/persist_events.py +++ b/synapse/storage/persist_events.py @@ -18,7 +18,7 @@ import itertools import logging from collections import deque, namedtuple -from typing import Iterable, List, Optional, Tuple +from typing import Iterable, List, Optional, Set, Tuple from six import iteritems from six.moves import range @@ -318,6 +318,11 @@ async def _persist_events( # room state_delta_for_room = {} + # Set of remote users which were in rooms the server has left. We + # should check if we still share any rooms and if not we mark their + # device lists as stale. + potentially_left_users = set() # type: Set[str] + if not backfilled: with Measure(self._clock, "_calculate_state_and_extrem"): # Work out the new "current state" for each room. @@ -421,7 +426,11 @@ async def _persist_events( # the room then we delete the current state and # extremities. is_still_joined = await self._is_server_still_joined( - room_id, ev_ctx_rm, delta, current_state + room_id, + ev_ctx_rm, + delta, + current_state, + potentially_left_users, ) if not is_still_joined: logger.info("Server no longer in room %s", room_id) @@ -444,6 +453,8 @@ async def _persist_events( backfilled=backfilled, ) + await self._handle_potentially_left_users(potentially_left_users) + async def _calculate_new_extremities( self, room_id: str, @@ -688,6 +699,7 @@ async def _is_server_still_joined( ev_ctx_rm: List[Tuple[FrozenEvent, EventContext]], delta: DeltaState, current_state: Optional[StateMap[str]], + potentially_left_users: Set[str], ) -> bool: """Check if the server will still be joined after the given events have been persised. 
@@ -699,6 +711,9 @@ async def _is_server_still_joined( and what the new current state will be. current_state: The new current state if it already been calculated, otherwise None. + potentially_left_users: If the server has left the room, then joined + remote users will be added to this set to indicate that the + server may no longer be sharing a room with them. """ if not any( @@ -741,5 +756,33 @@ async def _is_server_still_joined( is_still_joined = any(row["membership"] == Membership.JOIN for row in rows) if is_still_joined: return True - else: - return False + + # The server will leave the room, so we go and find out which remote + # users will still be joined when we leave. + remote_event_ids = [ + event_id + for (typ, state_key,), event_id in current_state.items() + if typ == EventTypes.Member and not self.is_mine_id(state_key) + ] + rows = await self.main_store.get_membership_from_event_ids(remote_event_ids) + potentially_left_users.update( + row["user_id"] for row in rows if row["membership"] == Membership.JOIN + ) + + return False + + async def _handle_potentially_left_users(self, user_ids: Set[str]): + """Given a set of remote users check if the server still shares a room with + them. If not then mark those users' device cache as stale. + """ + + if not user_ids: + return + + joined_users = await self.main_store.get_users_server_still_shares_room_with( + user_ids + ) + left_users = user_ids - joined_users + + for user_id in left_users: + await self.main_store.mark_remote_user_device_list_as_unsubscribed(user_id) From c3d4ad8afdbe181707451410100dec4817c2c01a Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 30 Jan 2020 16:42:11 +0000 Subject: [PATCH 30/67] Fix sending server up commands from workers (#6811) Co-authored-by: Andrew Morgan <1342360+anoadragon453@users.noreply.github.com> --- changelog.d/6811.bugfix | 1 + synapse/federation/transport/client.py | 5 ++++- synapse/federation/transport/server.py | 26 +++++++++++++++----------- synapse/replication/tcp/client.py | 4 ++++ synapse/server.pyi | 12 +++++++++++- tox.ini | 1 + 6 files changed, 36 insertions(+), 13 deletions(-) create mode 100644 changelog.d/6811.bugfix diff --git a/changelog.d/6811.bugfix b/changelog.d/6811.bugfix new file mode 100644 index 000000000000..361f2fc2e816 --- /dev/null +++ b/changelog.d/6811.bugfix @@ -0,0 +1 @@ +Fix waking up other workers when remote server is detected to have come back online. diff --git a/synapse/federation/transport/client.py b/synapse/federation/transport/client.py index 198257414b3f..dc563538deb6 100644 --- a/synapse/federation/transport/client.py +++ b/synapse/federation/transport/client.py @@ -15,6 +15,7 @@ # limitations under the License. 
import logging +from typing import Any, Dict from six.moves import urllib @@ -352,7 +353,9 @@ def get_public_rooms( else: path = _create_v1_path("/publicRooms") - args = {"include_all_networks": "true" if include_all_networks else "false"} + args = { + "include_all_networks": "true" if include_all_networks else "false" + } # type: Dict[str, Any] if third_party_instance_id: args["third_party_instance_id"] = (third_party_instance_id,) if limit: diff --git a/synapse/federation/transport/server.py b/synapse/federation/transport/server.py index d8cf9ed299f6..125eadd796ef 100644 --- a/synapse/federation/transport/server.py +++ b/synapse/federation/transport/server.py @@ -18,6 +18,7 @@ import functools import logging import re +from typing import Optional, Tuple, Type from twisted.internet.defer import maybeDeferred @@ -267,6 +268,8 @@ class BaseFederationServlet(object): returned. """ + PATH = "" # Overridden in subclasses, the regex to match against the path. + REQUIRE_AUTH = True PREFIX = FEDERATION_V1_PREFIX # Allows specifying the API version @@ -347,9 +350,6 @@ async def new_func(request, *args, **kwargs): return response - # Extra logic that functools.wraps() doesn't finish - new_func.__self__ = func.__self__ - return new_func def register(self, server): @@ -824,7 +824,7 @@ async def on_POST(self, origin, content, query): if not self.allow_access: raise FederationDeniedError(origin) - limit = int(content.get("limit", 100)) + limit = int(content.get("limit", 100)) # type: Optional[int] since_token = content.get("since", None) search_filter = content.get("filter", None) @@ -971,7 +971,7 @@ async def on_POST(self, origin, content, query, group_id, room_id, config_key): if get_domain_from_id(requester_user_id) != origin: raise SynapseError(403, "requester_user_id doesn't match origin") - result = await self.groups_handler.update_room_in_group( + result = await self.handler.update_room_in_group( group_id, requester_user_id, room_id, config_key, content ) @@ -1422,11 +1422,13 @@ async def on_GET(self, origin, content, query, room_id): On3pidBindServlet, FederationVersionServlet, RoomComplexityServlet, -) +) # type: Tuple[Type[BaseFederationServlet], ...] -OPENID_SERVLET_CLASSES = (OpenIdUserInfo,) +OPENID_SERVLET_CLASSES = ( + OpenIdUserInfo, +) # type: Tuple[Type[BaseFederationServlet], ...] -ROOM_LIST_CLASSES = (PublicRoomList,) +ROOM_LIST_CLASSES = (PublicRoomList,) # type: Tuple[Type[PublicRoomList], ...] GROUP_SERVER_SERVLET_CLASSES = ( FederationGroupsProfileServlet, @@ -1447,17 +1449,19 @@ async def on_GET(self, origin, content, query, room_id): FederationGroupsAddRoomsServlet, FederationGroupsAddRoomsConfigServlet, FederationGroupsSettingJoinPolicyServlet, -) +) # type: Tuple[Type[BaseFederationServlet], ...] GROUP_LOCAL_SERVLET_CLASSES = ( FederationGroupsLocalInviteServlet, FederationGroupsRemoveLocalUserServlet, FederationGroupsBulkPublicisedServlet, -) +) # type: Tuple[Type[BaseFederationServlet], ...] -GROUP_ATTESTATION_SERVLET_CLASSES = (FederationGroupsRenewAttestaionServlet,) +GROUP_ATTESTATION_SERVLET_CLASSES = ( + FederationGroupsRenewAttestaionServlet, +) # type: Tuple[Type[BaseFederationServlet], ...] 
DEFAULT_SERVLET_GROUPS = ( "federation", diff --git a/synapse/replication/tcp/client.py b/synapse/replication/tcp/client.py index fc06a7b0536d..02ab5b66eab7 100644 --- a/synapse/replication/tcp/client.py +++ b/synapse/replication/tcp/client.py @@ -31,6 +31,7 @@ Command, FederationAckCommand, InvalidateCacheCommand, + RemoteServerUpCommand, RemovePusherCommand, UserIpCommand, UserSyncCommand, @@ -210,6 +211,9 @@ def send_user_ip(self, user_id, access_token, ip, user_agent, device_id, last_se cmd = UserIpCommand(user_id, access_token, ip, user_agent, device_id, last_seen) self.send_command(cmd) + def send_remote_server_up(self, server: str): + self.send_command(RemoteServerUpCommand(server)) + def await_sync(self, data): """Returns a deferred that is resolved when we receive a SYNC command with given data. diff --git a/synapse/server.pyi b/synapse/server.pyi index 07314030473a..90347ac23eb0 100644 --- a/synapse/server.pyi +++ b/synapse/server.pyi @@ -2,8 +2,8 @@ import twisted.internet import synapse.api.auth import synapse.config.homeserver +import synapse.crypto.keyring import synapse.federation.sender -import synapse.federation.transaction_queue import synapse.federation.transport.client import synapse.handlers import synapse.handlers.auth @@ -17,6 +17,7 @@ import synapse.handlers.room_member import synapse.handlers.set_password import synapse.http.client import synapse.notifier +import synapse.replication.tcp.client import synapse.rest.media.v1.media_repository import synapse.server_notices.server_notices_manager import synapse.server_notices.server_notices_sender @@ -27,6 +28,9 @@ class HomeServer(object): @property def config(self) -> synapse.config.homeserver.HomeServerConfig: pass + @property + def hostname(self) -> str: + pass def get_auth(self) -> synapse.api.auth.Auth: pass def get_auth_handler(self) -> synapse.handlers.auth.AuthHandler: @@ -97,3 +101,9 @@ class HomeServer(object): pass def get_reactor(self) -> twisted.internet.base.ReactorBase: pass + def get_keyring(self) -> synapse.crypto.keyring.Keyring: + pass + def get_tcp_replication( + self, + ) -> synapse.replication.tcp.client.ReplicationClientHandler: + pass diff --git a/tox.ini b/tox.ini index 1d946a02ba74..88ef12bebd32 100644 --- a/tox.ini +++ b/tox.ini @@ -179,6 +179,7 @@ extras = all commands = mypy \ synapse/api \ synapse/config/ \ + synapse/federation/transport \ synapse/handlers/ui_auth \ synapse/logging/ \ synapse/module_api \ From b660327056cdced860d532ab2404a26946da7ef5 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 30 Jan 2020 17:06:38 +0000 Subject: [PATCH 31/67] Resync remote device list when detected as stale. (#6786) --- changelog.d/6786.misc | 1 + synapse/handlers/devicemessage.py | 10 ++++++++-- synapse/handlers/federation.py | 18 ++++++++++++++++-- tests/handlers/test_typing.py | 6 +++--- 4 files changed, 28 insertions(+), 7 deletions(-) create mode 100644 changelog.d/6786.misc diff --git a/changelog.d/6786.misc b/changelog.d/6786.misc new file mode 100644 index 000000000000..94c692e53a7b --- /dev/null +++ b/changelog.d/6786.misc @@ -0,0 +1 @@ +Attempt to resync remote users' devices when detected as stale. 
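This patch replaces the old "TODO: poke something to start trying to refetch user's keys" with an immediate background resync whenever activity arrives from a device we do not recognise. A minimal asyncio sketch of that fire-and-forget shape (Synapse itself uses Twisted and run_in_background rather than asyncio; the callables here are stand-ins):

    import asyncio

    async def on_unknown_sender_device(user_id, mark_stale, resync):
        # First record that our cached device list for this user may be out of
        # date, then kick off the (potentially slow) resync without blocking
        # the caller -- the same fire-and-forget pattern as run_in_background().
        await mark_stale(user_id)
        asyncio.create_task(resync(user_id))

    async def demo():
        async def mark_stale(user):
            print("marking device cache stale for", user)
        async def resync(user):
            print("resyncing device list for", user)
        await on_unknown_sender_device("@alice:remote", mark_stale, resync)
        await asyncio.sleep(0)  # give the background task a chance to run

    asyncio.run(demo())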
diff --git a/synapse/handlers/devicemessage.py b/synapse/handlers/devicemessage.py index 5c5fe77be2e1..05c4b3eec0e9 100644 --- a/synapse/handlers/devicemessage.py +++ b/synapse/handlers/devicemessage.py @@ -21,6 +21,7 @@ from twisted.internet import defer from synapse.api.errors import SynapseError +from synapse.logging.context import run_in_background from synapse.logging.opentracing import ( get_active_span_text_map, log_kv, @@ -48,6 +49,8 @@ def __init__(self, hs): "m.direct_to_device", self.on_direct_to_device_edu ) + self._device_list_updater = hs.get_device_handler().device_list_updater + @defer.inlineCallbacks def on_direct_to_device_edu(self, origin, content): local_messages = {} @@ -134,8 +137,11 @@ def _check_for_unknown_devices( unknown_devices, ) yield self.store.mark_remote_user_device_cache_as_stale(sender_user_id) - # TODO: Poke something to start trying to refetch user's - # keys. + + # Immediately attempt a resync in the background + run_in_background( + self._device_list_updater.user_device_resync, sender_user_id + ) @defer.inlineCallbacks def send_device_message(self, sender_user_id, message_type, messages): diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index a67020a25903..ca484e545837 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -57,6 +57,7 @@ run_in_background, ) from synapse.logging.utils import log_function +from synapse.replication.http.devices import ReplicationUserDevicesResyncRestServlet from synapse.replication.http.federation import ( ReplicationCleanRoomRestServlet, ReplicationFederationSendEventsRestServlet, @@ -156,6 +157,13 @@ def __init__(self, hs): hs ) + if hs.config.worker_app: + self._user_device_resync = ReplicationUserDevicesResyncRestServlet.make_client( + hs + ) + else: + self._device_list_updater = hs.get_device_handler().device_list_updater + # When joining a room we need to queue any events for that room up self.room_queues = {} self._room_pdu_linearizer = Linearizer("fed_room_pdu") @@ -759,8 +767,14 @@ async def _process_received_pdu( await self.store.mark_remote_user_device_cache_as_stale( event.sender ) - # TODO: Poke something to start trying to refetch user's - # keys. 
+ + # Immediately attempt a resync in the background + if self.config.worker_app: + return run_in_background(self._user_device_resync, event.sender) + else: + return run_in_background( + self._device_list_updater.user_device_resync, event.sender + ) @log_function async def backfill(self, dest, room_id, limit, extremities): diff --git a/tests/handlers/test_typing.py b/tests/handlers/test_typing.py index 596ddc697050..68b9847bd2b2 100644 --- a/tests/handlers/test_typing.py +++ b/tests/handlers/test_typing.py @@ -81,6 +81,9 @@ def make_homeserver(self, reactor, clock): ] ) + # the tests assume that we are starting at unix time 1000 + reactor.pump((1000,)) + hs = self.setup_test_homeserver( notifier=Mock(), http_client=mock_federation_client, keyring=mock_keyring ) @@ -90,9 +93,6 @@ def make_homeserver(self, reactor, clock): return hs def prepare(self, reactor, clock, hs): - # the tests assume that we are starting at unix time 1000 - reactor.pump((1000,)) - mock_notifier = hs.get_notifier() self.on_new_event = mock_notifier.on_new_event From 57ad702af0511aff36ca69fb2e9fc3399cce3a8d Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 30 Jan 2020 17:17:44 +0000 Subject: [PATCH 32/67] Backgroud update to clean out rooms from current state (#6802) --- changelog.d/6802.misc | 1 + .../57/delete_old_current_state_events.sql | 19 +++ synapse/storage/data_stores/main/state.py | 108 +++++++++++++++++- 3 files changed, 126 insertions(+), 2 deletions(-) create mode 100644 changelog.d/6802.misc create mode 100644 synapse/storage/data_stores/main/schema/delta/57/delete_old_current_state_events.sql diff --git a/changelog.d/6802.misc b/changelog.d/6802.misc new file mode 100644 index 000000000000..a77ba1d7a5ca --- /dev/null +++ b/changelog.d/6802.misc @@ -0,0 +1 @@ +Add background update to clean out left rooms from current state. diff --git a/synapse/storage/data_stores/main/schema/delta/57/delete_old_current_state_events.sql b/synapse/storage/data_stores/main/schema/delta/57/delete_old_current_state_events.sql new file mode 100644 index 000000000000..a133d87a193f --- /dev/null +++ b/synapse/storage/data_stores/main/schema/delta/57/delete_old_current_state_events.sql @@ -0,0 +1,19 @@ +/* Copyright 2020 The Matrix.org Foundation C.I.C + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +-- Add background update to go and delete current state events for rooms the +-- server is no longer in. 
+INSERT into background_updates (update_name, progress_json) + VALUES ('delete_old_current_state_events', '{}'); diff --git a/synapse/storage/data_stores/main/state.py b/synapse/storage/data_stores/main/state.py index bd7b0276f14d..9b6f68e77764 100644 --- a/synapse/storage/data_stores/main/state.py +++ b/synapse/storage/data_stores/main/state.py @@ -21,12 +21,13 @@ from twisted.internet import defer -from synapse.api.constants import EventTypes +from synapse.api.constants import EventTypes, Membership from synapse.api.errors import NotFoundError from synapse.events import EventBase from synapse.events.snapshot import EventContext from synapse.storage._base import SQLBaseStore from synapse.storage.data_stores.main.events_worker import EventsWorkerStore +from synapse.storage.data_stores.main.roommember import RoomMemberWorkerStore from synapse.storage.database import Database from synapse.storage.state import StateFilter from synapse.util.caches import intern_string @@ -300,14 +301,17 @@ def get_referenced_state_groups(self, state_groups): return set(row["state_group"] for row in rows) -class MainStateBackgroundUpdateStore(SQLBaseStore): +class MainStateBackgroundUpdateStore(RoomMemberWorkerStore): CURRENT_STATE_INDEX_UPDATE_NAME = "current_state_members_idx" EVENT_STATE_GROUP_INDEX_UPDATE_NAME = "event_to_state_groups_sg_index" + DELETE_CURRENT_STATE_UPDATE_NAME = "delete_old_current_state_events" def __init__(self, database: Database, db_conn, hs): super(MainStateBackgroundUpdateStore, self).__init__(database, db_conn, hs) + self.server_name = hs.hostname + self.db.updates.register_background_index_update( self.CURRENT_STATE_INDEX_UPDATE_NAME, index_name="current_state_events_member_index", @@ -321,6 +325,106 @@ def __init__(self, database: Database, db_conn, hs): table="event_to_state_groups", columns=["state_group"], ) + self.db.updates.register_background_update_handler( + self.DELETE_CURRENT_STATE_UPDATE_NAME, self._background_remove_left_rooms, + ) + + async def _background_remove_left_rooms(self, progress, batch_size): + """Background update to delete rows from `current_state_events` and + `event_forward_extremities` tables of rooms that the server is no + longer joined to. + """ + + last_room_id = progress.get("last_room_id", "") + + def _background_remove_left_rooms_txn(txn): + sql = """ + SELECT DISTINCT room_id FROM current_state_events + WHERE room_id > ? ORDER BY room_id LIMIT ? + """ + + txn.execute(sql, (last_room_id, batch_size)) + room_ids = list(row[0] for row in txn) + if not room_ids: + return True, set() + + sql = """ + SELECT room_id + FROM current_state_events + WHERE + room_id > ? AND room_id <= ? + AND type = 'm.room.member' + AND membership = 'join' + AND state_key LIKE ? + GROUP BY room_id + """ + + txn.execute(sql, (last_room_id, room_ids[-1], "%:" + self.server_name)) + + joined_room_ids = set(row[0] for row in txn) + + left_rooms = set(room_ids) - joined_room_ids + + # First we get all users that we still think were joined to the + # room. This is so that we can mark those device lists as + # potentially stale, since there may have been a period where the + # server didn't share a room with the remote user and therefore may + # have missed any device updates. 
+ rows = self.db.simple_select_many_txn( + txn, + table="current_state_events", + column="room_id", + iterable=left_rooms, + keyvalues={"type": EventTypes.Member, "membership": Membership.JOIN}, + retcols=("state_key",), + ) + + potentially_left_users = set(row["state_key"] for row in rows) + + # Now lets actually delete the rooms from the DB. + self.db.simple_delete_many_txn( + txn, + table="current_state_events", + column="room_id", + iterable=left_rooms, + keyvalues={}, + ) + + self.db.simple_delete_many_txn( + txn, + table="event_forward_extremities", + column="room_id", + iterable=left_rooms, + keyvalues={}, + ) + + self.db.updates._background_update_progress_txn( + txn, + self.DELETE_CURRENT_STATE_UPDATE_NAME, + {"last_room_id": room_ids[-1]}, + ) + + return False, potentially_left_users + + finished, potentially_left_users = await self.db.runInteraction( + "_background_remove_left_rooms", _background_remove_left_rooms_txn + ) + + if finished: + await self.db.updates._end_background_update( + self.DELETE_CURRENT_STATE_UPDATE_NAME + ) + + # Now go and check if we still share a room with the remote users in + # the deleted rooms. If not mark their device lists as stale. + joined_users = await self.get_users_server_still_shares_room_with( + potentially_left_users + ) + + for user_id in potentially_left_users - joined_users: + await self.mark_remote_user_device_list_as_unsubscribed(user_id) + + return batch_size class StateStore(StateGroupWorkerStore, MainStateBackgroundUpdateStore): From 184303b8650a90256f84bc9801b749a5b81b6d4b Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Thu, 30 Jan 2020 17:20:55 +0000 Subject: [PATCH 33/67] MSC2260: Block direct sends of m.room.aliases events (#6794) as per MSC2260 --- changelog.d/6794.feature | 1 + synapse/rest/client/v1/room.py | 12 ++++++++ tests/rest/admin/test_admin.py | 7 ----- tests/rest/client/v1/test_directory.py | 41 ++++++++++---------------- 4 files changed, 28 insertions(+), 33 deletions(-) create mode 100644 changelog.d/6794.feature diff --git a/changelog.d/6794.feature b/changelog.d/6794.feature new file mode 100644 index 000000000000..df9e4b77abde --- /dev/null +++ b/changelog.d/6794.feature @@ -0,0 +1 @@ +Implement updated authorization rules for aliases events, from [MSC2260](https://github.com/matrix-org/matrix-doc/pull/2260). 
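The servlet changes below reject m.room.aliases events sent through the generic state/send endpoints, per MSC2260. A toy sketch of the new guard and the status codes a client should expect (illustrative only; the real check raises a SynapseError inside the servlet):

    ALIASES_EVENT_TYPE = "m.room.aliases"

    def check_client_can_send(event_type):
        # Alias events may no longer be sent directly via
        # /rooms/{room_id}/state or /rooms/{room_id}/send; clients are
        # expected to use the /directory API instead.
        if event_type == ALIASES_EVENT_TYPE:
            return 400, "Cannot send m.room.aliases events via /rooms/{room_id}/state"
        return 200, None

    assert check_client_can_send("m.room.aliases")[0] == 400
    assert check_client_can_send("m.room.name")[0] == 200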
diff --git a/synapse/rest/client/v1/room.py b/synapse/rest/client/v1/room.py index 5aef8238b8a7..6f31584c51d4 100644 --- a/synapse/rest/client/v1/room.py +++ b/synapse/rest/client/v1/room.py @@ -184,6 +184,12 @@ async def on_PUT(self, request, room_id, event_type, state_key, txn_id=None): content = parse_json_object_from_request(request) + if event_type == EventTypes.Aliases: + # MSC2260 + raise SynapseError( + 400, "Cannot send m.room.aliases events via /rooms/{room_id}/state" + ) + event_dict = { "type": event_type, "content": content, @@ -231,6 +237,12 @@ async def on_POST(self, request, room_id, event_type, txn_id=None): requester = await self.auth.get_user_by_req(request, allow_guest=True) content = parse_json_object_from_request(request) + if event_type == EventTypes.Aliases: + # MSC2260 + raise SynapseError( + 400, "Cannot send m.room.aliases events via /rooms/{room_id}/send" + ) + event_dict = { "type": event_type, "content": content, diff --git a/tests/rest/admin/test_admin.py b/tests/rest/admin/test_admin.py index 0342aed416b0..e5984aaad851 100644 --- a/tests/rest/admin/test_admin.py +++ b/tests/rest/admin/test_admin.py @@ -868,13 +868,6 @@ def test_correct_room_attributes(self): self.assertEqual(200, int(channel.result["code"]), msg=channel.result["body"]) # Set this new alias as the canonical alias for this room - self.helper.send_state( - room_id, - "m.room.aliases", - {"aliases": [test_alias]}, - tok=self.admin_user_tok, - state_key="test", - ) self.helper.send_state( room_id, "m.room.canonical_alias", diff --git a/tests/rest/client/v1/test_directory.py b/tests/rest/client/v1/test_directory.py index 633b7dbda093..914cf5492716 100644 --- a/tests/rest/client/v1/test_directory.py +++ b/tests/rest/client/v1/test_directory.py @@ -51,26 +51,30 @@ def prepare(self, reactor, clock, homeserver): self.user = self.register_user("user", "test") self.user_tok = self.login("user", "test") - def test_state_event_not_in_room(self): - self.ensure_user_left_room() - self.set_alias_via_state_event(403) + def test_cannot_set_alias_via_state_event(self): + self.ensure_user_joined_room() + url = "/_matrix/client/r0/rooms/%s/state/m.room.aliases/%s" % ( + self.room_id, + self.hs.hostname, + ) + + data = {"aliases": [self.random_alias(5)]} + request_data = json.dumps(data) + + request, channel = self.make_request( + "PUT", url, request_data, access_token=self.user_tok + ) + self.render(request) + self.assertEqual(channel.code, 400, channel.result) def test_directory_endpoint_not_in_room(self): self.ensure_user_left_room() self.set_alias_via_directory(403) - def test_state_event_in_room_too_long(self): - self.ensure_user_joined_room() - self.set_alias_via_state_event(400, alias_length=256) - def test_directory_in_room_too_long(self): self.ensure_user_joined_room() self.set_alias_via_directory(400, alias_length=256) - def test_state_event_in_room(self): - self.ensure_user_joined_room() - self.set_alias_via_state_event(200) - def test_directory_in_room(self): self.ensure_user_joined_room() self.set_alias_via_directory(200) @@ -102,21 +106,6 @@ def test_room_creation(self): self.render(request) self.assertEqual(channel.code, 200, channel.result) - def set_alias_via_state_event(self, expected_code, alias_length=5): - url = "/_matrix/client/r0/rooms/%s/state/m.room.aliases/%s" % ( - self.room_id, - self.hs.hostname, - ) - - data = {"aliases": [self.random_alias(alias_length)]} - request_data = json.dumps(data) - - request, channel = self.make_request( - "PUT", url, request_data, access_token=self.user_tok 
- ) - self.render(request) - self.assertEqual(channel.code, expected_code, channel.result) - def set_alias_via_directory(self, expected_code, alias_length=5): url = "/_matrix/client/r0/directory/room/%s" % self.random_alias(alias_length) data = {"room_id": self.room_id} From e0992fcc5be9e850a5007d1d09fea79bea949cf6 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 30 Jan 2020 17:55:34 +0000 Subject: [PATCH 34/67] Log when we delete room in bg update (#6816) --- changelog.d/6816.misc | 1 + synapse/storage/data_stores/main/state.py | 2 ++ 2 files changed, 3 insertions(+) create mode 100644 changelog.d/6816.misc diff --git a/changelog.d/6816.misc b/changelog.d/6816.misc new file mode 100644 index 000000000000..a77ba1d7a5ca --- /dev/null +++ b/changelog.d/6816.misc @@ -0,0 +1 @@ +Add background update to clean out left rooms from current state. diff --git a/synapse/storage/data_stores/main/state.py b/synapse/storage/data_stores/main/state.py index 9b6f68e77764..4167f83c9b68 100644 --- a/synapse/storage/data_stores/main/state.py +++ b/synapse/storage/data_stores/main/state.py @@ -365,6 +365,8 @@ def _background_remove_left_rooms_txn(txn): left_rooms = set(room_ids) - joined_room_ids + logger.info("Deleting current state left rooms: %r", left_rooms) + # First we get all users that we still think were joined to the # room. This is so that we can mark those device lists as # potentially stale, since there may have been a period where the From 46a446828d1b4b1ca2d9b0dcae97323a1bbc0c0b Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Thu, 30 Jan 2020 22:13:02 +0000 Subject: [PATCH 35/67] pass room version into FederationHandler.on_invite_request (#6805) --- changelog.d/6805.misc | 1 + synapse/federation/federation_server.py | 2 +- synapse/handlers/federation.py | 6 +++--- 3 files changed, 5 insertions(+), 4 deletions(-) create mode 100644 changelog.d/6805.misc diff --git a/changelog.d/6805.misc b/changelog.d/6805.misc new file mode 100644 index 000000000000..08aa80bcd91f --- /dev/null +++ b/changelog.d/6805.misc @@ -0,0 +1 @@ +Refactoring work in preparation for changing the event redaction algorithm. diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 8eddb3bf2c78..9562faa3ee13 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ -410,7 +410,7 @@ async def on_invite_request(self, origin, content, room_version): origin_host, _ = parse_server_name(origin) await self.check_server_matches_acl(origin_host, pdu.room_id) pdu = await self._check_sigs_and_hash(room_version, pdu) - ret_pdu = await self.handler.on_invite_request(origin, pdu) + ret_pdu = await self.handler.on_invite_request(origin, pdu, room_version) time_now = self._clock.time_msec() return {"event": ret_pdu.get_pdu_json(time_now)} diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index ca484e545837..01372f6d4766 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -1482,13 +1482,13 @@ def on_send_join_request(self, origin, pdu): return {"state": list(state.values()), "auth_chain": auth_chain} @defer.inlineCallbacks - def on_invite_request(self, origin, pdu): + def on_invite_request( + self, origin: str, event: EventBase, room_version: RoomVersion + ): """ We've got an invite event. Process and persist it. Sign it. Respond with the now signed event. 
""" - event = pdu - if event.state_key is None: raise SynapseError(400, "The invite event did not have a state key") From ef6bdafb29abe14cb40f6b83a46e70d82cd3e041 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 29 Jan 2020 17:55:48 +0000 Subject: [PATCH 36/67] Store the room version in EventBuilder --- synapse/events/builder.py | 12 +++++++----- tests/handlers/test_presence.py | 4 ++-- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/synapse/events/builder.py b/synapse/events/builder.py index 399775133726..291fb38a2671 100644 --- a/synapse/events/builder.py +++ b/synapse/events/builder.py @@ -23,6 +23,7 @@ KNOWN_EVENT_FORMAT_VERSIONS, KNOWN_ROOM_VERSIONS, EventFormatVersions, + RoomVersion, ) from synapse.crypto.event_signing import add_hashes_and_signatures from synapse.types import EventID @@ -40,7 +41,7 @@ class EventBuilder(object): content/unsigned/internal_metadata fields are still mutable) Attributes: - format_version (int): Event format version + room_version: Version of the target room room_id (str) type (str) sender (str) @@ -63,7 +64,7 @@ class EventBuilder(object): _hostname = attr.ib() _signing_key = attr.ib() - format_version = attr.ib() + room_version = attr.ib(type=RoomVersion) room_id = attr.ib() type = attr.ib() @@ -108,7 +109,8 @@ def build(self, prev_event_ids): ) auth_ids = yield self._auth.compute_auth_events(self, state_ids) - if self.format_version == EventFormatVersions.V1: + format_version = self.room_version.event_format + if format_version == EventFormatVersions.V1: auth_events = yield self._store.add_event_hashes(auth_ids) prev_events = yield self._store.add_event_hashes(prev_event_ids) else: @@ -148,7 +150,7 @@ def build(self, prev_event_ids): clock=self._clock, hostname=self._hostname, signing_key=self._signing_key, - format_version=self.format_version, + format_version=format_version, event_dict=event_dict, internal_metadata_dict=self.internal_metadata.get_dict(), ) @@ -201,7 +203,7 @@ def for_room_version(self, room_version, key_values): clock=self.clock, hostname=self.hostname, signing_key=self.signing_key, - format_version=room_version.event_format, + room_version=room_version, type=key_values["type"], state_key=key_values.get("state_key"), room_id=key_values["room_id"], diff --git a/tests/handlers/test_presence.py b/tests/handlers/test_presence.py index d4293b4312cb..69914428e203 100644 --- a/tests/handlers/test_presence.py +++ b/tests/handlers/test_presence.py @@ -19,7 +19,7 @@ from signedjson.key import generate_signing_key from synapse.api.constants import EventTypes, Membership, PresenceState -from synapse.events import room_version_to_event_format +from synapse.api.room_versions import KNOWN_ROOM_VERSIONS from synapse.events.builder import EventBuilder from synapse.handlers.presence import ( EXTERNAL_PROCESS_EXPIRY, @@ -597,7 +597,7 @@ def _add_new_user(self, room_id, user_id): clock=self.clock, hostname=hostname, signing_key=self.random_signing_key, - format_version=room_version_to_event_format(room_version), + room_version=KNOWN_ROOM_VERSIONS[room_version], room_id=room_id, type=EventTypes.Member, sender=user_id, From 54f3f369bd94ce22b3e052d4b795ad5d0c4618bc Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 29 Jan 2020 17:58:01 +0000 Subject: [PATCH 37/67] Pass room_version into create_local_event_from_event_dict --- synapse/events/builder.py | 40 +++++++++++-------------- synapse/federation/federation_client.py | 4 +-- 2 files changed, 19 insertions(+), 25 deletions(-) diff --git 
a/synapse/events/builder.py b/synapse/events/builder.py index 291fb38a2671..a26f4c9044ea 100644 --- a/synapse/events/builder.py +++ b/synapse/events/builder.py @@ -12,8 +12,10 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from typing import Optional import attr +from nacl.signing import SigningKey from twisted.internet import defer @@ -26,11 +28,15 @@ RoomVersion, ) from synapse.crypto.event_signing import add_hashes_and_signatures -from synapse.types import EventID +from synapse.events import ( + EventBase, + _EventInternalMetadata, + event_type_from_format_version, +) +from synapse.types import EventID, JsonDict +from synapse.util import Clock from synapse.util.stringutils import random_string -from . import _EventInternalMetadata, event_type_from_format_version - @attr.s(slots=True, cmp=False, frozen=True) class EventBuilder(object): @@ -150,7 +156,7 @@ def build(self, prev_event_ids): clock=self._clock, hostname=self._hostname, signing_key=self._signing_key, - format_version=format_version, + room_version=self.room_version, event_dict=event_dict, internal_metadata_dict=self.internal_metadata.get_dict(), ) @@ -216,29 +222,19 @@ def for_room_version(self, room_version, key_values): def create_local_event_from_event_dict( - clock, - hostname, - signing_key, - format_version, - event_dict, - internal_metadata_dict=None, -): + clock: Clock, + hostname: str, + signing_key: SigningKey, + room_version: RoomVersion, + event_dict: JsonDict, + internal_metadata_dict: Optional[JsonDict] = None, +) -> EventBase: """Takes a fully formed event dict, ensuring that fields like `origin` and `origin_server_ts` have correct values for a locally produced event, then signs and hashes it. 
- - Args: - clock (Clock) - hostname (str) - signing_key - format_version (int) - event_dict (dict) - internal_metadata_dict (dict|None) - - Returns: - FrozenEvent """ + format_version = room_version.event_format if format_version not in KNOWN_EVENT_FORMAT_VERSIONS: raise Exception("No event format defined for version %r" % (format_version,)) diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index d57e8ca7a280..9be4b69cad0d 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -470,8 +470,6 @@ def send_request(destination): if not room_version: raise UnsupportedRoomVersionError() - event_format = room_version_to_event_format(room_version_id) - pdu_dict = ret.get("event", None) if not isinstance(pdu_dict, dict): raise InvalidResponseError("Bad 'event' field in response") @@ -490,7 +488,7 @@ def send_request(destination): self._clock, self.hostname, self.signing_key, - format_version=event_format, + room_version=room_version, event_dict=pdu_dict, ) From 2a81393a4b905c8bd4c31da04a8b4407462948b9 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 29 Jan 2020 17:40:33 +0000 Subject: [PATCH 38/67] Pass room_version into add_hashes_and_signatures --- synapse/crypto/event_signing.py | 20 +++++++++++++------- synapse/events/builder.py | 2 +- tests/crypto/test_event_signing.py | 9 +++++++-- 3 files changed, 21 insertions(+), 10 deletions(-) diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py index e65bd61d973b..1f2bccf70000 100644 --- a/synapse/crypto/event_signing.py +++ b/synapse/crypto/event_signing.py @@ -20,10 +20,13 @@ from canonicaljson import encode_canonical_json from signedjson.sign import sign_json +from signedjson.types import SigningKey from unpaddedbase64 import decode_base64, encode_base64 from synapse.api.errors import Codes, SynapseError +from synapse.api.room_versions import RoomVersion from synapse.events.utils import prune_event, prune_event_dict +from synapse.types import JsonDict logger = logging.getLogger(__name__) @@ -137,20 +140,23 @@ def compute_event_signature(event_dict, signature_name, signing_key): def add_hashes_and_signatures( - event_dict, signature_name, signing_key, hash_algorithm=hashlib.sha256 + room_version: RoomVersion, + event_dict: JsonDict, + signature_name: str, + signing_key: SigningKey, ): """Add content hash and sign the event Args: - event_dict (dict): The event to add hashes to and sign - signature_name (str): The name of the entity signing the event + room_version: the version of the room this event is in + + event_dict: The event to add hashes to and sign + signature_name: The name of the entity signing the event (typically the server's hostname). - signing_key (syutil.crypto.SigningKey): The key to sign with - hash_algorithm: A hasher from `hashlib`, e.g. 
hashlib.sha256, to use - to hash the event + signing_key: The key to sign with """ - name, digest = compute_content_hash(event_dict, hash_algorithm=hash_algorithm) + name, digest = compute_content_hash(event_dict, hash_algorithm=hashlib.sha256) event_dict.setdefault("hashes", {})[name] = encode_base64(digest) diff --git a/synapse/events/builder.py b/synapse/events/builder.py index a26f4c9044ea..8d63ad6dc361 100644 --- a/synapse/events/builder.py +++ b/synapse/events/builder.py @@ -255,7 +255,7 @@ def create_local_event_from_event_dict( event_dict.setdefault("signatures", {}) - add_hashes_and_signatures(event_dict, hostname, signing_key) + add_hashes_and_signatures(room_version, event_dict, hostname, signing_key) return event_type_from_format_version(format_version)( event_dict, internal_metadata_dict=internal_metadata_dict ) diff --git a/tests/crypto/test_event_signing.py b/tests/crypto/test_event_signing.py index 126e1760048c..6143a50ab21f 100644 --- a/tests/crypto/test_event_signing.py +++ b/tests/crypto/test_event_signing.py @@ -17,6 +17,7 @@ import nacl.signing from unpaddedbase64 import decode_base64 +from synapse.api.room_versions import RoomVersions from synapse.crypto.event_signing import add_hashes_and_signatures from synapse.events import FrozenEvent @@ -49,7 +50,9 @@ def test_sign_minimal(self): "unsigned": {"age_ts": 1000000}, } - add_hashes_and_signatures(event_dict, HOSTNAME, self.signing_key) + add_hashes_and_signatures( + RoomVersions.V1, event_dict, HOSTNAME, self.signing_key + ) event = FrozenEvent(event_dict) @@ -81,7 +84,9 @@ def test_sign_message(self): "unsigned": {"age_ts": 1000000}, } - add_hashes_and_signatures(event_dict, HOSTNAME, self.signing_key) + add_hashes_and_signatures( + RoomVersions.V1, event_dict, HOSTNAME, self.signing_key + ) event = FrozenEvent(event_dict) From 540c5e168b3f7f22d7af905d6d01dcf2a615dff3 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Wed, 29 Jan 2020 18:19:06 +0000 Subject: [PATCH 39/67] changelog --- changelog.d/6806.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/6806.misc diff --git a/changelog.d/6806.misc b/changelog.d/6806.misc new file mode 100644 index 000000000000..08aa80bcd91f --- /dev/null +++ b/changelog.d/6806.misc @@ -0,0 +1 @@ +Refactoring work in preparation for changing the event redaction algorithm. From 7d846e870422c65f3fb436e5b0e543dae17719fc Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 31 Jan 2020 09:49:13 +0000 Subject: [PATCH 40/67] Fix bug with getting missing auth event during join 500'ed (#6810) --- changelog.d/6810.misc | 1 + synapse/handlers/federation.py | 6 +++++- 2 files changed, 6 insertions(+), 1 deletion(-) create mode 100644 changelog.d/6810.misc diff --git a/changelog.d/6810.misc b/changelog.d/6810.misc new file mode 100644 index 000000000000..5537355bea8e --- /dev/null +++ b/changelog.d/6810.misc @@ -0,0 +1 @@ +Record room versions in the `rooms` table. 
diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 01372f6d4766..1f92640f8637 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -1929,7 +1929,11 @@ def _persist_auth_tree( for e_id in missing_auth_events: m_ev = yield self.federation_client.get_pdu( - [origin], e_id, room_version=room_version, outlier=True, timeout=10000 + [origin], + e_id, + room_version=room_version.identifier, + outlier=True, + timeout=10000, ) if m_ev and m_ev.event_id == e_id: event_map[e_id] = m_ev From d7bf793cc1e3f5268285286341835ac54753eff6 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 31 Jan 2020 10:06:21 +0000 Subject: [PATCH 41/67] s/get_room_version/get_room_version_id/ ... to make way for a forthcoming get_room_version which returns a RoomVersion object. --- synapse/federation/federation_client.py | 10 +++++----- synapse/federation/federation_server.py | 16 ++++++++-------- synapse/handlers/federation.py | 18 +++++++++--------- synapse/handlers/message.py | 8 +++++--- synapse/handlers/pagination.py | 2 +- synapse/handlers/room.py | 2 +- synapse/state/__init__.py | 2 +- synapse/storage/data_stores/main/state.py | 2 +- synapse/storage/persist_events.py | 2 +- tests/handlers/test_presence.py | 2 +- tests/test_state.py | 2 +- tests/unittest.py | 4 +++- 12 files changed, 37 insertions(+), 33 deletions(-) diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py index d57e8ca7a280..4ac3d81cba2a 100644 --- a/synapse/federation/federation_client.py +++ b/synapse/federation/federation_client.py @@ -198,7 +198,7 @@ def backfill(self, dest, room_id, limit, extremities): logger.debug("backfill transaction_data=%r", transaction_data) - room_version = yield self.store.get_room_version(room_id) + room_version = yield self.store.get_room_version_id(room_id) format_ver = room_version_to_event_format(room_version) pdus = [ @@ -336,7 +336,7 @@ def get_room_state_ids(self, destination: str, room_id: str, event_id: str): def get_event_auth(self, destination, room_id, event_id): res = yield self.transport_layer.get_event_auth(destination, room_id, event_id) - room_version = yield self.store.get_room_version(room_id) + room_version = yield self.store.get_room_version_id(room_id) format_ver = room_version_to_event_format(room_version) auth_chain = [ @@ -649,7 +649,7 @@ def _do_send_join(self, destination, pdu): @defer.inlineCallbacks def send_invite(self, destination, room_id, event_id, pdu): - room_version = yield self.store.get_room_version(room_id) + room_version = yield self.store.get_room_version_id(room_id) content = yield self._do_send_invite(destination, pdu, room_version) @@ -657,7 +657,7 @@ def send_invite(self, destination, room_id, event_id, pdu): logger.debug("Got response to send_invite: %s", pdu_dict) - room_version = yield self.store.get_room_version(room_id) + room_version = yield self.store.get_room_version_id(room_id) format_ver = room_version_to_event_format(room_version) pdu = event_from_pdu_json(pdu_dict, format_ver) @@ -859,7 +859,7 @@ def get_missing_events( timeout=timeout, ) - room_version = yield self.store.get_room_version(room_id) + room_version = yield self.store.get_room_version_id(room_id) format_ver = room_version_to_event_format(room_version) events = [ diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py index 9562faa3ee13..a4c97ed458f3 100644 --- a/synapse/federation/federation_server.py +++ b/synapse/federation/federation_server.py @@ 
-234,7 +234,7 @@ async def _handle_pdus_in_txn( continue try: - room_version = await self.store.get_room_version(room_id) + room_version = await self.store.get_room_version_id(room_id) except NotFoundError: logger.info("Ignoring PDU for unknown room_id: %s", room_id) continue @@ -334,7 +334,7 @@ async def on_context_state_request(self, origin, room_id, event_id): ) ) - room_version = await self.store.get_room_version(room_id) + room_version = await self.store.get_room_version_id(room_id) resp["room_version"] = room_version return 200, resp @@ -385,7 +385,7 @@ async def on_make_join_request(self, origin, room_id, user_id, supported_version origin_host, _ = parse_server_name(origin) await self.check_server_matches_acl(origin_host, room_id) - room_version = await self.store.get_room_version(room_id) + room_version = await self.store.get_room_version_id(room_id) if room_version not in supported_versions: logger.warning( "Room version %s not in %s", room_version, supported_versions @@ -417,7 +417,7 @@ async def on_invite_request(self, origin, content, room_version): async def on_send_join_request(self, origin, content, room_id): logger.debug("on_send_join_request: content: %s", content) - room_version = await self.store.get_room_version(room_id) + room_version = await self.store.get_room_version_id(room_id) format_ver = room_version_to_event_format(room_version) pdu = event_from_pdu_json(content, format_ver) @@ -440,7 +440,7 @@ async def on_make_leave_request(self, origin, room_id, user_id): await self.check_server_matches_acl(origin_host, room_id) pdu = await self.handler.on_make_leave_request(origin, room_id, user_id) - room_version = await self.store.get_room_version(room_id) + room_version = await self.store.get_room_version_id(room_id) time_now = self._clock.time_msec() return {"event": pdu.get_pdu_json(time_now), "room_version": room_version} @@ -448,7 +448,7 @@ async def on_make_leave_request(self, origin, room_id, user_id): async def on_send_leave_request(self, origin, content, room_id): logger.debug("on_send_leave_request: content: %s", content) - room_version = await self.store.get_room_version(room_id) + room_version = await self.store.get_room_version_id(room_id) format_ver = room_version_to_event_format(room_version) pdu = event_from_pdu_json(content, format_ver) @@ -495,7 +495,7 @@ async def on_query_auth_request(self, origin, content, room_id, event_id): origin_host, _ = parse_server_name(origin) await self.check_server_matches_acl(origin_host, room_id) - room_version = await self.store.get_room_version(room_id) + room_version = await self.store.get_room_version_id(room_id) format_ver = room_version_to_event_format(room_version) auth_chain = [ @@ -664,7 +664,7 @@ async def _handle_received_pdu(self, origin, pdu): logger.info("Accepting join PDU %s from %s", pdu.event_id, origin) # We've already checked that we know the room version by this point - room_version = await self.store.get_room_version(pdu.room_id) + room_version = await self.store.get_room_version_id(pdu.room_id) # Check signature. 
try: diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 01372f6d4766..30c720f093dc 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -388,7 +388,7 @@ async def on_receive_pdu(self, origin, pdu, sent_to_us_directly=False) -> None: for x in remote_state: event_map[x.event_id] = x - room_version = await self.store.get_room_version(room_id) + room_version = await self.store.get_room_version_id(room_id) state_map = await resolve_events_with_store( room_id, room_version, @@ -1110,7 +1110,7 @@ async def _get_events_and_persist( Logs a warning if we can't find the given event. """ - room_version = await self.store.get_room_version(room_id) + room_version = await self.store.get_room_version_id(room_id) event_infos = [] @@ -1373,7 +1373,7 @@ def on_make_join_request(self, origin, room_id, user_id): event_content = {"membership": Membership.JOIN} - room_version = yield self.store.get_room_version(room_id) + room_version = yield self.store.get_room_version_id(room_id) builder = self.event_builder_factory.new( room_version, @@ -1607,7 +1607,7 @@ def on_make_leave_request(self, origin, room_id, user_id): ) raise SynapseError(403, "User not from origin", Codes.FORBIDDEN) - room_version = yield self.store.get_room_version(room_id) + room_version = yield self.store.get_room_version_id(room_id) builder = self.event_builder_factory.new( room_version, { @@ -2055,7 +2055,7 @@ def _check_for_soft_fail( do_soft_fail_check = False if do_soft_fail_check: - room_version = yield self.store.get_room_version(event.room_id) + room_version = yield self.store.get_room_version_id(event.room_id) room_version_obj = KNOWN_ROOM_VERSIONS[room_version] # Calculate the "current state". @@ -2191,7 +2191,7 @@ def do_auth(self, origin, event, context, auth_events): Returns: defer.Deferred[EventContext]: updated context object """ - room_version = yield self.store.get_room_version(event.room_id) + room_version = yield self.store.get_room_version_id(event.room_id) room_version_obj = KNOWN_ROOM_VERSIONS[room_version] try: @@ -2363,7 +2363,7 @@ def _update_auth_events_and_context_for_auth( remote_auth_events.update({(d.type, d.state_key): d for d in different_events}) remote_state = remote_auth_events.values() - room_version = yield self.store.get_room_version(event.room_id) + room_version = yield self.store.get_room_version_id(event.room_id) new_state = yield self.state_handler.resolve_events( room_version, (local_state, remote_state), event ) @@ -2587,7 +2587,7 @@ def exchange_third_party_invite( } if (yield self.auth.check_host_in_room(room_id, self.hs.hostname)): - room_version = yield self.store.get_room_version(room_id) + room_version = yield self.store.get_room_version_id(room_id) builder = self.event_builder_factory.new(room_version, event_dict) EventValidator().validate_builder(builder) @@ -2650,7 +2650,7 @@ def on_exchange_third_party_invite_request(self, room_id, event_dict): Returns: Deferred: resolves (to None) """ - room_version = yield self.store.get_room_version(room_id) + room_version = yield self.store.get_room_version_id(room_id) # NB: event_dict has a particular specced format we might need to fudge # if we change event formats too much. 
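Several of the call sites above follow the renamed `get_room_version_id` call with the same lookup to turn the version string back into a full `RoomVersion` object. The recurring pattern, sketched out of context (simplified, not a verbatim excerpt from any one file):

    from synapse.api.room_versions import KNOWN_ROOM_VERSIONS

    # inside an inlineCallbacks-style handler method:
    room_version_id = yield self.store.get_room_version_id(room_id)  # e.g. "5"
    room_version_obj = KNOWN_ROOM_VERSIONS[room_version_id]          # full RoomVersion object

The `get_room_version` helper added in the next patch wraps this lookup, together with the error handling for unknown versions, in one place.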
diff --git a/synapse/handlers/message.py b/synapse/handlers/message.py index 9a0f661b9b0c..bdf16c84d3e0 100644 --- a/synapse/handlers/message.py +++ b/synapse/handlers/message.py @@ -459,7 +459,9 @@ def create_event( room_version = event_dict["content"]["room_version"] else: try: - room_version = yield self.store.get_room_version(event_dict["room_id"]) + room_version = yield self.store.get_room_version_id( + event_dict["room_id"] + ) except NotFoundError: raise AuthError(403, "Unknown room") @@ -788,7 +790,7 @@ def handle_new_client_event( ): room_version = event.content.get("room_version", RoomVersions.V1.identifier) else: - room_version = yield self.store.get_room_version(event.room_id) + room_version = yield self.store.get_room_version_id(event.room_id) event_allowed = yield self.third_party_event_rules.check_event_allowed( event, context @@ -963,7 +965,7 @@ def is_inviter_member_event(e): auth_events = yield self.store.get_events(auth_events_ids) auth_events = {(e.type, e.state_key): e for e in auth_events.values()} - room_version = yield self.store.get_room_version(event.room_id) + room_version = yield self.store.get_room_version_id(event.room_id) room_version_obj = KNOWN_ROOM_VERSIONS[room_version] if event_auth.check_redaction( diff --git a/synapse/handlers/pagination.py b/synapse/handlers/pagination.py index 71d76202c93d..caf841a6433b 100644 --- a/synapse/handlers/pagination.py +++ b/synapse/handlers/pagination.py @@ -281,7 +281,7 @@ async def purge_room(self, room_id): """Purge the given room from the database""" with (await self.pagination_lock.write(room_id)): # check we know about the room - await self.store.get_room_version(room_id) + await self.store.get_room_version_id(room_id) # first check that we have no users in this room joined = await defer.maybeDeferred( diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index a95b45d7914b..138239955739 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -178,7 +178,7 @@ def _upgrade_room(self, requester, old_room_id, new_version): }, token_id=requester.access_token_id, ) - old_room_version = yield self.store.get_room_version(old_room_id) + old_room_version = yield self.store.get_room_version_id(old_room_id) yield self.auth.check_from_context( old_room_version, tombstone_event, tombstone_context ) diff --git a/synapse/state/__init__.py b/synapse/state/__init__.py index cacd0c0c2bbf..fdd6bef6b4ce 100644 --- a/synapse/state/__init__.py +++ b/synapse/state/__init__.py @@ -394,7 +394,7 @@ def resolve_state_groups_for_events(self, room_id, event_ids): delta_ids=delta_ids, ) - room_version = yield self.store.get_room_version(room_id) + room_version = yield self.store.get_room_version_id(room_id) result = yield self._state_resolution_handler.resolve_state_groups( room_id, diff --git a/synapse/storage/data_stores/main/state.py b/synapse/storage/data_stores/main/state.py index 4167f83c9b68..67009425235f 100644 --- a/synapse/storage/data_stores/main/state.py +++ b/synapse/storage/data_stores/main/state.py @@ -62,7 +62,7 @@ def __init__(self, database: Database, db_conn, hs): super(StateGroupWorkerStore, self).__init__(database, db_conn, hs) @cached(max_entries=10000) - async def get_room_version(self, room_id: str) -> str: + async def get_room_version_id(self, room_id: str) -> str: """Get the room_version of a given room Raises: diff --git a/synapse/storage/persist_events.py b/synapse/storage/persist_events.py index 86166fd4c17b..af3fd67ab94b 100644 --- a/synapse/storage/persist_events.py +++ 
b/synapse/storage/persist_events.py @@ -661,7 +661,7 @@ async def _get_new_state_after_events( break if not room_version: - room_version = await self.main_store.get_room_version(room_id) + room_version = await self.main_store.get_room_version_id(room_id) logger.debug("calling resolve_state_groups from preserve_events") res = await self._state_resolution_handler.resolve_state_groups( diff --git a/tests/handlers/test_presence.py b/tests/handlers/test_presence.py index d4293b4312cb..e92e090c3c38 100644 --- a/tests/handlers/test_presence.py +++ b/tests/handlers/test_presence.py @@ -588,7 +588,7 @@ def _add_new_user(self, room_id, user_id): hostname = get_domain_from_id(user_id) - room_version = self.get_success(self.store.get_room_version(room_id)) + room_version = self.get_success(self.store.get_room_version_id(room_id)) builder = EventBuilder( state=self.state, diff --git a/tests/test_state.py b/tests/test_state.py index e0aae06be478..1e4449fa1cfe 100644 --- a/tests/test_state.py +++ b/tests/test_state.py @@ -119,7 +119,7 @@ def register_event_context(self, event, context): def register_event_id_state_group(self, event_id, state_group): self._event_to_state_group[event_id] = state_group - def get_room_version(self, room_id): + def get_room_version_id(self, room_id): return RoomVersions.V1.identifier diff --git a/tests/unittest.py b/tests/unittest.py index b56e249386a6..98bf27d39cd8 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -589,7 +589,9 @@ def inject_room_member(self, room: str, user: str, membership: Membership) -> No event_builder_factory = self.hs.get_event_builder_factory() event_creation_handler = self.hs.get_event_creation_handler() - room_version = self.get_success(self.hs.get_datastore().get_room_version(room)) + room_version = self.get_success( + self.hs.get_datastore().get_room_version_id(room) + ) builder = event_builder_factory.for_room_version( KNOWN_ROOM_VERSIONS[room_version], From 08f41a6f05f304f6a14ab0339cf7225ec3d9851b Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 31 Jan 2020 10:28:15 +0000 Subject: [PATCH 42/67] Add `get_room_version` method So that we can start factoring out some of this boilerplatey boilerplate. --- synapse/api/errors.py | 6 ++---- synapse/storage/data_stores/main/state.py | 25 ++++++++++++++++++++++- 2 files changed, 26 insertions(+), 5 deletions(-) diff --git a/synapse/api/errors.py b/synapse/api/errors.py index 1c9456e58314..0c2060160089 100644 --- a/synapse/api/errors.py +++ b/synapse/api/errors.py @@ -402,11 +402,9 @@ class UnsupportedRoomVersionError(SynapseError): """The client's request to create a room used a room version that the server does not support.""" - def __init__(self): + def __init__(self, msg="Homeserver does not support this room version"): super(UnsupportedRoomVersionError, self).__init__( - code=400, - msg="Homeserver does not support this room version", - errcode=Codes.UNSUPPORTED_ROOM_VERSION, + code=400, msg=msg, errcode=Codes.UNSUPPORTED_ROOM_VERSION, ) diff --git a/synapse/storage/data_stores/main/state.py b/synapse/storage/data_stores/main/state.py index 67009425235f..3d34103e6776 100644 --- a/synapse/storage/data_stores/main/state.py +++ b/synapse/storage/data_stores/main/state.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- # Copyright 2014-2016 OpenMarket Ltd +# Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -22,7 +23,8 @@ from twisted.internet import defer from synapse.api.constants import EventTypes, Membership -from synapse.api.errors import NotFoundError +from synapse.api.errors import NotFoundError, UnsupportedRoomVersionError +from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersion from synapse.events import EventBase from synapse.events.snapshot import EventContext from synapse.storage._base import SQLBaseStore @@ -61,6 +63,27 @@ class StateGroupWorkerStore(EventsWorkerStore, SQLBaseStore): def __init__(self, database: Database, db_conn, hs): super(StateGroupWorkerStore, self).__init__(database, db_conn, hs) + async def get_room_version(self, room_id: str) -> RoomVersion: + """Get the room_version of a given room + + Raises: + NotFoundError: if the room is unknown + + UnsupportedRoomVersionError: if the room uses an unknown room version. + Typically this happens if support for the room's version has been + removed from Synapse. + """ + room_version_id = await self.get_room_version_id(room_id) + v = KNOWN_ROOM_VERSIONS.get(room_version_id) + + if not v: + raise UnsupportedRoomVersionError( + "Room %s uses a room version %s which is no longer supported" + % (room_id, room_version_id) + ) + + return v + @cached(max_entries=10000) async def get_room_version_id(self, room_id: str) -> str: """Get the room_version of a given room From f6fa2c0b31e2b3695a91f34a06974f428bd5d45c Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Fri, 31 Jan 2020 10:30:29 +0000 Subject: [PATCH 43/67] newsfile --- changelog.d/6820.misc | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog.d/6820.misc diff --git a/changelog.d/6820.misc b/changelog.d/6820.misc new file mode 100644 index 000000000000..08aa80bcd91f --- /dev/null +++ b/changelog.d/6820.misc @@ -0,0 +1 @@ +Refactoring work in preparation for changing the event redaction algorithm. From 7f93eb190301024d373a573fc75a58f592469e9f Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Fri, 31 Jan 2020 13:47:43 +0000 Subject: [PATCH 44/67] pass room_version into compute_event_signature (#6807) --- changelog.d/6807.misc | 1 + synapse/crypto/event_signing.py | 28 ++++++++++++++++++++-------- synapse/handlers/federation.py | 5 ++++- 3 files changed, 25 insertions(+), 9 deletions(-) create mode 100644 changelog.d/6807.misc diff --git a/changelog.d/6807.misc b/changelog.d/6807.misc new file mode 100644 index 000000000000..08aa80bcd91f --- /dev/null +++ b/changelog.d/6807.misc @@ -0,0 +1 @@ +Refactoring work in preparation for changing the event redaction algorithm. diff --git a/synapse/crypto/event_signing.py b/synapse/crypto/event_signing.py index 1f2bccf70000..5f733c1cf557 100644 --- a/synapse/crypto/event_signing.py +++ b/synapse/crypto/event_signing.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- - +# # Copyright 2014-2016 OpenMarket Ltd +# Copyright 2020 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -17,6 +18,7 @@ import collections.abc import hashlib import logging +from typing import Dict from canonicaljson import encode_canonical_json from signedjson.sign import sign_json @@ -115,18 +117,28 @@ def compute_event_reference_hash(event, hash_algorithm=hashlib.sha256): return hashed.name, hashed.digest() -def compute_event_signature(event_dict, signature_name, signing_key): +def compute_event_signature( + room_version: RoomVersion, + event_dict: JsonDict, + signature_name: str, + signing_key: SigningKey, +) -> Dict[str, Dict[str, str]]: """Compute the signature of the event for the given name and key. Args: - event_dict (dict): The event as a dict - signature_name (str): The name of the entity signing the event + room_version: the version of the room that this event is in. + (the room version determines the redaction algorithm and hence the + json to be signed) + + event_dict: The event as a dict + + signature_name: The name of the entity signing the event (typically the server's hostname). - signing_key (syutil.crypto.SigningKey): The key to sign with + + signing_key: The key to sign with Returns: - dict[str, dict[str, str]]: Returns a dictionary in the same format of - an event's signatures field. + a dictionary in the same format of an event's signatures field. """ redact_json = prune_event_dict(event_dict) redact_json.pop("age_ts", None) @@ -161,5 +173,5 @@ def add_hashes_and_signatures( event_dict.setdefault("hashes", {})[name] = encode_base64(digest) event_dict["signatures"] = compute_event_signature( - event_dict, signature_name=signature_name, signing_key=signing_key + room_version, event_dict, signature_name=signature_name, signing_key=signing_key ) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 0f10c3e9b14f..c86d3177e9e2 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -1528,7 +1528,10 @@ def on_invite_request( event.signatures.update( compute_event_signature( - event.get_pdu_json(), self.hs.hostname, self.hs.config.signing_key[0] + room_version, + event.get_pdu_json(), + self.hs.hostname, + self.hs.config.signing_key[0], ) ) From 83b0ea047b355ade44985af123f4807faa7892ab Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 31 Jan 2020 14:04:15 +0000 Subject: [PATCH 45/67] Fix deleting of stale marker for device lists (#6819) We were in fact only deleting stale marker when we got an incremental update, rather than when we did a full resync. --- changelog.d/6819.misc | 1 + synapse/storage/data_stores/main/devices.py | 14 +++++++------- 2 files changed, 8 insertions(+), 7 deletions(-) create mode 100644 changelog.d/6819.misc diff --git a/changelog.d/6819.misc b/changelog.d/6819.misc new file mode 100644 index 000000000000..4f9a4ac7a593 --- /dev/null +++ b/changelog.d/6819.misc @@ -0,0 +1 @@ +Detect unknown remote devices and mark cache as stale. 
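The "stale marker" here is the `device_lists_remote_resync` row that flags a remote user's device cache as needing a full refetch. Its intended lifecycle, schematically (method names taken from elsewhere in this series; not a literal call sequence):

    # 1. an event arrives from a device we don't have cached
    await store.mark_remote_user_device_cache_as_stale(user_id)

    # 2. we later do a full resync of that user's devices from their server
    await device_list_updater.user_device_resync(user_id)

    # 3. the resync calls update_remote_device_list_cache(), which replaces the
    #    whole cache; that is the point at which the marker should be cleared,
    #    and the diff below moves the delete there.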
diff --git a/synapse/storage/data_stores/main/devices.py b/synapse/storage/data_stores/main/devices.py index a34415ff14ca..ea0503476f3f 100644 --- a/synapse/storage/data_stores/main/devices.py +++ b/synapse/storage/data_stores/main/devices.py @@ -940,13 +940,6 @@ def _update_remote_device_list_cache_entry_txn( lock=False, ) - # If we're replacing the remote user's device list cache presumably - # we've done a full resync, so we remove the entry that says we need - # to resync - self.db.simple_delete_txn( - txn, table="device_lists_remote_resync", keyvalues={"user_id": user_id}, - ) - def update_remote_device_list_cache(self, user_id, devices, stream_id): """Replace the entire cache of the remote user's devices. @@ -1003,6 +996,13 @@ def _update_remote_device_list_cache_txn(self, txn, user_id, devices, stream_id) lock=False, ) + # If we're replacing the remote user's device list cache presumably + # we've done a full resync, so we remove the entry that says we need + # to resync + self.db.simple_delete_txn( + txn, table="device_lists_remote_resync", keyvalues={"user_id": user_id}, + ) + @defer.inlineCallbacks def add_device_change_to_streams(self, user_id, device_ids, hosts): """Persist that a user's devices have been updated, and which hosts From ac0d45b78b647f6744b5850da88a4ca8c76666b9 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 31 Jan 2020 15:35:37 +0000 Subject: [PATCH 46/67] 1.10.0rc1 --- CHANGES.md | 44 ++++++++++++++++++++++++++++++++++++++++ UPGRADE.rst | 4 ++-- changelog.d/6729.misc | 1 - changelog.d/6734.bugfix | 1 - changelog.d/6748.misc | 1 - changelog.d/6751.misc | 1 - changelog.d/6757.misc | 1 - changelog.d/6761.bugfix | 1 - changelog.d/6767.bugfix | 1 - changelog.d/6771.bugfix | 1 - changelog.d/6775.doc | 1 - changelog.d/6776.misc | 1 - changelog.d/6786.misc | 1 - changelog.d/6787.feature | 1 - changelog.d/6788.misc | 1 - changelog.d/6790.feature | 1 - changelog.d/6792.misc | 1 - changelog.d/6794.feature | 1 - changelog.d/6795.bugfix | 1 - changelog.d/6796.bugfix | 1 - changelog.d/6797.misc | 1 - changelog.d/6799.bugfix | 1 - changelog.d/6800.bugfix | 1 - changelog.d/6801.bugfix | 1 - changelog.d/6802.misc | 1 - changelog.d/6803.misc | 1 - changelog.d/6805.misc | 1 - changelog.d/6806.misc | 1 - changelog.d/6807.misc | 1 - changelog.d/6810.misc | 1 - changelog.d/6811.bugfix | 1 - changelog.d/6816.misc | 1 - changelog.d/6819.misc | 1 - changelog.d/6820.misc | 1 - synapse/__init__.py | 2 +- 35 files changed, 47 insertions(+), 35 deletions(-) delete mode 100644 changelog.d/6729.misc delete mode 100644 changelog.d/6734.bugfix delete mode 100644 changelog.d/6748.misc delete mode 100644 changelog.d/6751.misc delete mode 100644 changelog.d/6757.misc delete mode 100644 changelog.d/6761.bugfix delete mode 100644 changelog.d/6767.bugfix delete mode 100644 changelog.d/6771.bugfix delete mode 100644 changelog.d/6775.doc delete mode 100644 changelog.d/6776.misc delete mode 100644 changelog.d/6786.misc delete mode 100644 changelog.d/6787.feature delete mode 100644 changelog.d/6788.misc delete mode 100644 changelog.d/6790.feature delete mode 100644 changelog.d/6792.misc delete mode 100644 changelog.d/6794.feature delete mode 100644 changelog.d/6795.bugfix delete mode 100644 changelog.d/6796.bugfix delete mode 100644 changelog.d/6797.misc delete mode 100644 changelog.d/6799.bugfix delete mode 100644 changelog.d/6800.bugfix delete mode 100644 changelog.d/6801.bugfix delete mode 100644 changelog.d/6802.misc delete mode 100644 changelog.d/6803.misc delete mode 100644 
changelog.d/6805.misc delete mode 100644 changelog.d/6806.misc delete mode 100644 changelog.d/6807.misc delete mode 100644 changelog.d/6810.misc delete mode 100644 changelog.d/6811.bugfix delete mode 100644 changelog.d/6816.misc delete mode 100644 changelog.d/6819.misc delete mode 100644 changelog.d/6820.misc diff --git a/CHANGES.md b/CHANGES.md index 4c413b72ee1a..6686cafa5b74 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,47 @@ +Synapse 1.10.0rc1 (2020-01-31) +============================== + +Features +-------- + +- Implement updated authorization rules for aliases events, from [MSC2260](https://github.com/matrix-org/matrix-doc/pull/2260). ([\#6787](https://github.com/matrix-org/synapse/issues/6787), [\#6790](https://github.com/matrix-org/synapse/issues/6790), [\#6794](https://github.com/matrix-org/synapse/issues/6794)) + + +Bugfixes +-------- + +- Warn if postgres database has a non-C locale, as that can cause issues when upgrading locales (e.g. due to upgrading OS). ([\#6734](https://github.com/matrix-org/synapse/issues/6734)) +- Minor fixes to `PUT /_synapse/admin/v2/users` admin api. ([\#6761](https://github.com/matrix-org/synapse/issues/6761)) +- Validate `client_secret` parameter using the regex provided by the Client-Server API, temporarily allowing `:` characters for older clients. The `:` character will be removed in a future release. ([\#6767](https://github.com/matrix-org/synapse/issues/6767)) +- Fix persisting redaction events that have been redacted (or otherwise don't have a redacts key). ([\#6771](https://github.com/matrix-org/synapse/issues/6771)) +- Fix outbound federation request metrics. ([\#6795](https://github.com/matrix-org/synapse/issues/6795)) +- Fix bug where querying a remote user's device keys that weren't cached resulted in only returning a single device. ([\#6796](https://github.com/matrix-org/synapse/issues/6796)) +- Fix race in federation sender worker that delayed sending of device updates. ([\#6799](https://github.com/matrix-org/synapse/issues/6799), [\#6800](https://github.com/matrix-org/synapse/issues/6800)) +- Fix bug where Synapse didn't invalidate cache of remote users' devices when Synapse left a room. ([\#6801](https://github.com/matrix-org/synapse/issues/6801)) +- Fix waking up other workers when remote server is detected to have come back online. ([\#6811](https://github.com/matrix-org/synapse/issues/6811)) + + +Improved Documentation +---------------------- + +- Clarify documentation related to `user_dir` and `federation_reader` workers. ([\#6775](https://github.com/matrix-org/synapse/issues/6775)) + + +Internal Changes +---------------- + +- Record room versions in the `rooms` table. ([\#6729](https://github.com/matrix-org/synapse/issues/6729), [\#6788](https://github.com/matrix-org/synapse/issues/6788), [\#6810](https://github.com/matrix-org/synapse/issues/6810)) +- Propagate cache invalidates from workers to other workers. ([\#6748](https://github.com/matrix-org/synapse/issues/6748)) +- Remove some unnecessary admin handler abstraction methods. ([\#6751](https://github.com/matrix-org/synapse/issues/6751)) +- Add some debugging for media storage providers. ([\#6757](https://github.com/matrix-org/synapse/issues/6757)) +- Detect unknown remote devices and mark cache as stale. ([\#6776](https://github.com/matrix-org/synapse/issues/6776), [\#6819](https://github.com/matrix-org/synapse/issues/6819)) +- Attempt to resync remote users' devices when detected as stale. 
([\#6786](https://github.com/matrix-org/synapse/issues/6786)) +- Delete current state from the database when server leaves a room. ([\#6792](https://github.com/matrix-org/synapse/issues/6792)) +- When a client asks for a remote user's device keys check if the local cache for that user has been marked as potentially stale. ([\#6797](https://github.com/matrix-org/synapse/issues/6797)) +- Add background update to clean out left rooms from current state. ([\#6802](https://github.com/matrix-org/synapse/issues/6802), [\#6816](https://github.com/matrix-org/synapse/issues/6816)) +- Refactoring work in preparation for changing the event redaction algorithm. ([\#6803](https://github.com/matrix-org/synapse/issues/6803), [\#6805](https://github.com/matrix-org/synapse/issues/6805), [\#6806](https://github.com/matrix-org/synapse/issues/6806), [\#6807](https://github.com/matrix-org/synapse/issues/6807), [\#6820](https://github.com/matrix-org/synapse/issues/6820)) + + Synapse 1.9.1 (2020-01-28) ========================== diff --git a/UPGRADE.rst b/UPGRADE.rst index 470246f12803..1c5db1c4a854 100644 --- a/UPGRADE.rst +++ b/UPGRADE.rst @@ -76,8 +76,8 @@ for example: dpkg -i matrix-synapse-py3_1.3.0+stretch1_amd64.deb -Upgrading to **** -=============================== +Upgrading to v1.10.0 +==================== Synapse will now log a warning on start up if used with a PostgreSQL database that has a non-recommended locale set. diff --git a/changelog.d/6729.misc b/changelog.d/6729.misc deleted file mode 100644 index 5537355bea8e..000000000000 --- a/changelog.d/6729.misc +++ /dev/null @@ -1 +0,0 @@ -Record room versions in the `rooms` table. diff --git a/changelog.d/6734.bugfix b/changelog.d/6734.bugfix deleted file mode 100644 index 79c6bab4d1c3..000000000000 --- a/changelog.d/6734.bugfix +++ /dev/null @@ -1 +0,0 @@ -Warn if postgres database has a non-C locale, as that can cause issues when upgrading locales (e.g. due to upgrading OS). diff --git a/changelog.d/6748.misc b/changelog.d/6748.misc deleted file mode 100644 index de320d4cd9a7..000000000000 --- a/changelog.d/6748.misc +++ /dev/null @@ -1 +0,0 @@ -Propagate cache invalidates from workers to other workers. diff --git a/changelog.d/6751.misc b/changelog.d/6751.misc deleted file mode 100644 index 722252052818..000000000000 --- a/changelog.d/6751.misc +++ /dev/null @@ -1 +0,0 @@ -Remove some unnecessary admin handler abstraction methods. \ No newline at end of file diff --git a/changelog.d/6757.misc b/changelog.d/6757.misc deleted file mode 100644 index a50c5e974a23..000000000000 --- a/changelog.d/6757.misc +++ /dev/null @@ -1 +0,0 @@ -Add some debugging for media storage providers. diff --git a/changelog.d/6761.bugfix b/changelog.d/6761.bugfix deleted file mode 100644 index 1c664c02dff3..000000000000 --- a/changelog.d/6761.bugfix +++ /dev/null @@ -1 +0,0 @@ -Minor fixes to `PUT /_synapse/admin/v2/users` admin api. diff --git a/changelog.d/6767.bugfix b/changelog.d/6767.bugfix deleted file mode 100644 index 63c7c63315e9..000000000000 --- a/changelog.d/6767.bugfix +++ /dev/null @@ -1 +0,0 @@ -Validate `client_secret` parameter using the regex provided by the Client-Server API, temporarily allowing `:` characters for older clients. The `:` character will be removed in a future release. 
diff --git a/changelog.d/6771.bugfix b/changelog.d/6771.bugfix deleted file mode 100644 index 623ba24acb83..000000000000 --- a/changelog.d/6771.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix persisting redaction events that have been redacted (or otherwise don't have a redacts key). diff --git a/changelog.d/6775.doc b/changelog.d/6775.doc deleted file mode 100644 index c6078ef82d13..000000000000 --- a/changelog.d/6775.doc +++ /dev/null @@ -1 +0,0 @@ -Clarify documentation related to `user_dir` and `federation_reader` workers. diff --git a/changelog.d/6776.misc b/changelog.d/6776.misc deleted file mode 100644 index 4f9a4ac7a593..000000000000 --- a/changelog.d/6776.misc +++ /dev/null @@ -1 +0,0 @@ -Detect unknown remote devices and mark cache as stale. diff --git a/changelog.d/6786.misc b/changelog.d/6786.misc deleted file mode 100644 index 94c692e53a7b..000000000000 --- a/changelog.d/6786.misc +++ /dev/null @@ -1 +0,0 @@ -Attempt to resync remote users' devices when detected as stale. diff --git a/changelog.d/6787.feature b/changelog.d/6787.feature deleted file mode 100644 index df9e4b77abde..000000000000 --- a/changelog.d/6787.feature +++ /dev/null @@ -1 +0,0 @@ -Implement updated authorization rules for aliases events, from [MSC2260](https://github.com/matrix-org/matrix-doc/pull/2260). diff --git a/changelog.d/6788.misc b/changelog.d/6788.misc deleted file mode 100644 index 5537355bea8e..000000000000 --- a/changelog.d/6788.misc +++ /dev/null @@ -1 +0,0 @@ -Record room versions in the `rooms` table. diff --git a/changelog.d/6790.feature b/changelog.d/6790.feature deleted file mode 100644 index df9e4b77abde..000000000000 --- a/changelog.d/6790.feature +++ /dev/null @@ -1 +0,0 @@ -Implement updated authorization rules for aliases events, from [MSC2260](https://github.com/matrix-org/matrix-doc/pull/2260). diff --git a/changelog.d/6792.misc b/changelog.d/6792.misc deleted file mode 100644 index fa31d509b39f..000000000000 --- a/changelog.d/6792.misc +++ /dev/null @@ -1 +0,0 @@ -Delete current state from the database when server leaves a room. diff --git a/changelog.d/6794.feature b/changelog.d/6794.feature deleted file mode 100644 index df9e4b77abde..000000000000 --- a/changelog.d/6794.feature +++ /dev/null @@ -1 +0,0 @@ -Implement updated authorization rules for aliases events, from [MSC2260](https://github.com/matrix-org/matrix-doc/pull/2260). diff --git a/changelog.d/6795.bugfix b/changelog.d/6795.bugfix deleted file mode 100644 index d1585653b149..000000000000 --- a/changelog.d/6795.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix outbound federation request metrics. diff --git a/changelog.d/6796.bugfix b/changelog.d/6796.bugfix deleted file mode 100644 index 206a15731151..000000000000 --- a/changelog.d/6796.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix bug where querying a remote user's device keys that weren't cached resulted in only returning a single device. diff --git a/changelog.d/6797.misc b/changelog.d/6797.misc deleted file mode 100644 index e9127bac517c..000000000000 --- a/changelog.d/6797.misc +++ /dev/null @@ -1 +0,0 @@ -When a client asks for a remote user's device keys check if the local cache for that user has been marked as potentially stale. diff --git a/changelog.d/6799.bugfix b/changelog.d/6799.bugfix deleted file mode 100644 index 322a2758affa..000000000000 --- a/changelog.d/6799.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix race in federation sender worker that delayed sending of device updates. 
diff --git a/changelog.d/6800.bugfix b/changelog.d/6800.bugfix deleted file mode 100644 index 322a2758affa..000000000000 --- a/changelog.d/6800.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix race in federation sender worker that delayed sending of device updates. diff --git a/changelog.d/6801.bugfix b/changelog.d/6801.bugfix deleted file mode 100644 index f401fa5d6930..000000000000 --- a/changelog.d/6801.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix bug where Synapse didn't invalidate cache of remote users' devices when Synapse left a room. diff --git a/changelog.d/6802.misc b/changelog.d/6802.misc deleted file mode 100644 index a77ba1d7a5ca..000000000000 --- a/changelog.d/6802.misc +++ /dev/null @@ -1 +0,0 @@ -Add background update to clean out left rooms from current state. diff --git a/changelog.d/6803.misc b/changelog.d/6803.misc deleted file mode 100644 index 08aa80bcd91f..000000000000 --- a/changelog.d/6803.misc +++ /dev/null @@ -1 +0,0 @@ -Refactoring work in preparation for changing the event redaction algorithm. diff --git a/changelog.d/6805.misc b/changelog.d/6805.misc deleted file mode 100644 index 08aa80bcd91f..000000000000 --- a/changelog.d/6805.misc +++ /dev/null @@ -1 +0,0 @@ -Refactoring work in preparation for changing the event redaction algorithm. diff --git a/changelog.d/6806.misc b/changelog.d/6806.misc deleted file mode 100644 index 08aa80bcd91f..000000000000 --- a/changelog.d/6806.misc +++ /dev/null @@ -1 +0,0 @@ -Refactoring work in preparation for changing the event redaction algorithm. diff --git a/changelog.d/6807.misc b/changelog.d/6807.misc deleted file mode 100644 index 08aa80bcd91f..000000000000 --- a/changelog.d/6807.misc +++ /dev/null @@ -1 +0,0 @@ -Refactoring work in preparation for changing the event redaction algorithm. diff --git a/changelog.d/6810.misc b/changelog.d/6810.misc deleted file mode 100644 index 5537355bea8e..000000000000 --- a/changelog.d/6810.misc +++ /dev/null @@ -1 +0,0 @@ -Record room versions in the `rooms` table. diff --git a/changelog.d/6811.bugfix b/changelog.d/6811.bugfix deleted file mode 100644 index 361f2fc2e816..000000000000 --- a/changelog.d/6811.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix waking up other workers when remote server is detected to have come back online. diff --git a/changelog.d/6816.misc b/changelog.d/6816.misc deleted file mode 100644 index a77ba1d7a5ca..000000000000 --- a/changelog.d/6816.misc +++ /dev/null @@ -1 +0,0 @@ -Add background update to clean out left rooms from current state. diff --git a/changelog.d/6819.misc b/changelog.d/6819.misc deleted file mode 100644 index 4f9a4ac7a593..000000000000 --- a/changelog.d/6819.misc +++ /dev/null @@ -1 +0,0 @@ -Detect unknown remote devices and mark cache as stale. diff --git a/changelog.d/6820.misc b/changelog.d/6820.misc deleted file mode 100644 index 08aa80bcd91f..000000000000 --- a/changelog.d/6820.misc +++ /dev/null @@ -1 +0,0 @@ -Refactoring work in preparation for changing the event redaction algorithm. 
diff --git a/synapse/__init__.py b/synapse/__init__.py index a236888d3cbe..bd942d3e1c88 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -36,7 +36,7 @@ except ImportError: pass -__version__ = "1.9.1" +__version__ = "1.10.0rc1" if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)): # We import here so that we don't have to install a bunch of deps when From 0f8ffa38b5b5137c74c6bf088f5f654553170e11 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 31 Jan 2020 15:38:16 +0000 Subject: [PATCH 47/67] Fix link in upgrade.rst --- UPGRADE.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/UPGRADE.rst b/UPGRADE.rst index 1c5db1c4a854..3cad8c283765 100644 --- a/UPGRADE.rst +++ b/UPGRADE.rst @@ -82,7 +82,7 @@ Upgrading to v1.10.0 Synapse will now log a warning on start up if used with a PostgreSQL database that has a non-recommended locale set. -See [docs/postgres.md](docs/postgres.md) for details. +See `docs/postgres.md <docs/postgres.md>`_ for details. Upgrading to v1.8.0 From 68ef7ebbef3f6ccc697d68f11b74a3f65613379f Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Fri, 31 Jan 2020 15:45:08 +0000 Subject: [PATCH 48/67] Update changelog --- CHANGES.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/CHANGES.md b/CHANGES.md index 6686cafa5b74..ab6fce3e7dac 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,10 +1,13 @@ Synapse 1.10.0rc1 (2020-01-31) ============================== +**WARNING**: As of this release Synapse validates `client_secret` parameters in the Client-Server API as per the spec. See [\#6766](https://github.com/matrix-org/synapse/issues/6766) for details. + + Features -------- -- Implement updated authorization rules for aliases events, from [MSC2260](https://github.com/matrix-org/matrix-doc/pull/2260). ([\#6787](https://github.com/matrix-org/synapse/issues/6787), [\#6790](https://github.com/matrix-org/synapse/issues/6790), [\#6794](https://github.com/matrix-org/synapse/issues/6794)) +- Add experimental support for updated authorization rules for aliases events, from [MSC2260](https://github.com/matrix-org/matrix-doc/pull/2260). ([\#6787](https://github.com/matrix-org/synapse/issues/6787), [\#6790](https://github.com/matrix-org/synapse/issues/6790), [\#6794](https://github.com/matrix-org/synapse/issues/6794)) Bugfixes From 6475382d807e1fed095d1e3fbd04884799ebd612 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Tue, 4 Feb 2020 17:25:54 +0000 Subject: [PATCH 49/67] Fix detecting unknown devices from remote encrypted events. (#6848) We were looking at the wrong event type (`m.room.encryption` vs `m.room.encrypted`). Also fixup the duplicate `EventTypes` entries. Introduced in #6776. --- changelog.d/6848.bugfix | 1 + synapse/api/constants.py | 3 +-- synapse/handlers/federation.py | 2 +- synapse/handlers/room.py | 2 +- synapse/handlers/stats.py | 2 +- synapse/storage/data_stores/main/stats.py | 4 ++-- 6 files changed, 7 insertions(+), 7 deletions(-) create mode 100644 changelog.d/6848.bugfix diff --git a/changelog.d/6848.bugfix b/changelog.d/6848.bugfix new file mode 100644 index 000000000000..65688e5d5726 --- /dev/null +++ b/changelog.d/6848.bugfix @@ -0,0 +1 @@ +Fix detecting unknown devices from remote encrypted events. 
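The two constants are easy to conflate because the underlying Matrix event types differ by only a few characters: `m.room.encryption` is the state event that switches encryption on for a room, while `m.room.encrypted` is an individual encrypted message. After the cleanup below the constants read roughly as follows (a trimmed sketch, not the full class):

    class EventTypes(object):
        RoomEncryption = "m.room.encryption"  # state event: enables E2EE for the room
        Encrypted = "m.room.encrypted"        # message event: an encrypted payload

The unknown-device check introduced in #6776 needs the latter, since `device_id` and `sender_key` only appear in the content of encrypted message events.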
diff --git a/synapse/api/constants.py b/synapse/api/constants.py index 0ade47e62404..cc8577552b16 100644 --- a/synapse/api/constants.py +++ b/synapse/api/constants.py @@ -77,12 +77,11 @@ class EventTypes(object): Aliases = "m.room.aliases" Redaction = "m.room.redaction" ThirdPartyInvite = "m.room.third_party_invite" - Encryption = "m.room.encryption" RelatedGroups = "m.room.related_groups" RoomHistoryVisibility = "m.room.history_visibility" CanonicalAlias = "m.room.canonical_alias" - Encryption = "m.room.encryption" + Encrypted = "m.room.encrypted" RoomAvatar = "m.room.avatar" RoomEncryption = "m.room.encryption" GuestAccess = "m.room.guest_access" diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index c86d3177e9e2..488200a2d1e3 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -752,7 +752,7 @@ async def _process_received_pdu( # For encrypted messages we check that we know about the sending device, # if we don't then we mark the device cache for that user as stale. - if event.type == EventTypes.Encryption: + if event.type == EventTypes.Encrypted: device_id = event.content.get("device_id") if device_id is not None: cached_devices = await self.store.get_cached_devices_for_user( diff --git a/synapse/handlers/room.py b/synapse/handlers/room.py index 138239955739..b609a65f47d8 100644 --- a/synapse/handlers/room.py +++ b/synapse/handlers/room.py @@ -360,7 +360,7 @@ def clone_existing_room( (EventTypes.RoomHistoryVisibility, ""), (EventTypes.GuestAccess, ""), (EventTypes.RoomAvatar, ""), - (EventTypes.Encryption, ""), + (EventTypes.RoomEncryption, ""), (EventTypes.ServerACL, ""), (EventTypes.RelatedGroups, ""), (EventTypes.PowerLevels, ""), diff --git a/synapse/handlers/stats.py b/synapse/handlers/stats.py index 7f7d56390ea5..68e6edace530 100644 --- a/synapse/handlers/stats.py +++ b/synapse/handlers/stats.py @@ -286,7 +286,7 @@ def _handle_deltas(self, deltas): room_state["history_visibility"] = event_content.get( "history_visibility" ) - elif typ == EventTypes.Encryption: + elif typ == EventTypes.RoomEncryption: room_state["encryption"] = event_content.get("algorithm") elif typ == EventTypes.Name: room_state["name"] = event_content.get("name") diff --git a/synapse/storage/data_stores/main/stats.py b/synapse/storage/data_stores/main/stats.py index 7bc186e9a1c8..7af1495e4713 100644 --- a/synapse/storage/data_stores/main/stats.py +++ b/synapse/storage/data_stores/main/stats.py @@ -744,7 +744,7 @@ def _fetch_current_state_stats(txn): EventTypes.Create, EventTypes.JoinRules, EventTypes.RoomHistoryVisibility, - EventTypes.Encryption, + EventTypes.RoomEncryption, EventTypes.Name, EventTypes.Topic, EventTypes.RoomAvatar, @@ -816,7 +816,7 @@ def _fetch_current_state_stats(txn): room_state["history_visibility"] = event.content.get( "history_visibility" ) - elif event.type == EventTypes.Encryption: + elif event.type == EventTypes.RoomEncryption: room_state["encryption"] = event.content.get("algorithm") elif event.type == EventTypes.Name: room_state["name"] = event.content.get("name") From 60d06724268891ad3b1e9dc6fe7cd080f9ba21b7 Mon Sep 17 00:00:00 2001 From: Hubert Chathi Date: Tue, 4 Feb 2020 12:03:54 -0500 Subject: [PATCH 50/67] Merge pull request #6844 from matrix-org/uhoreg/cross_signing_fix_device_fed add device signatures to device key query results --- changelog.d/6844.bugfix | 1 + synapse/storage/data_stores/main/devices.py | 10 ++++++++++ 2 files changed, 11 insertions(+) create mode 100644 changelog.d/6844.bugfix diff --git 
a/changelog.d/6844.bugfix b/changelog.d/6844.bugfix new file mode 100644 index 000000000000..e84aa1029f56 --- /dev/null +++ b/changelog.d/6844.bugfix @@ -0,0 +1 @@ +Fix an issue with cross-signing where device signatures were not sent to remote servers. diff --git a/synapse/storage/data_stores/main/devices.py b/synapse/storage/data_stores/main/devices.py index ea0503476f3f..b7617efb80a8 100644 --- a/synapse/storage/data_stores/main/devices.py +++ b/synapse/storage/data_stores/main/devices.py @@ -320,6 +320,11 @@ def _get_device_update_edus_by_remote(self, destination, from_stream_id, query_m device_display_name = device.get("device_display_name", None) if device_display_name: result["device_display_name"] = device_display_name + if "signatures" in device: + for sig_user_id, sigs in device["signatures"].items(): + result["keys"].setdefault("signatures", {}).setdefault( + sig_user_id, {} + ).update(sigs) else: result["deleted"] = True @@ -524,6 +529,11 @@ def _get_devices_with_keys_by_user_txn(self, txn, user_id): device_display_name = device.get("device_display_name", None) if device_display_name: result["device_display_name"] = device_display_name + if "signatures" in device: + for sig_user_id, sigs in device["signatures"].items(): + result["keys"].setdefault("signatures", {}).setdefault( + sig_user_id, {} + ).update(sigs) results.append(result) From a58860e4802c31680ba43e59ec537984af9f5637 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Wed, 5 Feb 2020 14:02:39 +0000 Subject: [PATCH 51/67] Check sender_key matches on inbound encrypted events. (#6850) If they don't then the device lists are probably out of sync. --- changelog.d/6850.misc | 1 + synapse/handlers/device.py | 8 +++- synapse/handlers/federation.py | 72 ++++++++++++++++++++++++++++------ 3 files changed, 67 insertions(+), 14 deletions(-) create mode 100644 changelog.d/6850.misc diff --git a/changelog.d/6850.misc b/changelog.d/6850.misc new file mode 100644 index 000000000000..418569113f35 --- /dev/null +++ b/changelog.d/6850.misc @@ -0,0 +1 @@ +Detect unexpected sender keys on inbound encrypted events and resync device lists. diff --git a/synapse/handlers/device.py b/synapse/handlers/device.py index 26ef5e150c92..a9bd431486a7 100644 --- a/synapse/handlers/device.py +++ b/synapse/handlers/device.py @@ -598,7 +598,13 @@ def _handle_device_updates(self, user_id): # happens if we've missed updates. resync = yield self._need_to_do_resync(user_id, pending_updates) - logger.debug("Need to re-sync devices for %r? %r", user_id, resync) + if logger.isEnabledFor(logging.INFO): + logger.info( + "Received device list update for %s, requiring resync: %s. Devices: %s", + user_id, + resync, + ", ".join(u[0] for u in pending_updates), + ) if resync: yield self.user_device_resync(user_id) diff --git a/synapse/handlers/federation.py b/synapse/handlers/federation.py index 488200a2d1e3..e9441bbeff45 100644 --- a/synapse/handlers/federation.py +++ b/synapse/handlers/federation.py @@ -754,27 +754,73 @@ async def _process_received_pdu( # if we don't then we mark the device cache for that user as stale. if event.type == EventTypes.Encrypted: device_id = event.content.get("device_id") + sender_key = event.content.get("sender_key") + + cached_devices = await self.store.get_cached_devices_for_user(event.sender) + + resync = False # Whether we should resync device lists. 
+ + device = None if device_id is not None: - cached_devices = await self.store.get_cached_devices_for_user( - event.sender - ) - if device_id not in cached_devices: + device = cached_devices.get(device_id) + if device is None: logger.info( "Received event from remote device not in our cache: %s %s", event.sender, device_id, ) - await self.store.mark_remote_user_device_cache_as_stale( - event.sender + resync = True + + # We also check if the `sender_key` matches what we expect. + if sender_key is not None: + # Figure out what sender key we're expecting. If we know the + # device and recognize the algorithm then we can work out the + # exact key to expect. Otherwise check it matches any key we + # have for that device. + if device: + keys = device.get("keys", {}).get("keys", {}) + + if event.content.get("algorithm") == "m.megolm.v1.aes-sha2": + # For this algorithm we expect a curve25519 key. + key_name = "curve25519:%s" % (device_id,) + current_keys = [keys.get(key_name)] + else: + # We don't know understand the algorithm, so we just + # check it matches a key for the device. + current_keys = keys.values() + elif device_id: + # We don't have any keys for the device ID. + current_keys = [] + else: + # The event didn't include a device ID, so we just look for + # keys across all devices. + current_keys = ( + key + for device in cached_devices + for key in device.get("keys", {}).get("keys", {}).values() ) - # Immediately attempt a resync in the background - if self.config.worker_app: - return run_in_background(self._user_device_resync, event.sender) - else: - return run_in_background( - self._device_list_updater.user_device_resync, event.sender - ) + # We now check that the sender key matches (one of) the expected + # keys. + if sender_key not in current_keys: + logger.info( + "Received event from remote device with unexpected sender key: %s %s: %s", + event.sender, + device_id or "", + sender_key, + ) + resync = True + + if resync: + await self.store.mark_remote_user_device_cache_as_stale(event.sender) + + # Immediately attempt a resync in the background + if self.config.worker_app: + return run_in_background(self._user_device_resync, event.sender) + else: + return run_in_background( + self._device_list_updater.user_device_resync, event.sender + ) @log_function async def backfill(self, dest, room_id, limit, extremities): From 6a7e90ad782bddce95fa0c7d93e56291aa31c33d Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 6 Feb 2020 10:40:08 +0000 Subject: [PATCH 52/67] 1.10.0rc2 --- CHANGES.md | 16 ++++++++++++++++ changelog.d/6844.bugfix | 1 - changelog.d/6848.bugfix | 1 - changelog.d/6850.misc | 1 - synapse/__init__.py | 2 +- 5 files changed, 17 insertions(+), 4 deletions(-) delete mode 100644 changelog.d/6844.bugfix delete mode 100644 changelog.d/6848.bugfix delete mode 100644 changelog.d/6850.misc diff --git a/CHANGES.md b/CHANGES.md index ab6fce3e7dac..ee0e5d25e4de 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,19 @@ +Synapse 1.10.0rc2 (2020-02-06) +============================== + +Bugfixes +-------- + +- Fix an issue with cross-signing where device signatures were not sent to remote servers. ([\#6844](https://github.com/matrix-org/synapse/issues/6844)) +- Fix detecting unknown devices from remote encrypted events. ([\#6848](https://github.com/matrix-org/synapse/issues/6848)) + + +Internal Changes +---------------- + +- Detect unexpected sender keys on inbound encrypted events and resync device lists. 
([\#6850](https://github.com/matrix-org/synapse/issues/6850)) + + Synapse 1.10.0rc1 (2020-01-31) ============================== diff --git a/changelog.d/6844.bugfix b/changelog.d/6844.bugfix deleted file mode 100644 index e84aa1029f56..000000000000 --- a/changelog.d/6844.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix an issue with cross-signing where device signatures were not sent to remote servers. diff --git a/changelog.d/6848.bugfix b/changelog.d/6848.bugfix deleted file mode 100644 index 65688e5d5726..000000000000 --- a/changelog.d/6848.bugfix +++ /dev/null @@ -1 +0,0 @@ -Fix detecting unknown devices from remote encrypted events. diff --git a/changelog.d/6850.misc b/changelog.d/6850.misc deleted file mode 100644 index 418569113f35..000000000000 --- a/changelog.d/6850.misc +++ /dev/null @@ -1 +0,0 @@ -Detect unexpected sender keys on inbound encrypted events and resync device lists. diff --git a/synapse/__init__.py b/synapse/__init__.py index bd942d3e1c88..4f1859bd57bc 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -36,7 +36,7 @@ except ImportError: pass -__version__ = "1.10.0rc1" +__version__ = "1.10.0rc2" if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)): # We import here so that we don't have to install a bunch of deps when From 4a50b674f2fa730fd3e85fe5d59b84b00e34bfc7 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 6 Feb 2020 10:45:29 +0000 Subject: [PATCH 53/67] Update changelog --- CHANGES.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGES.md b/CHANGES.md index ee0e5d25e4de..e56f73848190 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -17,7 +17,7 @@ Internal Changes Synapse 1.10.0rc1 (2020-01-31) ============================== -**WARNING**: As of this release Synapse validates `client_secret` parameters in the Client-Server API as per the spec. See [\#6766](https://github.com/matrix-org/synapse/issues/6766) for details. +**WARNING to client developers**: As of this release Synapse validates `client_secret` parameters in the Client-Server API as per the spec. See [\#6766](https://github.com/matrix-org/synapse/issues/6766) for details. Features From b5176166b7b62f3c04c21f12a775982c99a90c9c Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 6 Feb 2020 10:51:02 +0000 Subject: [PATCH 54/67] Update changelog --- CHANGES.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index e56f73848190..17c7c91c620d 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -5,13 +5,13 @@ Bugfixes -------- - Fix an issue with cross-signing where device signatures were not sent to remote servers. ([\#6844](https://github.com/matrix-org/synapse/issues/6844)) -- Fix detecting unknown devices from remote encrypted events. ([\#6848](https://github.com/matrix-org/synapse/issues/6848)) +- Fix to the unknown remote device detection which was indroduced in 1.10.rc1. ([\#6848](https://github.com/matrix-org/synapse/issues/6848)) Internal Changes ---------------- -- Detect unexpected sender keys on inbound encrypted events and resync device lists. ([\#6850](https://github.com/matrix-org/synapse/issues/6850)) +- Detect unexpected sender keys on remote encrypted events and resync device lists. 
([\#6850](https://github.com/matrix-org/synapse/issues/6850)) Synapse 1.10.0rc1 (2020-01-31) From f663118155dbc242f99165f978817ef9dbeb9fd1 Mon Sep 17 00:00:00 2001 From: Erik Johnston Date: Thu, 6 Feb 2020 10:52:25 +0000 Subject: [PATCH 55/67] Update changelog --- CHANGES.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGES.md b/CHANGES.md index 17c7c91c620d..c2aa7359080b 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -5,7 +5,7 @@ Bugfixes -------- - Fix an issue with cross-signing where device signatures were not sent to remote servers. ([\#6844](https://github.com/matrix-org/synapse/issues/6844)) -- Fix to the unknown remote device detection which was indroduced in 1.10.rc1. ([\#6848](https://github.com/matrix-org/synapse/issues/6848)) +- Fix to the unknown remote device detection which was introduced in 1.10.rc1. ([\#6848](https://github.com/matrix-org/synapse/issues/6848)) Internal Changes From fe73f0d533dcdcf11f069d89ebbff2ce88d16bb3 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Mon, 10 Feb 2020 00:41:20 +0000 Subject: [PATCH 56/67] Update setuptools for python 3.5 tests (#6880) Workaround for jaraco/zipp#40 --- .buildkite/scripts/test_old_deps.sh | 18 ++++++++++++++++++ changelog.d/6880.misc | 1 + 2 files changed, 19 insertions(+) create mode 100755 .buildkite/scripts/test_old_deps.sh create mode 100644 changelog.d/6880.misc diff --git a/.buildkite/scripts/test_old_deps.sh b/.buildkite/scripts/test_old_deps.sh new file mode 100755 index 000000000000..dfd71b251123 --- /dev/null +++ b/.buildkite/scripts/test_old_deps.sh @@ -0,0 +1,18 @@ +#!/bin/bash + +# this script is run by buildkite in a plain `xenial` container; it installs the +# minimal requirements for tox and hands over to the py35-old tox environment. + +set -ex + +apt-get update +apt-get install -y python3.5 python3.5-dev python3-pip libxml2-dev libxslt-dev zlib1g-dev + +# workaround for https://github.com/jaraco/zipp/issues/40 +python3.5 -m pip install 'setuptools>=34.4.0' + +python3.5 -m pip install tox + +export LANG="C.UTF-8" + +exec tox -e py35-old,combine diff --git a/changelog.d/6880.misc b/changelog.d/6880.misc new file mode 100644 index 000000000000..8344a6ed1eed --- /dev/null +++ b/changelog.d/6880.misc @@ -0,0 +1 @@ +Fix continuous integration failures with old versions of `pip`, which were introduced by a release of the `zipp` library. From 8e64c5a24c26a733c0cfd3e997ea4079ae457096 Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Mon, 10 Feb 2020 09:36:23 +0000 Subject: [PATCH 57/67] filter out m.room.aliases from the CS API until a better solution is specced (#6878) We're in the middle of properly mitigating spam caused by malicious aliases being added to a room. However, until this work fully lands, we temporarily filter out all m.room.aliases events from /sync and /messages on the CS API, to remove abusive aliases. This is considered acceptable as m.room.aliases events were never a reliable record of the given alias->id mapping and were purely informational, and in their current state do more harm than good. --- changelog.d/6878.feature | 1 + synapse/visibility.py | 7 +++++++ 2 files changed, 8 insertions(+) create mode 100644 changelog.d/6878.feature diff --git a/changelog.d/6878.feature b/changelog.d/6878.feature new file mode 100644 index 000000000000..af3e958a4394 --- /dev/null +++ b/changelog.d/6878.feature @@ -0,0 +1 @@ +Filter out m.room.aliases from the CS API to mitigate abuse while a better solution is specced. 
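The mitigation sits in the visibility layer: `filter_events_for_client` runs each event through an `allowed()` predicate, and returning `None` drops the event from the response. Schematically (a heavily trimmed sketch of the predicate, not the full function):

    from synapse.api.constants import EventTypes

    def allowed(event):
        # ... existing membership / history-visibility checks ...
        if event.type == EventTypes.Aliases:   # "m.room.aliases"
            return None                        # omit from /messages and /sync responses
        return event

Clients therefore simply stop seeing `m.room.aliases` events in timelines until the MSC2261 work lands.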
diff --git a/synapse/visibility.py b/synapse/visibility.py index 100dc47a8a04..d0abd8f04ffc 100644 --- a/synapse/visibility.py +++ b/synapse/visibility.py @@ -122,6 +122,13 @@ def allowed(event): if not event.is_state() and event.sender in ignore_list: return None + # Until MSC2261 has landed we can't redact malicious alias events, so for + # now we temporarily filter out m.room.aliases entirely to mitigate + # abuse, while we spec a better solution to advertising aliases + # on rooms. + if event.type == EventTypes.Aliases: + return None + # Don't try to apply the room's retention policy if the event is a state event, as # MSC1763 states that retention is only considered for non-state events. if apply_retention_policies and not event.is_state(): From 3de57e706209d98a331265e6d5a51bfd24939a3b Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Mon, 10 Feb 2020 09:56:42 +0000 Subject: [PATCH 58/67] 1.10.0rc3 --- CHANGES.md | 15 +++++++++++++++ changelog.d/6878.feature | 1 - changelog.d/6880.misc | 1 - synapse/__init__.py | 2 +- 4 files changed, 16 insertions(+), 3 deletions(-) delete mode 100644 changelog.d/6878.feature delete mode 100644 changelog.d/6880.misc diff --git a/CHANGES.md b/CHANGES.md index c2aa7359080b..4a81a04627d8 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,18 @@ +Synapse 1.10.0rc3 (2020-02-10) +============================== + +Features +-------- + +- Filter out m.room.aliases from the CS API to mitigate abuse while a better solution is specced. ([\#6878](https://github.com/matrix-org/synapse/issues/6878)) + + +Internal Changes +---------------- + +- Fix continuous integration failures with old versions of `pip`, which were introduced by a release of the `zipp` library. ([\#6880](https://github.com/matrix-org/synapse/issues/6880)) + + Synapse 1.10.0rc2 (2020-02-06) ============================== diff --git a/changelog.d/6878.feature b/changelog.d/6878.feature deleted file mode 100644 index af3e958a4394..000000000000 --- a/changelog.d/6878.feature +++ /dev/null @@ -1 +0,0 @@ -Filter out m.room.aliases from the CS API to mitigate abuse while a better solution is specced. diff --git a/changelog.d/6880.misc b/changelog.d/6880.misc deleted file mode 100644 index 8344a6ed1eed..000000000000 --- a/changelog.d/6880.misc +++ /dev/null @@ -1 +0,0 @@ -Fix continuous integration failures with old versions of `pip`, which were introduced by a release of the `zipp` library. diff --git a/synapse/__init__.py b/synapse/__init__.py index 4f1859bd57bc..36c0cf557acc 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -36,7 +36,7 @@ except ImportError: pass -__version__ = "1.10.0rc2" +__version__ = "1.10.0rc3" if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)): # We import here so that we don't have to install a bunch of deps when From 01209382fbb5ccf7bdef71f1d42f754b125f625a Mon Sep 17 00:00:00 2001 From: Matthew Hodgson Date: Mon, 10 Feb 2020 18:07:35 +0000 Subject: [PATCH 59/67] filter out m.room.aliases from /sync state blocks (#6884) We forgot to filter out aliases from /sync state blocks as well as the timeline. --- changelog.d/6884.feature | 1 + synapse/handlers/sync.py | 1 + 2 files changed, 2 insertions(+) create mode 100644 changelog.d/6884.feature diff --git a/changelog.d/6884.feature b/changelog.d/6884.feature new file mode 100644 index 000000000000..4f6b9630b821 --- /dev/null +++ b/changelog.d/6884.feature @@ -0,0 +1 @@ +Filter out m.room.aliases from /sync state blocks until a full fix lands. 
diff --git a/synapse/handlers/sync.py b/synapse/handlers/sync.py index cd95f85e3f5b..2b62fd83fd04 100644 --- a/synapse/handlers/sync.py +++ b/synapse/handlers/sync.py @@ -883,6 +883,7 @@ async def compute_state_delta( for e in sync_config.filter_collection.filter_room_state( list(state.values()) ) + if e.type != EventTypes.Aliases # until MSC2261 or alternative solution } async def unread_notifs_for_room_id(self, room_id, sync_config): From 3edc65dd24f1916501e24e2353b4523bc2e3635c Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 11 Feb 2020 10:43:16 +0000 Subject: [PATCH 60/67] 1.10.0rc4 --- CHANGES.md | 9 +++++++++ changelog.d/6884.feature | 1 - synapse/__init__.py | 2 +- 3 files changed, 10 insertions(+), 2 deletions(-) delete mode 100644 changelog.d/6884.feature diff --git a/CHANGES.md b/CHANGES.md index 4a81a04627d8..f776560de76e 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,12 @@ +Synapse 1.10.0rc4 (2020-02-11) +============================== + +Features +-------- + +- Filter out m.room.aliases from /sync state blocks until a full fix lands. ([\#6884](https://github.com/matrix-org/synapse/issues/6884)) + + Synapse 1.10.0rc3 (2020-02-10) ============================== diff --git a/changelog.d/6884.feature b/changelog.d/6884.feature deleted file mode 100644 index 4f6b9630b821..000000000000 --- a/changelog.d/6884.feature +++ /dev/null @@ -1 +0,0 @@ -Filter out m.room.aliases from /sync state blocks until a full fix lands. diff --git a/synapse/__init__.py b/synapse/__init__.py index 36c0cf557acc..cc69ff0c41ca 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -36,7 +36,7 @@ except ImportError: pass -__version__ = "1.10.0rc3" +__version__ = "1.10.0rc4" if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)): # We import here so that we don't have to install a bunch of deps when From 9e45d573d499796bd1878d026cdd820347c10c00 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Tue, 11 Feb 2020 10:50:55 +0000 Subject: [PATCH 61/67] changelog formatting --- CHANGES.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index f776560de76e..8f252bb1b2bb 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -4,7 +4,7 @@ Synapse 1.10.0rc4 (2020-02-11) Features -------- -- Filter out m.room.aliases from /sync state blocks until a full fix lands. ([\#6884](https://github.com/matrix-org/synapse/issues/6884)) +- Filter out `m.room.aliase`s from `/sync` state blocks until a full fix lands. ([\#6884](https://github.com/matrix-org/synapse/issues/6884)) Synapse 1.10.0rc3 (2020-02-10) @@ -13,7 +13,7 @@ Synapse 1.10.0rc3 (2020-02-10) Features -------- -- Filter out m.room.aliases from the CS API to mitigate abuse while a better solution is specced. ([\#6878](https://github.com/matrix-org/synapse/issues/6878)) +- Filter out `m.room.aliases` from the CS API to mitigate abuse while a better solution is specced. 
([\#6878](https://github.com/matrix-org/synapse/issues/6878)) Internal Changes From aa7e4291ee55f4c1d817bfb4bf87930ae2f40aab Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Tue, 11 Feb 2020 10:51:13 +0000 Subject: [PATCH 62/67] Update CHANGES.md --- CHANGES.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGES.md b/CHANGES.md index 8f252bb1b2bb..4248b7065b6b 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -4,7 +4,7 @@ Synapse 1.10.0rc4 (2020-02-11) Features -------- -- Filter out `m.room.aliase`s from `/sync` state blocks until a full fix lands. ([\#6884](https://github.com/matrix-org/synapse/issues/6884)) +- Filter out `m.room.aliases` from `/sync` state blocks until a full fix lands. ([\#6884](https://github.com/matrix-org/synapse/issues/6884)) Synapse 1.10.0rc3 (2020-02-10) From 78d170262c7349cbb85baa4ad0749ccb5e2794e4 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Tue, 11 Feb 2020 10:53:25 +0000 Subject: [PATCH 63/67] changelog wording --- CHANGES.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index 4248b7065b6b..ab031fccdc49 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,10 +1,10 @@ Synapse 1.10.0rc4 (2020-02-11) ============================== -Features +Bugfixes -------- -- Filter out `m.room.aliases` from `/sync` state blocks until a full fix lands. ([\#6884](https://github.com/matrix-org/synapse/issues/6884)) +- Fix the filtering introduced in 1.10.0rc3 to also apply to the state blocks returned by `/sync`. ([\#6884](https://github.com/matrix-org/synapse/issues/6884)) Synapse 1.10.0rc3 (2020-02-10) From 856b2a9555b6c5a70b98ad2a1e14b7dbee0949f3 Mon Sep 17 00:00:00 2001 From: Richard van der Hoff Date: Tue, 11 Feb 2020 11:06:28 +0000 Subject: [PATCH 64/67] 1.10.0rc5 --- CHANGES.md | 6 +++++- synapse/__init__.py | 2 +- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index ab031fccdc49..1995a70b1911 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,4 +1,4 @@ -Synapse 1.10.0rc4 (2020-02-11) +Synapse 1.10.0rc5 (2020-02-11) ============================== Bugfixes @@ -6,6 +6,10 @@ Bugfixes - Fix the filtering introduced in 1.10.0rc3 to also apply to the state blocks returned by `/sync`. ([\#6884](https://github.com/matrix-org/synapse/issues/6884)) +Synapse 1.10.0rc4 (2020-02-11) +============================== + +This release candidate was built incorrectly and is superceded by 1.10.0rc5. 
Synapse 1.10.0rc3 (2020-02-10) ============================== diff --git a/synapse/__init__.py b/synapse/__init__.py index cc69ff0c41ca..ba339004bafa 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -36,7 +36,7 @@ except ImportError: pass -__version__ = "1.10.0rc4" +__version__ = "1.10.0rc5" if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)): # We import here so that we don't have to install a bunch of deps when From 3dd2b5f5e313c0350a1a64f1d5e40c8bdc46029c Mon Sep 17 00:00:00 2001 From: Richard van der Hoff <1389908+richvdh@users.noreply.github.com> Date: Wed, 12 Feb 2020 12:02:53 +0000 Subject: [PATCH 65/67] bump the version of Alpine Linux used in the docker images (#6897) --- changelog.d/6897.docker | 1 + docker/Dockerfile | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/6897.docker diff --git a/changelog.d/6897.docker b/changelog.d/6897.docker new file mode 100644 index 000000000000..8e7bcd719abd --- /dev/null +++ b/changelog.d/6897.docker @@ -0,0 +1 @@ +Update the docker images to Alpine Linux 3.11. diff --git a/docker/Dockerfile b/docker/Dockerfile index e5a0d6d5f662..93d61739ae81 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -16,7 +16,7 @@ ARG PYTHON_VERSION=3.7 ### ### Stage 0: builder ### -FROM docker.io/python:${PYTHON_VERSION}-alpine3.10 as builder +FROM docker.io/python:${PYTHON_VERSION}-alpine3.11 as builder # install the OS build deps From fdb816713aacb295fe804290f5043bdac866dcf5 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Wed, 12 Feb 2020 12:19:19 +0000 Subject: [PATCH 66/67] 1.10.0 --- CHANGES.md | 9 +++++++++ changelog.d/6897.docker | 1 - debian/changelog | 6 ++++++ synapse/__init__.py | 2 +- 4 files changed, 16 insertions(+), 2 deletions(-) delete mode 100644 changelog.d/6897.docker diff --git a/CHANGES.md b/CHANGES.md index 1995a70b1911..603a50054e4d 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,3 +1,12 @@ +Synapse 1.10.0 (2020-02-12) +=========================== + +Updates to the Docker image +--------------------------- + +- Update the docker images to Alpine Linux 3.11. ([\#6897](https://github.com/matrix-org/synapse/issues/6897)) + + Synapse 1.10.0rc5 (2020-02-11) ============================== diff --git a/changelog.d/6897.docker b/changelog.d/6897.docker deleted file mode 100644 index 8e7bcd719abd..000000000000 --- a/changelog.d/6897.docker +++ /dev/null @@ -1 +0,0 @@ -Update the docker images to Alpine Linux 3.11. diff --git a/debian/changelog b/debian/changelog index 74eb29c5eef7..cdc3b1a5c2dc 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,9 @@ +matrix-synapse-py3 (1.10.0) stable; urgency=medium + + * New synapse release 1.10.0. + + -- Synapse Packaging team Wed, 12 Feb 2020 12:18:54 +0000 + matrix-synapse-py3 (1.9.1) stable; urgency=medium * New synapse release 1.9.1. 
diff --git a/synapse/__init__.py b/synapse/__init__.py index ba339004bafa..9d285fca3889 100644 --- a/synapse/__init__.py +++ b/synapse/__init__.py @@ -36,7 +36,7 @@ except ImportError: pass -__version__ = "1.10.0rc5" +__version__ = "1.10.0" if bool(os.environ.get("SYNAPSE_TEST_PATCH_LOG_CONTEXTS", False)): # We import here so that we don't have to install a bunch of deps when From 7b8d654a6196d889c8e1c2a403f5176650216432 Mon Sep 17 00:00:00 2001 From: Brendan Abolivier Date: Wed, 12 Feb 2020 12:20:37 +0000 Subject: [PATCH 67/67] Move the warning at the top of the release changes --- CHANGES.md | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/CHANGES.md b/CHANGES.md index 603a50054e4d..0bce84f4001b 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,6 +1,8 @@ Synapse 1.10.0 (2020-02-12) =========================== +**WARNING to client developers**: As of this release Synapse validates `client_secret` parameters in the Client-Server API as per the spec. See [\#6766](https://github.com/matrix-org/synapse/issues/6766) for details. + Updates to the Docker image --------------------------- @@ -54,9 +56,6 @@ Internal Changes Synapse 1.10.0rc1 (2020-01-31) ============================== -**WARNING to client developers**: As of this release Synapse validates `client_secret` parameters in the Client-Server API as per the spec. See [\#6766](https://github.com/matrix-org/synapse/issues/6766) for details. - - Features --------