
Commit

Merge branch 'develop' into rav/catch_up_to_device_messages
richvdh authored Mar 21, 2024
2 parents a63ffc8 + db95b75 commit 0bf54b7
Showing 10 changed files with 79 additions and 66 deletions.
1 change: 1 addition & 0 deletions changelog.d/16985.misc
@@ -0,0 +1 @@
Allow containers building on top of Synapse's Complement container to use the included PostgreSQL cluster.
1 change: 1 addition & 0 deletions changelog.d/17002.doc
@@ -0,0 +1 @@
Remove recommendation for a specific poetry version from contributing guide.
1 change: 1 addition & 0 deletions changelog.d/17017.misc
@@ -0,0 +1 @@
Patch the db conn pool sooner in tests.
2 changes: 1 addition & 1 deletion docker/complement/conf/postgres.supervisord.conf
@@ -1,7 +1,7 @@
[program:postgres]
command=/usr/local/bin/prefix-log gosu postgres postgres

# Only start if START_POSTGRES=1
# Only start if START_POSTGRES=true
autostart=%(ENV_START_POSTGRES)s

# Lower priority number = starts first
5 changes: 3 additions & 2 deletions docker/complement/conf/start_for_complement.sh
@@ -32,8 +32,9 @@ case "$SYNAPSE_COMPLEMENT_DATABASE" in
;;

sqlite|"")
# Configure supervisord not to start Postgres, as we don't need it
export START_POSTGRES=false
# Set START_POSTGRES to false unless it has already been set
# (i.e. by another container image inheriting our own).
export START_POSTGRES=${START_POSTGRES:-false}
;;

*)
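The `${START_POSTGRES:-false}` expansion only applies the `false` default when the variable is unset or empty, so an image built on top of this one can export its own value before the script runs and keep it. For illustration only, a rough Python analogue of that precedence (the real logic stays in the shell script above; the variable name is the one from the script):

```python
import os

# Keep an inherited START_POSTGRES value; fall back to "false" only when it is
# unset or empty -- roughly what `${START_POSTGRES:-false}` does in the script.
start_postgres = os.environ.get("START_POSTGRES") or "false"
os.environ["START_POSTGRES"] = start_postgres
print(start_postgres)  # "true" if a parent image exported it, otherwise "false"
```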
2 changes: 1 addition & 1 deletion docs/development/contributing_guide.md
@@ -68,7 +68,7 @@ Of their installation methods, we recommend

```shell
pip install --user pipx
pipx install poetry==1.5.1 # Problems with Poetry 1.6, see https://github.com/matrix-org/synapse/issues/16147
pipx install poetry
```

but see poetry's [installation instructions](https://python-poetry.org/docs/#installation)
2 changes: 1 addition & 1 deletion docs/postgres.md
@@ -182,7 +182,7 @@ synapse_port_db --sqlite-database homeserver.db.snapshot \
--postgres-config homeserver-postgres.yaml
```

The flag `--curses` displays a coloured curses progress UI.
The flag `--curses` displays a coloured curses progress UI. (NOTE: if your terminal is too small, the script will error out.)

If the script took a long time to complete, or time has otherwise passed
since the original snapshot was taken, repeat the previous steps with a
2 changes: 1 addition & 1 deletion docs/setup/installation.md
@@ -26,7 +26,7 @@ for most users.
#### Docker images and Ansible playbooks

There is an official synapse image available at
<https://hub.docker.com/r/vectorim/synapse> or at [`ghcr.io/element-hq/synapse`](https://ghcr.io/element-hq/synapse)
<https://hub.docker.com/r/matrixdotorg/synapse> or at [`ghcr.io/element-hq/synapse`](https://ghcr.io/element-hq/synapse)
which can be used with the docker-compose file available at
[contrib/docker](https://github.com/element-hq/synapse/tree/develop/contrib/docker).
Further information on this including configuration options is available in the README
17 changes: 10 additions & 7 deletions poetry.lock

Some generated files are not rendered by default.

112 changes: 59 additions & 53 deletions tests/server.py
@@ -47,14 +47,15 @@
Union,
cast,
)
from unittest.mock import Mock
from unittest.mock import Mock, patch

import attr
from incremental import Version
from typing_extensions import ParamSpec
from zope.interface import implementer

import twisted
from twisted.enterprise import adbapi
from twisted.internet import address, tcp, threads, udp
from twisted.internet._resolver import SimpleResolverComplexifier
from twisted.internet.defer import Deferred, fail, maybeDeferred, succeed
@@ -94,8 +95,8 @@
)
from synapse.server import HomeServer
from synapse.storage import DataStore
from synapse.storage.database import LoggingDatabaseConnection
from synapse.storage.engines import create_engine
from synapse.storage.database import LoggingDatabaseConnection, make_pool
from synapse.storage.engines import BaseDatabaseEngine, create_engine
from synapse.storage.prepare_database import prepare_database
from synapse.types import ISynapseReactor, JsonDict
from synapse.util import Clock
@@ -670,6 +671,53 @@ def validate_connector(connector: tcp.Connector, expected_ip: str) -> None:
)


def make_fake_db_pool(
reactor: ISynapseReactor,
db_config: DatabaseConnectionConfig,
engine: BaseDatabaseEngine,
) -> adbapi.ConnectionPool:
"""Wrapper for `make_pool` which builds a pool which runs db queries synchronously.
For more deterministic testing, we don't use a regular db connection pool: instead
we run all db queries synchronously on the test reactor's main thread. This function
is a drop-in replacement for the normal `make_pool` which builds such a connection
pool.
"""
pool = make_pool(reactor, db_config, engine)

def runWithConnection(
func: Callable[..., R], *args: Any, **kwargs: Any
) -> Awaitable[R]:
return threads.deferToThreadPool(
pool._reactor,
pool.threadpool,
pool._runWithConnection,
func,
*args,
**kwargs,
)

def runInteraction(
desc: str, func: Callable[..., R], *args: Any, **kwargs: Any
) -> Awaitable[R]:
return threads.deferToThreadPool(
pool._reactor,
pool.threadpool,
pool._runInteraction,
desc,
func,
*args,
**kwargs,
)

pool.runWithConnection = runWithConnection # type: ignore[method-assign]
pool.runInteraction = runInteraction # type: ignore[assignment]
# Replace the thread pool with a threadless 'thread' pool
pool.threadpool = ThreadPool(reactor)
pool.running = True
return pool


class ThreadPool:
"""
Threadless thread pool.
@@ -706,52 +754,6 @@ def _(res: Any) -> None:
return d


def _make_test_homeserver_synchronous(server: HomeServer) -> None:
"""
Make the given test homeserver's database interactions synchronous.
"""

clock = server.get_clock()

for database in server.get_datastores().databases:
pool = database._db_pool

def runWithConnection(
func: Callable[..., R], *args: Any, **kwargs: Any
) -> Awaitable[R]:
return threads.deferToThreadPool(
pool._reactor,
pool.threadpool,
pool._runWithConnection,
func,
*args,
**kwargs,
)

def runInteraction(
desc: str, func: Callable[..., R], *args: Any, **kwargs: Any
) -> Awaitable[R]:
return threads.deferToThreadPool(
pool._reactor,
pool.threadpool,
pool._runInteraction,
desc,
func,
*args,
**kwargs,
)

pool.runWithConnection = runWithConnection # type: ignore[method-assign]
pool.runInteraction = runInteraction # type: ignore[assignment]
# Replace the thread pool with a threadless 'thread' pool
pool.threadpool = ThreadPool(clock._reactor)
pool.running = True

# We've just changed the Databases to run DB transactions on the same
# thread, so we need to disable the dedicated thread behaviour.
server.get_datastores().main.USE_DEDICATED_DB_THREADS_FOR_EVENT_FETCHING = False


def get_clock() -> Tuple[ThreadedMemoryReactorClock, Clock]:
clock = ThreadedMemoryReactorClock()
hs_clock = Clock(clock)
@@ -1067,7 +1069,14 @@ def setup_test_homeserver(
# Mock TLS
hs.tls_server_context_factory = Mock()

hs.setup()
# Patch `make_pool` before initialising the database, to make database transactions
# synchronous for testing.
with patch("synapse.storage.database.make_pool", side_effect=make_fake_db_pool):
hs.setup()

# Since we've changed the databases to run DB transactions on the same
# thread, we need to stop the event fetcher hogging that one thread.
hs.get_datastores().main.USE_DEDICATED_DB_THREADS_FOR_EVENT_FETCHING = False

if USE_POSTGRES_FOR_TESTS:
database_pool = hs.get_datastores().databases[0]
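The `patch("synapse.storage.database.make_pool", side_effect=make_fake_db_pool)` call above only swaps the factory that `synapse.storage.database` looks up by name; `make_fake_db_pool` still calls the `make_pool` it imported directly, so the wrapper reaches the real factory instead of recursing. A minimal, self-contained sketch of that pattern, using hypothetical names rather than Synapse code:

```python
from unittest.mock import patch


def make_pool(size: int) -> list:
    """Stand-in factory (hypothetical, for illustration only)."""
    return [0] * size


_real_make_pool = make_pool  # direct reference, unaffected by the patch below


def make_fake_pool(size: int) -> list:
    pool = _real_make_pool(size)  # build the real object first ...
    pool.append("fake-marker")    # ... then tweak it for the test environment
    return pool


# Patch the name as other code would look it up in this module; the patch is
# undone automatically when the `with` block exits.
with patch(f"{__name__}.make_pool", side_effect=make_fake_pool):
    print(make_pool(3))  # [0, 0, 0, 'fake-marker'] -- lookups go through the wrapper
print(make_pool(3))      # [0, 0, 0] -- the original factory is restored
```

Only objects constructed while the patch is active (here, the homeserver's database pools built during `hs.setup()`) end up holding the wrapped result.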
@@ -1137,9 +1146,6 @@ async def validate_hash(p: str, h: str) -> bool:

hs.get_auth_handler().validate_hash = validate_hash # type: ignore[assignment]

# Make the threadpool and database transactions synchronous for testing.
_make_test_homeserver_synchronous(hs)

# Load any configured modules into the homeserver
module_api = hs.get_module_api()
for module, module_config in hs.config.modules.loaded_modules:
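A note on why `make_fake_db_pool` can get away with a "threadless" pool: `twisted.internet.threads.deferToThreadPool` only needs an object that provides `callInThreadWithCallback()`, so a pool that runs the submitted work inline keeps every db query on the calling (reactor) thread. The sketch below illustrates the shape of that trick with a hypothetical `ImmediateThreadPool`; it is not the `ThreadPool` class from `tests/server.py`, whose body is collapsed in this diff.

```python
from twisted.internet import reactor, threads
from twisted.python.failure import Failure


class ImmediateThreadPool:
    """Runs submitted work synchronously instead of handing it to a real thread."""

    def callInThreadWithCallback(self, onResult, function, *args, **kwargs):
        try:
            result = function(*args, **kwargs)
        except Exception:
            onResult(False, Failure())
        else:
            onResult(True, result)


def query() -> str:
    # Stand-in for a database transaction.
    return "row"


# deferToThreadPool hands `query` to our "pool", which runs it immediately; the
# result is delivered via reactor.callFromThread and fires once the reactor
# processes its pending calls.
d = threads.deferToThreadPool(reactor, ImmediateThreadPool(), query)
d.addCallback(print)  # prints "row"
reactor.callLater(0, reactor.stop)
reactor.run()
```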
