Skip to content

Commit

Permalink
Merge pull request #97 from digital-asset/python-single-sandbox-tests-2
Browse files Browse the repository at this point in the history
python: Move the remaining unit tests off of starting their own Sandbox
  • Loading branch information
da-tanabe authored Aug 4, 2020
2 parents e6ed243 + bab63f3 commit b19facb
Show file tree
Hide file tree
Showing 17 changed files with 228 additions and 402 deletions.
14 changes: 14 additions & 0 deletions _fixtures/src/upload-test/UploadTest.daml
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
-- UploadTest.daml
--
-- This is ONLY used to test DAR uploading functionality. The test that uploads
-- this DAR assumes it has not been uploaded by any other process before, so it
-- should only be used for that one test.
daml 1.2

module UploadTest where

-- Trivial single-signatory template; it exists only so that this DAR has
-- some content to upload. No test should ever create a contract of it.
template XYZ
  with
    party: Party
  where
    signatory party
10 changes: 10 additions & 0 deletions _fixtures/src/upload-test/daml.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
# Project manifest for the upload-test fixture DAR.
# This DAR is built solely to exercise DAR-upload functionality in tests.
sdk-version: 0.13.32
name: upload-test
version: 1.0.0
source: UploadTest.daml
# Parties pre-allocated when the project is started locally.
parties:
- Alice
- Bob
dependencies:
- daml-prim
- daml-stdlib
2 changes: 0 additions & 2 deletions python/dazl/cli/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,14 +13,12 @@
from ._base import CliCommand
from .ls import ListAllCommand
from .metadata import PrintMetadataCommand
from .sandbox import SandboxCommand
from .upload import UploadCommand
from .version import VersionCommand

COMMANDS = [
ListAllCommand(),
PrintMetadataCommand(),
SandboxCommand(),
UploadCommand(),
VersionCommand(),
] # type: List[CliCommand]
Expand Down
31 changes: 0 additions & 31 deletions python/dazl/cli/sandbox.py

This file was deleted.

18 changes: 9 additions & 9 deletions python/poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion python/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ typing_extensions = { version = "*", python = "<3.8.0" }
grpcio-tools = ">=1.20.1"
mypy = "*"
pympler = "*"
pytest = "*"
pytest = "^5"
pytest-asyncio = "*"
setuptools = "==40.8.0"
sphinx = "*"
Expand Down
56 changes: 56 additions & 0 deletions python/tests/unit/blocking_setup.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
from asyncio import set_event_loop, new_event_loop
from threading import Thread

from dazl import Party, Network
from dazl.model.core import Dar


def blocking_setup(url: str, dar: Dar) -> 'Party':
    """
    Synchronously prepare a ledger for a test.

    Intended for tests that exercise the thread-safe flavors of the dazl API,
    where keeping the caller's async context pristine matters more than the
    performance cost of spinning up a throwaway thread and event loop.

    :param url:
        The URL of the remote Ledger API implementation to connect to.
    :param dar:
        A DAR file.
    :return:
        A newly allocated ``Party`` that is guaranteed to be used by no other
        client.
    """
    setup = Setup(url, dar)
    return setup.run()


class Setup:
    """
    Performs ledger setup (DAR upload plus Party allocation) on a dedicated
    thread that owns a private event loop, so the caller's async context is
    left untouched.
    """

    def __init__(self, url, dar):
        # URL of the remote Ledger API implementation to connect to.
        self.url = url
        # Set by _main once a Party has been allocated; None until then.
        self.party = None
        self.dar = dar
        self.network = None
        # Exception raised on the worker thread, if any; re-raised by run().
        self._error = None

    def run(self):
        """
        Run setup to completion and return the newly allocated ``Party``.

        Raises whatever exception the worker thread raised, instead of
        silently returning ``None`` (which would only fail later, obscurely).
        """
        # upload our DAR and allocate our Party in a completely separate
        # thread so as to avoid polluting the current async context
        t = Thread(target=self._main)
        t.start()
        t.join()
        if self._error is not None:
            raise self._error
        return self.party

    def _main(self):
        # Worker-thread entry point: create a private event loop just for us,
        # and make sure any failure is surfaced to the calling thread.
        try:
            loop = new_event_loop()
            set_event_loop(loop)
            try:
                self.network = Network()
                self.network.set_config(url=self.url)

                client = self.network.aio_new_party()
                self.party = client.party

                self.network.run_until_complete(self.upload_dar())
            finally:
                # The loop is private to this short-lived thread; close it to
                # release its resources promptly.
                loop.close()
        except Exception as ex:
            self._error = ex

    async def upload_dar(self):
        # ensure_dar returns once the ledger has the package available.
        await self.network.aio_global().ensure_dar(self.dar)
1 change: 1 addition & 0 deletions python/tests/unit/dars.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,7 @@ def load_dars() -> 'Mapping[str, Path]':
PostOffice = DARS['post-office']
Simple = DARS['simple']
TestServer = DARS['test-server']
UploadTest = DARS['upload-test']


if __name__ == '__main__':
Expand Down
39 changes: 24 additions & 15 deletions python/tests/unit/test_all_party.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,39 +2,48 @@
# SPDX-License-Identifier: Apache-2.0

import logging
import uuid

from dazl import sandbox, create, Network
import pytest

from .dars import AllParty
from dazl import async_network, create, Party

from .dars import AllParty as AllPartyDar

SOME_PARTY = 'SomeParty'
PUBLISHER = 'Publisher'
ALL_PARTY = '000'
PrivateContract = 'AllParty.PrivateContract'
PublicContract = 'AllParty.PublicContract'


def test_some_party_receives_public_contract():
@pytest.mark.asyncio
async def test_some_party_receives_public_contract(sandbox):
some_party_cids = []
publisher_cids = []
with sandbox(AllParty) as proc:
network = Network()
network.set_config(url=proc.url, party_groups=[ALL_PARTY])

some_client = network.aio_party(SOME_PARTY)
some_client.add_ledger_ready(lambda _: create(PrivateContract, {'someParty': SOME_PARTY}))
# TODO: Switch to a Party allocation API when available.
all_party = Party(str(uuid.uuid4()))

async with async_network(url=sandbox, dars=AllPartyDar) as network:
network.set_config(party_groups=[all_party])

publisher_client = network.aio_party(PUBLISHER)
publisher_client.add_ledger_ready(lambda _: create(PublicContract, {'publisher': PUBLISHER, 'allParty': ALL_PARTY}))
some_client = network.aio_new_party()
some_client.add_ledger_ready(
lambda _: create(PrivateContract, {'someParty': some_client.party}))

publisher_client = network.aio_new_party()
publisher_client.add_ledger_ready(
lambda _: create(PublicContract, {'publisher': publisher_client.party, 'allParty': all_party}))

some_client.add_ledger_created(PublicContract, lambda e: some_party_cids.append(e.cid))
some_client.add_ledger_created(PrivateContract, lambda e: some_party_cids.append(e.cid))

publisher_client.add_ledger_created(PublicContract, lambda e: publisher_cids.append(e.cid))
publisher_client.add_ledger_created(PrivateContract, lambda e: publisher_cids.append(e.cid))

network.run_until_complete()
network.start()

logging.info(
'got to the end with some_party contracts: %s and publisher contracts: %s',
some_party_cids, publisher_cids)

logging.info(f'got to the end with some_party contracts: {some_party_cids} and publisher contracts: {publisher_cids}')
assert len(some_party_cids) == 2
assert len(publisher_cids) == 1
71 changes: 37 additions & 34 deletions python/tests/unit/test_dar_upload.py
Original file line number Diff line number Diff line change
@@ -1,61 +1,64 @@
# Copyright (c) 2019 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0

from asyncio import new_event_loop, set_event_loop, sleep
import pytest
from asyncio import sleep

from dazl import Network, sandbox
from .dars import AllKindsOf
from dazl import Network
from .dars import UploadTest


def test_dar_uploads_near_startup():
set_event_loop(new_event_loop())

@pytest.mark.asyncio
async def test_dar_uploads_near_startup(sandbox):
package_ids = []

with sandbox([]) as proc:
network = Network()
network.set_config(url=proc.url)
network = Network()
network.set_config(url=sandbox)

async def upload_dars_and_verify():
await upload_test_dars(network)
metadata = await network.aio_global().metadata()
package_ids.extend(metadata.store.package_ids())
async def upload_dars_and_verify():
await upload_test_dars(network)
metadata = await network.aio_global().metadata()
package_ids.extend(metadata.store.package_ids())

network.run_until_complete(upload_dars_and_verify())
await network.aio_run(upload_dars_and_verify(), keep_open=False)

# Because we use a single sandbox process, it's somewhat difficult to assert that the specific
# DAR we are attempting to upload has indeed been uploaded, because packages are global and
# other tests will upload packages as well. However, we know that we HAVE indeed uploaded
# SOMETHING, and the Sandbox tests are started without any packages at all. So assume that a
# non-zero package ID list means that DAR uploading works.
assert len(package_ids) > 0


def test_package_events():
set_event_loop(new_event_loop())

@pytest.mark.asyncio
async def test_package_events(sandbox):
initial_events = []
follow_up_events = []

with sandbox([]) as proc:
network = Network()
network.set_config(url=proc.url)
client = network.aio_party('TestParty')
network = Network()
network.set_config(url=sandbox)
client = network.aio_new_party()

async def upload_dars_and_verify():
# make sure the client is "ready" before uploading DARs, because we are explicitly
# checking to make sure proper reporting of packages that are uploaded after a
# client is running and operational
await client.ready()
await upload_test_dars(network)

async def upload_dars_and_verify():
# make sure the client is "ready" before uploading DARs, because we are explicitly
# checking to make sure proper reporting of packages that are uploaded after a
# client is running and # operational
await client.ready()
await upload_test_dars(network)
# give the client some time to pick up the new packages; unfortunately there isn't
# much more to do here except wait
await sleep(10)

# give the client some time to pick up the new packages; unfortunately there isn't
# much more to do here except wait
await sleep(10)
client.add_ledger_packages_added(lambda _: initial_events.append(_), initial=True)
client.add_ledger_packages_added(lambda _: follow_up_events.append(_))

client.add_ledger_packages_added(lambda _: initial_events.append(_), initial=True)
client.add_ledger_packages_added(lambda _: follow_up_events.append(_))
network.run_until_complete(upload_dars_and_verify())
await network.aio_run(upload_dars_and_verify(), keep_open=False)

assert len(initial_events) == 2
assert len(follow_up_events) == 1


async def upload_test_dars(network: 'Network'):
g = network.aio_global()
await g.ensure_dar(AllKindsOf.read_bytes())
await g.ensure_dar(UploadTest)
Loading

0 comments on commit b19facb

Please sign in to comment.