
Commit 8238f18

amirafzali authored and facebook-github-bot committed
include pytest-asyncio and timeout
Differential Revision: D75028810
1 parent a11d9f6 · commit 8238f18

4 files changed: +18, -2 lines

.github/workflows/test.yml

Lines changed: 1 addition & 1 deletion
@@ -55,7 +55,7 @@ jobs:
 pip install pyzmq requests numpy pyre-extensions

 # Test dependencies
-pip install pytest cloudpickle
+pip install pytest cloudpickle pytest-timeout pytest-asyncio

 # Build and install monarch
 python setup.py install
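
For context: pytest-asyncio is what lets pytest collect and run async def test functions on an event loop, and pytest-timeout aborts tests that hang instead of letting the CI job stall. A minimal sketch of a test that uses both plugins (illustrative only, not part of this commit; the test name is made up):

import asyncio

import pytest


@pytest.mark.asyncio
@pytest.mark.timeout(30)  # from pytest-timeout: fail if the test runs longer than 30 seconds
async def test_event_loop_is_available() -> None:
    # pytest-asyncio executes this coroutine on an event loop it manages
    await asyncio.sleep(0.01)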

README.md

Lines changed: 1 addition & 1 deletion
@@ -28,7 +28,7 @@ pip install setuptools-rust
 # install torch, can use conda or build it yourself or whatever
 pip install torch
 # install other deps, see pyproject.toml for latest
-pip install pyzmq requests numpy pyre-extensions pytest-timeout cloudpickle
+pip install pyzmq requests numpy pyre-extensions pytest-timeout cloudpickle pytest-asyncio

 # install the package
 python setup.py install
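
With these installed, the async tests under python/tests/ run like any other pytest tests; pytest-timeout's --timeout command-line option can additionally cap per-test runtime, e.g. pytest python/tests --timeout=120, where the value is illustrative rather than a project default.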

python/tests/_monarch/test_hyperactor.py

Lines changed: 4 additions & 0 deletions
@@ -1,4 +1,5 @@
 # pyre-strict
+# pyre-ignore-all-errors[56]

 import multiprocessing
 import os
@@ -73,12 +74,14 @@ def test_fn() -> None:
     assert code == signal.SIGTERM, code


+@pytest.mark.asyncio
 async def test_allocator() -> None:
     spec = AllocSpec(AllocConstraints(), replica=2)
     allocator = monarch.LocalAllocator()
     _ = await allocator.allocate(spec)


+@pytest.mark.asyncio
 async def test_proc_mesh() -> None:
     spec = AllocSpec(AllocConstraints(), replica=2)
     allocator = monarch.LocalAllocator()
@@ -87,6 +90,7 @@ async def test_proc_mesh() -> None:
     assert str(proc_mesh) == "<ProcMesh { shape: {replica=2} }>"


+@pytest.mark.asyncio
 async def test_actor_mesh() -> None:
     spec = AllocSpec(AllocConstraints(), replica=2)
     allocator = monarch.LocalAllocator()
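
Plain pytest does not await async def tests; it warns that they are not natively supported and skips them, so without a plugin these coroutine tests would not actually exercise the allocator. The @pytest.mark.asyncio marker hands each coroutine to pytest-asyncio, which runs it on a managed event loop. A hypothetical equivalent without the plugin, for comparison only (assuming the module's existing imports plus asyncio), would wrap the body in asyncio.run:

import asyncio


def test_allocator_without_plugin() -> None:  # hypothetical name, for illustration
    async def body() -> None:
        spec = AllocSpec(AllocConstraints(), replica=2)
        allocator = monarch.LocalAllocator()
        _ = await allocator.allocate(spec)

    # drive the coroutine to completion on a fresh event loop
    asyncio.run(body())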

python/tests/test_python_actors.py

Lines changed: 12 additions & 0 deletions
@@ -1,5 +1,7 @@
 import operator

+import pytest
+
 import torch

 from monarch.proc_mesh import local_proc_mesh, proc_mesh
@@ -53,6 +55,7 @@ async def get_grad_buffer(self) -> torch.Tensor:
         return self.grad_buffer


+@pytest.mark.asyncio
 async def test_choose():
     proc = await local_proc_mesh(gpus=2)
     v = await proc.spawn("counter", Counter, 3)
@@ -64,6 +67,7 @@ async def test_choose():
     assert result == result2


+@pytest.mark.asyncio
 async def test_stream():
     proc = await local_proc_mesh(gpus=2)
     v = await proc.spawn("counter2", Counter, 3)
@@ -93,6 +97,7 @@ async def get_buffer(self):
         return self.buffer


+@pytest.mark.asyncio
 async def test_proc_mesh_rdma():
     proc = await proc_mesh(gpus=1)
     server = await proc.spawn("server", ParameterServer)
@@ -155,6 +160,7 @@ async def get(self, to: To):
         return [x async for x in to.whoami.stream()]


+@pytest.mark.asyncio
 async def test_mesh_passed_to_mesh():
     proc = await local_proc_mesh(gpus=2)
     f = await proc.spawn("from", From)
@@ -164,6 +170,7 @@ async def test_mesh_passed_to_mesh():
     assert all[0] != all[1]


+@pytest.mark.asyncio
 async def test_mesh_passed_to_mesh_on_different_proc_mesh():
     proc = await local_proc_mesh(gpus=2)
     proc2 = await local_proc_mesh(gpus=2)
@@ -174,6 +181,7 @@ async def test_mesh_passed_to_mesh_on_different_proc_mesh():
     assert all[0] != all[1]


+@pytest.mark.asyncio
 async def test_actor_slicing():
     proc = await local_proc_mesh(gpus=2)
     proc2 = await local_proc_mesh(gpus=2)
@@ -189,6 +197,7 @@ async def test_actor_slicing():
     assert result[0] == result[1]


+@pytest.mark.asyncio
 async def test_aggregate():
     proc = await local_proc_mesh(gpus=2)
     counter = await proc.spawn("counter", Counter, 1)
@@ -204,6 +213,7 @@ async def run(self, fn):
         return fn()


+@pytest.mark.asyncio
 async def test_rank_size():
     proc = await local_proc_mesh(gpus=2)
     r = await proc.spawn("runit", RunIt)
@@ -261,6 +271,7 @@ async def update_weights(self):
         ), f"{torch.sum(self.generator.weight.data)=}, {self.step=}"


+@pytest.mark.asyncio
 async def test_gpu_trainer_generator():
     trainer_proc = await proc_mesh(gpus=1)
     gen_proc = await proc_mesh(gpus=1)
@@ -282,6 +293,7 @@ def sync_endpoint(self, a_counter: Counter):
         return a_counter.value.choose().get()


+@pytest.mark.asyncio
 async def test_sync_actor():
     proc = await local_proc_mesh(gpus=2)
     a = await proc.spawn("actor", SyncActor)
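
An alternative to marking every test individually would be setting pytest-asyncio's asyncio_mode to auto in the pytest configuration, which treats each collected async def test as an asyncio test without per-function markers; this change instead keeps the explicit @pytest.mark.asyncio decorators, which is what the plugin's strict mode requires.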
