Commit 872237c

pritamdamania authored and facebook-github-bot committed Jul 30, 2020
Output to stderr in distributed tests. (pytorch#42139)
Summary:
Pull Request resolved: pytorch#42139

A bunch of tests were failing with buck, since we would output to stdout and buck would fail parsing stdout in some cases. Moving these print statements to stderr fixes this issue.

ghstack-source-id: 108606579

Test Plan: Run the offending unit tests.

Reviewed By: mrshenli

Differential Revision: D22779135

fbshipit-source-id: 789af3b16a03b68a6cb12377ed852e5b5091bbad
1 parent fe4f19e commit 872237c
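
The pattern applied across the files below is the same everywhere: write test-skip notices to stderr so that stdout stays clean for whatever tool (buck, in this case) is parsing it. A minimal sketch of that idiom, using a hypothetical BACKEND_AVAILABLE flag in place of real checks such as c10d.is_available() or TEST_CUDA:

    import sys

    # Hypothetical flag standing in for checks like c10d.is_available(),
    # dist.is_available(), or TEST_CUDA in the actual test files.
    BACKEND_AVAILABLE = False

    if not BACKEND_AVAILABLE:
        # The skip notice goes to stderr so anything parsing the test's
        # stdout only ever sees machine-readable output.
        print('backend not available, skipping tests', file=sys.stderr)
        # Exit with status 0 so the runner records a skip, not a failure.
        sys.exit(0)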

File tree

7 files changed: +10 −8 lines changed


‎test/distributed/test_c10d.py

+1 −1
@@ -40,7 +40,7 @@
 load_tests = load_tests

 if not c10d.is_available():
-    print('c10d not available, skipping tests')
+    print('c10d not available, skipping tests', file=sys.stderr)
     sys.exit(0)

‎test/distributed/test_c10d_spawn.py

+2 −2
@@ -20,12 +20,12 @@
 load_tests = load_tests

 if not c10d.is_available():
-    print('c10d not available, skipping tests')
+    print('c10d not available, skipping tests', file=sys.stderr)
     sys.exit(0)


 if NO_MULTIPROCESSING_SPAWN:
-    print('spawn not available, skipping tests')
+    print('spawn not available, skipping tests', file=sys.stderr)
     sys.exit(0)

‎test/distributed/test_distributed.py

+1 −1
@@ -116,7 +116,7 @@ def get_timeout(test_id):


 if not dist.is_available():
-    print("Distributed not available, skipping tests")
+    print("Distributed not available, skipping tests", file=sys.stderr)
     sys.exit(0)

‎test/distributed/test_nccl.py

+1 −1
@@ -1,5 +1,6 @@
 import unittest

+import sys
 import torch
 import torch.cuda.nccl as nccl
 import torch.cuda
@@ -18,7 +19,6 @@

 nGPUs = torch.cuda.device_count()
 if not TEST_CUDA:
-    import sys
     print('CUDA not available, skipping tests', file=sys.stderr)
     TestCase = object  # noqa: F811

‎test/test_cuda.py

+1 −1
@@ -37,7 +37,7 @@
 TEST_MULTIGPU = TEST_CUDA and torch.cuda.device_count() >= 2

 if not TEST_CUDA:
-    print('CUDA not available, skipping tests')
+    print('CUDA not available, skipping tests', file=sys.stderr)
     TestCase = object  # noqa: F811

 TEST_MAGMA = TEST_CUDA

‎test/test_cuda_primary_ctx.py

+2 −1
@@ -1,5 +1,6 @@
 import torch
 from torch.testing._internal.common_utils import TestCase, run_tests, skipIfRocm
+import sys
 import unittest

 # NOTE: this needs to be run in a brand new process
@@ -12,7 +13,7 @@
 TEST_MULTIGPU = TEST_CUDA and torch.cuda.device_count() >= 2

 if not TEST_CUDA:
-    print('CUDA not available, skipping tests')
+    print('CUDA not available, skipping tests', file=sys.stderr)
     TestCase = object  # noqa: F811

‎torch/testing/_internal/dist_utils.py

+2 −1
@@ -3,14 +3,15 @@
 import time
 from functools import partial, wraps
 import re
+import sys

 import torch.distributed as dist
 import torch.distributed.rpc as rpc
 from torch.distributed.rpc import _rref_context_get_debug_info


 if not dist.is_available():
-    print("c10d not available, skipping tests")
+    print("c10d not available, skipping tests", file=sys.stderr)
     sys.exit(0)

0 commit comments

Comments
 (0)
Please sign in to comment.