Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion scripts/elfgames/go/console_lib.py
Original file line number Diff line number Diff line change
Expand Up @@ -334,7 +334,7 @@ def check_player(self, batch, player):
return True, None

def print_msg(self, ret, msg):
    """Print a command result in GTP-style form.

    Args:
        ret: truthy on success -> '=' prefix; falsy on failure -> '?' prefix.
        msg: the message text to display after the prefix.
    """
    # Embedded '\n' plus print's own newline yields exactly one blank
    # line after the message (the diff removed the extra leading/trailing
    # blank lines of the old format).
    print("%s %s\n" % (("=" if ret else "?"), msg))

def prompt(self, prompt_str, batch):
# Show last command results.
Expand Down
10 changes: 6 additions & 4 deletions scripts/elfgames/go/df_console.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,17 +9,19 @@
import os
import sys

sys.path.append( os.path.expanduser('~/anaconda3/lib/python3.6/site-packages') )

import torch

from console_lib import GoConsoleGTP
from rlpytorch import Evaluator, load_env


def main():
# print('Python version:', sys.version)
# print('PyTorch version:', torch.__version__)
# print('CUDA version', torch.version.cuda)
# print('Conda env:', os.environ.get("CONDA_DEFAULT_ENV", ""))
print('Python version:', sys.version, file=sys.stderr)
print('PyTorch version:', torch.__version__, file=sys.stderr)
print('CUDA version', torch.version.cuda, file=sys.stderr)
print('Conda env:', os.environ.get("CONDA_DEFAULT_ENV", ""), file=sys.stderr)

additional_to_load = {
'evaluator': (
Expand Down
6 changes: 3 additions & 3 deletions src_py/elf/utils_elf.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ def _alloc(p, gpu, use_numpy=True):
type_name = p.field().type_name()
sz = p.field().sz().vec()

# print(name, type_name, sz)
print(name, type_name, sz, file=sys.stderr)

if not use_numpy:
v = Allocator.torch_types[type_name](*sz)
Expand Down Expand Up @@ -63,7 +63,7 @@ def spec2batches(ctx, batchsize, spec, gpu, use_numpy=False, num_recv=1):
idx2name = dict()

for name, v in spec.items():
# print("%s: %s" % (name, v))
print("%s: %s" % (name, v), file=sys.stderr)
# TODO: this might not be good since it mutates the input.
if "input" not in v or v["input"] is None:
v["input"] = []
Expand All @@ -74,7 +74,7 @@ def spec2batches(ctx, batchsize, spec, gpu, use_numpy=False, num_recv=1):
this_batchsize = v.get("batchsize", batchsize)

keys = list(set(v["input"] + v["reply"]))
# print("SharedMem: \"%s\", keys: %s" % (name, str(keys)))
print("SharedMem: \"%s\", keys: %s" % (name, str(keys)), file=sys.stderr)

smem_opts = ctx.createSharedMemOptions(name, this_batchsize)
smem_opts.setTimeout(v.get("timeout_usec", 0))
Expand Down
3 changes: 2 additions & 1 deletion src_py/rlpytorch/model_loader.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
import time
import torch
import warnings
import sys

from elf.options import import_options, PyOptionSpec
from elf import logging
Expand All @@ -25,7 +26,7 @@
def load_module(mod):
    """Load and return a Python module given its dotted name.

    Args:
        mod: dotted module path, e.g. "pkg.sub.module".

    Returns:
        The imported module object.

    Raises:
        ImportError: if the module cannot be found or imported.
    """
    module = importlib.import_module(mod)
    # Debug trace goes to stderr so stdout stays clean for protocol output.
    print(module, mod, file=sys.stderr)
    return module


Expand Down