Multi-GPU Support
Hi,
Do you plan to support Multi-GPU training in the future?
Thanks.
I tried wrapping the model with torch.nn.DataParallel(seq2seq_model).cuda(), but it failed with the following error:
`TypeError Traceback (most recent call last)
~/anaconda3/lib/python3.6/site-packages/torch/nn/modules/module.py in call(self, *input, **kwargs) 489 result = self._slow_forward(*input, **kwargs) 490 else: --> 491 result = self.forward(*input, **kwargs) 492 for hook in self._forward_hooks.values(): 493 hook_result = hook(self, input, result)
~/anaconda3/lib/python3.6/site-packages/torch/nn/parallel/data_parallel.py in forward(self, *inputs, **kwargs) 113 replicas = self.replicate(self.module, self.device_ids[:len(inputs)]) 114 outputs = self.parallel_apply(replicas, inputs, kwargs) --> 115 return self.gather(outputs, self.output_device) 116 117 def replicate(self, module, device_ids):
~/anaconda3/lib/python3.6/site-packages/torch/nn/parallel/data_parallel.py in gather(self, outputs, output_device) 125 126 def gather(self, outputs, output_device): --> 127 return gather(outputs, output_device, dim=self.dim) 128 129
~/anaconda3/lib/python3.6/site-packages/torch/nn/parallel/scatter_gather.py in gather(outputs, target_device, dim) 66 # Setting the function to None clears the refcycle. 67 try: ---> 68 return gather_map(outputs) 69 finally: 70 gather_map = None
~/anaconda3/lib/python3.6/site-packages/torch/nn/parallel/scatter_gather.py in gather_map(outputs) 61 return type(out)(((k, gather_map([d[k] for d in outputs])) 62 for k in out)) ---> 63 return type(out)(map(gather_map, zip(*outputs))) 64 65 # Recursive function calls like this create reference cycles.
~/anaconda3/lib/python3.6/site-packages/torch/nn/parallel/scatter_gather.py in gather_map(outputs) 60 raise ValueError('All dicts must have the same number of keys') 61 return type(out)(((k, gather_map([d[k] for d in outputs])) ---> 62 for k in out)) 63 return type(out)(map(gather_map, zip(*outputs))) 64
~/anaconda3/lib/python3.6/site-packages/torch/nn/parallel/scatter_gather.py in
~/anaconda3/lib/python3.6/site-packages/torch/nn/parallel/scatter_gather.py in gather_map(outputs) 61 return type(out)(((k, gather_map([d[k] for d in outputs])) 62 for k in out)) ---> 63 return type(out)(map(gather_map, zip(*outputs))) 64 65 # Recursive function calls like this create reference cycles.
~/anaconda3/lib/python3.6/site-packages/torch/nn/parallel/scatter_gather.py in gather_map(outputs) 61 return type(out)(((k, gather_map([d[k] for d in outputs])) 62 for k in out)) ---> 63 return type(out)(map(gather_map, zip(*outputs))) 64 65 # Recursive function calls like this create reference cycles.
TypeError: zip argument #1 must support iteration`
Any suggestions?
Thanks