coremltools
coremltools copied to clipboard
Flexible size causes error for dynamic resizing
❓Question
I tried this code,
import pytorch_lightning as pl
import torch
import coremltools
from coremltools.models.neural_network import flexible_shape_utils
from torch import FloatTensor
import torch.nn as nn
class Decoder(pl.LightningModule):
    """Minimal decoder: a token embedding followed by LayerNorm."""

    def __init__(self):
        super().__init__()
        # Vocabulary size 118, embedding dimension 256; LayerNorm over the
        # embedding dimension. Kept in an nn.Sequential named `word_embed`.
        layers = [nn.Embedding(118, 256), nn.LayerNorm(256)]
        self.word_embed = nn.Sequential(*layers)

    def forward(self, src: FloatTensor) -> FloatTensor:
        # NOTE(review): `src` is actually an integer token tensor at the call
        # sites below, despite the FloatTensor annotation — confirm intent.
        return self.word_embed(src)
# Reproduction: trace at a fixed (1, 10) int32 shape, convert, then widen the
# input's second dimension to a [5, 10] range by patching the saved spec.
model = Decoder()
with torch.no_grad():
    input_tensor = torch.ones([1, 10], dtype=torch.int32)
    traced_model = torch.jit.trace(
        model.eval(), input_tensor, check_trace=True, check_tolerance=True
    )

coreml_model = coremltools.convert(
    traced_model,
    inputs=[coremltools.TensorType(shape=input_tensor.shape, dtype=int)],
)
coreml_model.save("decoder.mlmodel")

# Post-hoc flexible shape: allow the sequence length to range from 5 to 10.
spec = coremltools.utils.load_spec('decoder.mlmodel')
input_name = spec.description.input[0].name
flexible_shape_utils.set_multiarray_ndshape_range(
    spec,
    feature_name=input_name,
    lower_bounds=[1, 5],
    upper_bounds=[1, 10],
)
coremltools.utils.save_spec(spec, 'decoder.mlmodel')
loaded = coremltools.models.MLModel('decoder.mlmodel')

# Prediction at the traced shape (1, 10) works.
input_tensor = input_tensor.detach().numpy().copy()
out = loaded.predict({'src': input_tensor})  # this is OK
print(out)

# Prediction at the smaller shape (1, 5) raises the runtime error below.
input_tensor = torch.ones([1, 5], dtype=torch.int32).detach().numpy().copy()
out = loaded.predict({'src': input_tensor})  # error occurred here
print(out)
I got the following error:
Traceback (most recent call last):
File "/Users/ryosukefukatani/work/HMERModel/atnBTTR/d2.py", line 42, in <module>
out = loaded.predict({'src': input_tensor})
File "/Users/ryosukefukatani/work/HMERModel/venv/lib/python3.9/site-packages/coremltools/models/model.py", line 512, in predict
return self.__proxy__.predict(data, useCPUOnly)
RuntimeError: {
NSLocalizedDescription = "Failure dynamically resizing for sequence length.";
}
When I removed nn.LayerNorm(256), this code passed.
How can I fix it? Or does LayerNorm not support flexible input sizes?
Environment
- macOS 12.1
- coremltools 5.2
- torch 1.10.2
- Python 3.9.6
I found that the following code works fine for me.
# Working variant: declare the flexible dimension with RangeDim at conversion
# time instead of patching the spec afterwards.
with torch.no_grad():
    tgt = torch.randint(3, 113, (1, 10, 5), dtype=torch.int32)
    traced_model = torch.jit.trace(model, tgt)

tgt_shape = coremltools.Shape(shape=(1, coremltools.RangeDim(1, 15), 5))
coreml_model = coremltools.convert(
    traced_model,
    inputs=[coremltools.TensorType(shape=tgt_shape, dtype=int)],
)
coreml_model.save("decoder.mlmodel")
Is this expected behavior?