왜 Core ML을 써야 하는가?
Create ML 앱을 써도 되지만, 다른 프레임워크의 모델을 컨버팅할 수 있는 coremltools도 제공한다.
기존에는 외부 의존성을 수동으로 가져다 썼어야 했으나, 이제는 coremltools 하나로 해결된다.
MIL(Model Intermediate Language) - coremltools 안에서 쓰는 DSL
## Tensorflow2
# TensorFlow 2: a Keras model can be passed to ct.convert() directly,
# no tracing or freezing step required.
import coremltools as ct  # was missing in the original snippet; `ct` is used below
import tensorflow as tf

tf_model = tf.keras.applications.MobileNet()
mlmodel = ct.convert(tf_model)
## pyTorch
# PyTorch: the model must be put in eval mode and traced with an example
# input before conversion (ct.convert takes the TorchScript module).
import torch
import torchvision  # module name is all lowercase, not `torchVision`

torch_model = torchvision.models.mobilenet_v2()
torch_model.eval()  # disable dropout/batch-norm updates before tracing

example_input = torch.rand(1, 3, 256, 256)
traced_model = torch.jit.trace(torch_model, example_input)

# The traced model carries no input shape metadata, so it is supplied here.
mlmodel = ct.convert(
    traced_model,
    inputs=[ct.TensorType(shape=example_input.shape)],
)
## TensorFlow1
# TensorFlow 1: a frozen graph (.pb) file path can be converted directly.
import coremltools as ct  # module names are all lowercase
import tensorflow as tf

mlmodel = ct.convert(
    "mobilenet_frozen_graph.pb",
    # ImageType maps pixel values into the range the model expects:
    # pixel * scale + bias, i.e. roughly [-1, 1] here.
    inputs=[ct.ImageType(bias=[-1, -1, -1], scale=1 / 127)],
    # Attach class labels so the converted model acts as a classifier.
    classifier_config=ct.ClassifierConfig("labels.txt"),
)
더 복잡한 모델을 변환해보자
만약 지원하지 않는 동작을 만났을 때는 어떻게 처리되나?
MIL이 어떻게 돌아가길래 이게 되지?
## MIL 번역 예제
# MIL Builder example: define a small program directly in the
# Model Intermediate Language using the Builder API.
from coremltools.converters.mil import Builder as mb


@mb.program(input_specs=[mb.TensorSpec(shape=(1, 100, 100, 3))])
def prog(x):
    """Tiny MIL program: relu -> NHWC->NCHW transpose -> spatial mean -> log."""
    x = mb.relu(x=x, name='x')
    x = mb.transpose(x=x, perm=[0, 3, 1, 2], name='transpose')
    # A comma was missing between `x=x` and `axes=[2, 3]` in the original.
    x = mb.reduce_mean(x=x, axes=[2, 3], keep_dims=False, name='reduce')
    x = mb.log(x=x, name='log')
    return x


# print(prog)  # produces the MIL textual IR shown below
main(%x: (1, 100, 100, 3, fp32)) {
block0() {
%x: (1, 100, 100, 3, fp32) = relu(x=%x, name="x")
%transpose_perm_0: (4, i32)* = const(val=[0, 3, 1, 2], name="transpose_perm_0")
%transpose: (1, 3, 100, 100, fp32) = transpose(x=%x, perm=%transpose_perm_0, name="transpose")
%reduce_axes_0: (2, i32)* = const(val=[2, 3], name="reduce_axes_0")
%reduce_keep_dims_0: (bool)* = const(val=False, name="reduce_keep_dims_0")
%reduce: (1, 3, fp32) = reduce_mean(x=%transpose, axes=%reduce_axes_0, keep_dims=%reduce_keep_dims_0, name="reduce")
%log: (1, 3, fp32) = log(x=%reduce, name="log")
} -> (%log)
}
# Composite operator example: when the converter hits a TF op it does not
# support (here: Einsum), register a Python function that rebuilds the op
# from existing MIL primitives.
from coremltools.converters.mil import Builder as mb
from coremltools.converters.mil import register_tf_op


@register_tf_op
def Einsum(context, node):
    """Translate the TF Einsum op into a MIL matmul.

    Only the attention-style equation 'bnqd,bnkd->bnqk' is handled, which
    is exactly a batched matmul with the second operand transposed.
    """
    assert node.attr['equation'] == 'bnqd,bnkd->bnqk'
    a = context[node.inputs[0]]
    b = context[node.inputs[1]]
    # bnqd @ bnkd^T -> bnqk, so transpose only the second operand.
    x = mb.matmul(x=a, y=b, transpose_x=False, transpose_y=True, name=node.name)
    # Make the result visible to downstream ops under this node's name.
    context.add(node.name, x)