An error occurred when I ran a code snippet, which is a small *self-attention* implementation.
I also disabled the 'AlterOpLayout' pass, but that didn't help. Is my code wrong? How should I modify it?
Here is my source code:
```
from tvm import relay
from tvm.relay import testing
import tvm
from tvm import te
from tvm.contrib import graph_executor
import numpy as np

def dense(data, weight=None, units=None, **kwargs):
    name = kwargs.get("name")
    kwargs.pop("name")
    if weight is None:
        weight = relay.var(name + "_weight")
    ans = relay.nn.dense(data, weight, units, **kwargs)
    return ans

def batch_matmul(data, weight=None,**kwargs):
        return relay.nn.batch_matmul(data,weight)

def bert(seq_lenth=64,hidden_dims=1024,heads=64):
        data = relay.var("data", shape=(seq_lenth,hidden_dims), dtype='float32')
        body = relay.nn.relu(data=data)

        # attention
        d_k = hidden_dims // heads
        q = dense(data,units=hidden_dims,name="q")
        k = dense(data,units=hidden_dims,name="k")
        v = dense(data,units=hidden_dims,name="v")

        q = relay.reshape(q,(-1,heads,d_k))
        k = relay.reshape(k,(-1,heads,d_k))
        q = relay.reshape(q,(-1,heads,d_k))

        q = relay.transpose(q,(1,0,2))
        k = relay.transpose(k,(1,0,2))
        v = relay.transpose(v,(1,2,0))

        logits = batch_matmul(q,k,name='logits')
        output = batch_matmul(logits,v,name='output')

        output = relay.transpose(output,(1,0,2))
        output = relay.reshape(output,(seq_lenth,hidden_dims))

        net = dense(output,units=hidden_dims,name='net')

        return relay.Function(relay.analysis.free_vars(net), net)

def get_workload():
        net = bert()
        mod = tvm.IRModule.from_expr(net)
        mod = relay.transform.InferType()(mod)
        shape_dict = {v.name_hint: v.checked_type for v in mod["main"].params}
        np.random.seed(0)
        params = {}
        for k, v in shape_dict.items():
            if k == "data":
                continue
            init_value = np.random.uniform(-1, 1, v.concrete_shape).astype(v.dtype)
            params[k] = tvm.nd.array(init_value, device=tvm.cpu(0))
        return mod,params

if __name__=='__main__':
        mod,params = get_workload()
        data_shape = (64,1024)

        opt_level = 4
        target = tvm.target.cuda()

        with tvm.transform.PassContext(opt_level=opt_level, disabled_pass={"AlterOpLayout"}):
                lib = relay.build(mod, target, params=params)


        dev = tvm.cuda()
        data = np.random.uniform(-1, 1, size=data_shape).astype("float32")
        # create module
        module = graph_executor.GraphModule(lib["default"](dev))
        # set input and parameters
        module.set_input("data", data)
        # run
        print(module.benchmark(dev, number=1, repeat=600))
```
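
For reference, this is the computation I intend, sketched in NumPy with the shapes spelled out (my understanding is that `relay.nn.batch_matmul` transposes its second operand, i.e. computes `A @ transpose(B, (0, 2, 1))`, which is why `v` gets the extra transpose):

```
import numpy as np

# NumPy sketch of the intended shapes at each step of the attention block.
seq, hid, heads = 64, 1024, 64
d_k = hid // heads                                                     # 16
q = np.zeros((seq, hid)).reshape(seq, heads, d_k).transpose(1, 0, 2)   # (heads, seq, d_k)
k = np.zeros((seq, hid)).reshape(seq, heads, d_k).transpose(1, 0, 2)   # (heads, seq, d_k)
v = np.zeros((seq, hid)).reshape(seq, heads, d_k).transpose(1, 2, 0)   # (heads, d_k, seq)
logits = q @ k.transpose(0, 2, 1)                                      # (heads, seq, seq)
output = logits @ v.transpose(0, 2, 1)                                 # (heads, seq, d_k)
output = output.transpose(1, 0, 2).reshape(seq, hid)                   # (seq, hid)
print(output.shape)                                                    # (64, 1024)
```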
And the error message:

```
Check failed: (!axes.defined() || static_cast<int>(axes.size()) == ndim) is false: Dimension mismatch: axes has 3 elements, but data.ndim = 2
```
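
For what it's worth, here is a minimal sketch that reproduces the same message (my assumption is that the quoted check comes from `relay.transpose`, which requires the permutation to have as many axes as the rank of its input):

```
import tvm
from tvm import relay

# Minimal sketch (assumption: the quoted check is relay.transpose's axes check).
x = relay.var("x", shape=(64, 1024), dtype="float32")   # a 2-D tensor
y = relay.transpose(x, (1, 2, 0))                        # 3-axis permutation on a 2-D input
mod = tvm.IRModule.from_expr(relay.Function([x], y))
mod = relay.transform.InferType()(mod)   # fails: "axes has 3 elements, but data.ndim = 2"
```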

And I think it fails at this line:
```
output = batch_matmul(logits,v,name='output')
```
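
For comparison, a standalone sketch of `relay.nn.batch_matmul` on two 3-D inputs with illustrative shapes (not the actual shapes from my model) type-checks without this error:

```
import tvm
from tvm import relay

# Standalone sketch: batch_matmul with two well-formed 3-D inputs.
a = relay.var("a", shape=(64, 64, 16), dtype="float32")
b = relay.var("b", shape=(64, 64, 16), dtype="float32")
out = relay.nn.batch_matmul(a, b)        # out[n, i, j] = sum_k a[n, i, k] * b[n, j, k]
mod = tvm.IRModule.from_expr(relay.Function([a, b], out))
mod = relay.transform.InferType()(mod)   # infers (64, 64, 64) without complaint
print(mod)
```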

Thanks a lot!




