我一直在从 pymc3 运行 find_MAP() 。它一直给我同样的错误。
/tmp/ccVQvbdJ.s: Assembler messages:
/tmp/ccVQvbdJ.s:1326: Error: no such instruction: `vfnmadd312sd .LC14(%rip),%xmm2,%xmm1'
/tmp/ccVQvbdJ.s:1327: Error: no such instruction: `vfnmadd312sd .LC15(%rip),%xmm2,%xmm1'
/tmp/ccVQvbdJ.s:1626: Error: no such instruction: `vfnmadd312sd .LC14(%rip),%xmm2,%xmm1'
/tmp/ccVQvbdJ.s:1627: Error: no such instruction: `vfnmadd312sd .LC15(%rip),%xmm2,%xmm1'
这是 Python 抛出的异常回溯：
---------------------------------------------------------------------------
Exception Traceback (most recent call last)
<ipython-input-226-bcee72f62bd6> in <module>()
10
11 # start = pm.find_MAP()
---> 12 start = pm.find_MAP(fmin = scipy.optimize.fmin_l_bfgs_b)
13 step = pm.Metropolis()
14
/usr/local/lib/python2.7/dist-packages/pymc3/tuning/starting.pyc in find_MAP(start, vars, fmin, return_raw, disp, model, *args, **kwargs)
68
69 logp = bij.mapf(model.fastlogp)
---> 70 dlogp = bij.mapf(model.fastdlogp(vars))
71
72 def logp_o(point):
/usr/local/lib/python2.7/dist-packages/pymc3/model.pyc in fastdlogp(self, vars)
147 def fastdlogp(self, vars=None):
148 """Compiled log probability density gradient function"""
--> 149 return self.model.fastfn(gradient(self.logpt, vars))
150
151 def fastd2logp(self, vars=None):
/usr/local/lib/python2.7/dist-packages/pymc3/memoize.pyc in memoizer(*args, **kwargs)
12
13 if key not in cache:
---> 14 cache[key] = obj(*args, **kwargs)
15
16 return cache[key]
/usr/local/lib/python2.7/dist-packages/pymc3/theanof.pyc in gradient(f, vars)
52
53 if vars:
---> 54 return t.concatenate([gradient1(f, v) for v in vars], axis=0)
55 else:
56 return empty_gradient
/usr/local/lib/python2.7/dist-packages/pymc3/theanof.pyc in gradient1(f, v)
42 def gradient1(f, v):
43 """flat gradient of f wrt v"""
---> 44 return t.flatten(t.grad(f, v, disconnected_inputs='warn'))
45
46
/usr/local/lib/python2.7/dist-packages/theano/gradient.pyc in grad(cost, wrt, consider_constant, disconnected_inputs, add_names, known_grads, return_disconnected, null_gradients)
559
560 rval = _populate_grad_dict(var_to_app_to_idx,
--> 561 grad_dict, wrt, cost_name)
562
563 for i in xrange(len(rval)):
/usr/local/lib/python2.7/dist-packages/theano/gradient.pyc in _populate_grad_dict(var_to_app_to_idx, grad_dict, wrt, cost_name)
1322 return grad_dict[var]
1323
-> 1324 rval = [access_grad_cache(elem) for elem in wrt]
1325
1326 return rval
/usr/local/lib/python2.7/dist-packages/theano/gradient.pyc in access_grad_cache(var)
1277 for idx in node_to_idx[node]:
1278
-> 1279 term = access_term_cache(node)[idx]
1280
1281 if not isinstance(term, gof.Variable):
/usr/local/lib/python2.7/dist-packages/theano/gradient.pyc in access_term_cache(node)
971 inputs = node.inputs
972
--> 973 output_grads = [access_grad_cache(var) for var in node.outputs]
974
975 # list of bools indicating if each output is connected to the cost
/usr/local/lib/python2.7/dist-packages/theano/gradient.pyc in access_grad_cache(var)
1277 for idx in node_to_idx[node]:
1278
-> 1279 term = access_term_cache(node)[idx]
1280
1281 if not isinstance(term, gof.Variable):
/usr/local/lib/python2.7/dist-packages/theano/gradient.pyc in access_term_cache(node)
971 inputs = node.inputs
972
--> 973 output_grads = [access_grad_cache(var) for var in node.outputs]
974
975 # list of bools indicating if each output is connected to the cost
/usr/local/lib/python2.7/dist-packages/theano/gradient.pyc in access_grad_cache(var)
1277 for idx in node_to_idx[node]:
1278
-> 1279 term = access_term_cache(node)[idx]
1280
1281 if not isinstance(term, gof.Variable):
/usr/local/lib/python2.7/dist-packages/theano/gradient.pyc in access_term_cache(node)
971 inputs = node.inputs
972
--> 973 output_grads = [access_grad_cache(var) for var in node.outputs]
974
975 # list of bools indicating if each output is connected to the cost
/usr/local/lib/python2.7/dist-packages/theano/gradient.pyc in access_grad_cache(var)
1277 for idx in node_to_idx[node]:
1278
-> 1279 term = access_term_cache(node)[idx]
1280
1281 if not isinstance(term, gof.Variable):
/usr/local/lib/python2.7/dist-packages/theano/gradient.pyc in access_term_cache(node)
971 inputs = node.inputs
972
--> 973 output_grads = [access_grad_cache(var) for var in node.outputs]
974
975 # list of bools indicating if each output is connected to the cost
/usr/local/lib/python2.7/dist-packages/theano/gradient.pyc in access_grad_cache(var)
1277 for idx in node_to_idx[node]:
1278
-> 1279 term = access_term_cache(node)[idx]
1280
1281 if not isinstance(term, gof.Variable):
/usr/local/lib/python2.7/dist-packages/theano/gradient.pyc in access_term_cache(node)
971 inputs = node.inputs
972
--> 973 output_grads = [access_grad_cache(var) for var in node.outputs]
974
975 # list of bools indicating if each output is connected to the cost
/usr/local/lib/python2.7/dist-packages/theano/gradient.pyc in access_grad_cache(var)
1277 for idx in node_to_idx[node]:
1278
-> 1279 term = access_term_cache(node)[idx]
1280
1281 if not isinstance(term, gof.Variable):
/usr/local/lib/python2.7/dist-packages/theano/gradient.pyc in access_term_cache(node)
1111 str(g_shape))
1112
-> 1113 input_grads = node.op.grad(inputs, new_output_grads)
1114
1115 if input_grads is None:
/usr/local/lib/python2.7/dist-packages/theano/tensor/elemwise.pyc in grad(self, inputs, ograds)
676
677 # compute grad with respect to broadcasted input
--> 678 rval = self._bgrad(inputs, ograds)
679
680 # TODO: make sure that zeros are clearly identifiable
/usr/local/lib/python2.7/dist-packages/theano/tensor/elemwise.pyc in _bgrad(self, inputs, ograds)
789 ret.append(None)
790 continue
--> 791 ret.append(transform(scalar_igrad))
792
793 return ret
/usr/local/lib/python2.7/dist-packages/theano/tensor/elemwise.pyc in transform(r)
781 return DimShuffle((), ['x'] * nd, inplace=False)(res)
782 new_r = Elemwise(node.op, {})(
--> 783 *[transform(ipt) for ipt in node.inputs])
784 return new_r
785 ret = []
/usr/local/lib/python2.7/dist-packages/theano/tensor/elemwise.pyc in transform(r)
781 return DimShuffle((), ['x'] * nd, inplace=False)(res)
782 new_r = Elemwise(node.op, {})(
--> 783 *[transform(ipt) for ipt in node.inputs])
784 return new_r
785 ret = []
/usr/local/lib/python2.7/dist-packages/theano/gof/op.pyc in __call__(self, *inputs, **kwargs)
662 # compute output value once with test inputs to validate graph
663 thunk = node.op.make_thunk(node, storage_map, compute_map,
--> 664 no_recycling=[])
665 thunk.inputs = [storage_map[v] for v in node.inputs]
666 thunk.outputs = [storage_map[v] for v in node.outputs]
/usr/local/lib/python2.7/dist-packages/theano/gof/op.pyc in make_thunk(self, node, storage_map, compute_map, no_recycling)
963 try:
964 return self.make_c_thunk(node, storage_map, compute_map,
--> 965 no_recycling)
966 except (NotImplementedError, utils.MethodNotDefined):
967 logger.debug('Falling back on perform')
/usr/local/lib/python2.7/dist-packages/theano/gof/op.pyc in make_c_thunk(self, node, storage_map, compute_map, no_recycling)
877 logger.debug('Trying CLinker.make_thunk')
878 outputs = cl.make_thunk(input_storage=node_input_storage,
--> 879 output_storage=node_output_storage)
880 fill_storage, node_input_filters, node_output_filters = outputs
881
/usr/local/lib/python2.7/dist-packages/theano/gof/cc.pyc in make_thunk(self, input_storage, output_storage, storage_map, keep_lock)
1205 cthunk, in_storage, out_storage, error_storage = self.__compile__(
1206 input_storage, output_storage, storage_map,
-> 1207 keep_lock=keep_lock)
1208
1209 res = _CThunk(cthunk, init_tasks, tasks, error_storage)
/usr/local/lib/python2.7/dist-packages/theano/gof/cc.pyc in __compile__(self, input_storage, output_storage, storage_map, keep_lock)
1150 output_storage,
1151 storage_map,
-> 1152 keep_lock=keep_lock)
1153 return (thunk,
1154 [link.Container(input, storage) for input, storage in
/usr/local/lib/python2.7/dist-packages/theano/gof/cc.pyc in cthunk_factory(self, error_storage, in_storage, out_storage, storage_map, keep_lock)
1600 else:
1601 module = get_module_cache().module_from_key(
-> 1602 key=key, lnk=self, keep_lock=keep_lock)
1603
1604 vars = self.inputs + self.outputs + self.orphans
/usr/local/lib/python2.7/dist-packages/theano/gof/cmodule.pyc in module_from_key(self, key, lnk, keep_lock)
1172 try:
1173 location = dlimport_workdir(self.dirname)
-> 1174 module = lnk.compile_cmodule(location)
1175 name = module.__file__
1176 assert name.startswith(location)
/usr/local/lib/python2.7/dist-packages/theano/gof/cc.pyc in compile_cmodule(self, location)
1511 lib_dirs=self.lib_dirs(),
1512 libs=libs,
-> 1513 preargs=preargs)
1514 except Exception as e:
1515 e.args += (str(self.fgraph),)
/usr/local/lib/python2.7/dist-packages/theano/gof/cmodule.pyc in compile_str(module_name, src_code, location, include_dirs, lib_dirs, libs, preargs, py_module, hide_symbols)
2209 # difficult to read.
2210 raise Exception('Compilation failed (return status=%s): %s' %
-> 2211 (status, compile_stderr.replace('\n', '. ')))
2212 elif config.cmodule.compilation_warning and compile_stderr:
2213 # Print errors just below the command line.
Exception: ("Compilation failed (return status=1): /tmp/ccVQvbdJ.s: Assembler messages:. /tmp/ccVQvbdJ.s:1326: Error: no such instruction: `vfnmadd312sd .LC14(%rip),%xmm2,%xmm1'. /tmp/ccVQvbdJ.s:1327: Error: no such instruction: `vfnmadd312sd .LC15(%rip),%xmm2,%xmm1'. /tmp/ccVQvbdJ.s:1626: Error: no such instruction: `vfnmadd312sd .LC14(%rip),%xmm2,%xmm1'. /tmp/ccVQvbdJ.s:1627: Error: no such instruction: `vfnmadd312sd .LC15(%rip),%xmm2,%xmm1'. ", '[Elemwise{scalar_psi}(beta)]')
我已经尝试了这里提到的各种步骤：https://stackoverflow.com/a/10894096/3413239 ，试过了各种 cxxflags，但都没有效果。
由于我不熟悉 Theano 和 C++,我无法理解到底是什么问题。任何帮助将不胜感激。
`vfnmadd312sd .LC14(%rip), %xmm2, %xmm1` 是什么意思？这些是什么指令？
我的 /tmp/ 目录下有很多文件，但没有找到类似 /tmp/cczHyg7w.s 这样的文件。这些 .s 文件的用途是什么？
我安装了 PyMC3,Theano 也随之安装。通过 PyMC3 安装 Theano 会不会有问题?
编辑
numpy 和 scipy 配置存在问题。到目前为止我做了什么:
- 卸载 numpy 和 scipy
- 使用正确的配置重建了 numpy，并链接到 OpenBLAS（即带 OpenBLAS 集成编译 numpy）
- 通过 pip 重新安装了 scipy、theano、pymc3。

但它仍然报同样的错误。