PyMC3:Theano的cxxflags配置不正确

时间:2016-01-25 23:40:50

标签: python c++ theano pymc3

我一直在运行 pymc3 的 find_MAP(),它总是报同样的错误。

/tmp/ccVQvbdJ.s: Assembler messages:
/tmp/ccVQvbdJ.s:1326: Error: no such instruction: `vfnmadd312sd .LC14(%rip),%xmm2,%xmm1'
/tmp/ccVQvbdJ.s:1327: Error: no such instruction: `vfnmadd312sd .LC15(%rip),%xmm2,%xmm1'
/tmp/ccVQvbdJ.s:1626: Error: no such instruction: `vfnmadd312sd .LC14(%rip),%xmm2,%xmm1'
/tmp/ccVQvbdJ.s:1627: Error: no such instruction: `vfnmadd312sd .LC15(%rip),%xmm2,%xmm1'

这是 Python 抛出的异常:

---------------------------------------------------------------------------
Exception                                 Traceback (most recent call last)
<ipython-input-226-bcee72f62bd6> in <module>()
     10 
     11     # start = pm.find_MAP()
---> 12     start = pm.find_MAP(fmin = scipy.optimize.fmin_l_bfgs_b)
     13     step = pm.Metropolis()
     14 

/usr/local/lib/python2.7/dist-packages/pymc3/tuning/starting.pyc in find_MAP(start, vars, fmin, return_raw, disp, model, *args, **kwargs)
     68 
     69     logp = bij.mapf(model.fastlogp)
---> 70     dlogp = bij.mapf(model.fastdlogp(vars))
     71 
     72     def logp_o(point):

/usr/local/lib/python2.7/dist-packages/pymc3/model.pyc in fastdlogp(self, vars)
    147     def fastdlogp(self, vars=None):
    148         """Compiled log probability density gradient function"""
--> 149         return self.model.fastfn(gradient(self.logpt, vars))
    150 
    151     def fastd2logp(self, vars=None):

/usr/local/lib/python2.7/dist-packages/pymc3/memoize.pyc in memoizer(*args, **kwargs)
     12 
     13         if key not in cache:
---> 14             cache[key] = obj(*args, **kwargs)
     15 
     16         return cache[key]

/usr/local/lib/python2.7/dist-packages/pymc3/theanof.pyc in gradient(f, vars)
     52 
     53     if vars:
---> 54         return t.concatenate([gradient1(f, v) for v in vars], axis=0)
     55     else:
     56         return empty_gradient

/usr/local/lib/python2.7/dist-packages/pymc3/theanof.pyc in gradient1(f, v)
     42 def gradient1(f, v):
     43     """flat gradient of f wrt v"""
---> 44     return t.flatten(t.grad(f, v, disconnected_inputs='warn'))
     45 
     46 

/usr/local/lib/python2.7/dist-packages/theano/gradient.pyc in grad(cost, wrt, consider_constant, disconnected_inputs, add_names, known_grads, return_disconnected, null_gradients)
    559 
    560     rval = _populate_grad_dict(var_to_app_to_idx,
--> 561                                grad_dict, wrt, cost_name)
    562 
    563     for i in xrange(len(rval)):

/usr/local/lib/python2.7/dist-packages/theano/gradient.pyc in _populate_grad_dict(var_to_app_to_idx, grad_dict, wrt, cost_name)
   1322         return grad_dict[var]
   1323 
-> 1324     rval = [access_grad_cache(elem) for elem in wrt]
   1325 
   1326     return rval

/usr/local/lib/python2.7/dist-packages/theano/gradient.pyc in access_grad_cache(var)
   1277                     for idx in node_to_idx[node]:
   1278 
-> 1279                         term = access_term_cache(node)[idx]
   1280 
   1281                         if not isinstance(term, gof.Variable):

/usr/local/lib/python2.7/dist-packages/theano/gradient.pyc in access_term_cache(node)
    971             inputs = node.inputs
    972 
--> 973             output_grads = [access_grad_cache(var) for var in node.outputs]
    974 
    975             # list of bools indicating if each output is connected to the cost

/usr/local/lib/python2.7/dist-packages/theano/gradient.pyc in access_grad_cache(var)
   1277                     for idx in node_to_idx[node]:
   1278 
-> 1279                         term = access_term_cache(node)[idx]
   1280 
   1281                         if not isinstance(term, gof.Variable):

/usr/local/lib/python2.7/dist-packages/theano/gradient.pyc in access_term_cache(node)
    971             inputs = node.inputs
    972 
--> 973             output_grads = [access_grad_cache(var) for var in node.outputs]
    974 
    975             # list of bools indicating if each output is connected to the cost

/usr/local/lib/python2.7/dist-packages/theano/gradient.pyc in access_grad_cache(var)
   1277                     for idx in node_to_idx[node]:
   1278 
-> 1279                         term = access_term_cache(node)[idx]
   1280 
   1281                         if not isinstance(term, gof.Variable):

/usr/local/lib/python2.7/dist-packages/theano/gradient.pyc in access_term_cache(node)
    971             inputs = node.inputs
    972 
--> 973             output_grads = [access_grad_cache(var) for var in node.outputs]
    974 
    975             # list of bools indicating if each output is connected to the cost

/usr/local/lib/python2.7/dist-packages/theano/gradient.pyc in access_grad_cache(var)
   1277                     for idx in node_to_idx[node]:
   1278 
-> 1279                         term = access_term_cache(node)[idx]
   1280 
   1281                         if not isinstance(term, gof.Variable):

/usr/local/lib/python2.7/dist-packages/theano/gradient.pyc in access_term_cache(node)
    971             inputs = node.inputs
    972 
--> 973             output_grads = [access_grad_cache(var) for var in node.outputs]
    974 
    975             # list of bools indicating if each output is connected to the cost

/usr/local/lib/python2.7/dist-packages/theano/gradient.pyc in access_grad_cache(var)
   1277                     for idx in node_to_idx[node]:
   1278 
-> 1279                         term = access_term_cache(node)[idx]
   1280 
   1281                         if not isinstance(term, gof.Variable):

/usr/local/lib/python2.7/dist-packages/theano/gradient.pyc in access_term_cache(node)
    971             inputs = node.inputs
    972 
--> 973             output_grads = [access_grad_cache(var) for var in node.outputs]
    974 
    975             # list of bools indicating if each output is connected to the cost

/usr/local/lib/python2.7/dist-packages/theano/gradient.pyc in access_grad_cache(var)
   1277                     for idx in node_to_idx[node]:
   1278 
-> 1279                         term = access_term_cache(node)[idx]
   1280 
   1281                         if not isinstance(term, gof.Variable):

/usr/local/lib/python2.7/dist-packages/theano/gradient.pyc in access_term_cache(node)
   1111                                 str(g_shape))
   1112 
-> 1113                 input_grads = node.op.grad(inputs, new_output_grads)
   1114 
   1115                 if input_grads is None:

/usr/local/lib/python2.7/dist-packages/theano/tensor/elemwise.pyc in grad(self, inputs, ograds)
    676 
    677         # compute grad with respect to broadcasted input
--> 678         rval = self._bgrad(inputs, ograds)
    679 
    680         # TODO: make sure that zeros are clearly identifiable

/usr/local/lib/python2.7/dist-packages/theano/tensor/elemwise.pyc in _bgrad(self, inputs, ograds)
    789                 ret.append(None)
    790                 continue
--> 791             ret.append(transform(scalar_igrad))
    792 
    793         return ret

/usr/local/lib/python2.7/dist-packages/theano/tensor/elemwise.pyc in transform(r)
    781                 return DimShuffle((), ['x'] * nd, inplace=False)(res)
    782             new_r = Elemwise(node.op, {})(
--> 783                 *[transform(ipt) for ipt in node.inputs])
    784             return new_r
    785         ret = []

/usr/local/lib/python2.7/dist-packages/theano/tensor/elemwise.pyc in transform(r)
    781                 return DimShuffle((), ['x'] * nd, inplace=False)(res)
    782             new_r = Elemwise(node.op, {})(
--> 783                 *[transform(ipt) for ipt in node.inputs])
    784             return new_r
    785         ret = []

/usr/local/lib/python2.7/dist-packages/theano/gof/op.pyc in __call__(self, *inputs, **kwargs)
    662                 # compute output value once with test inputs to validate graph
    663                 thunk = node.op.make_thunk(node, storage_map, compute_map,
--> 664                                            no_recycling=[])
    665                 thunk.inputs = [storage_map[v] for v in node.inputs]
    666                 thunk.outputs = [storage_map[v] for v in node.outputs]

/usr/local/lib/python2.7/dist-packages/theano/gof/op.pyc in make_thunk(self, node, storage_map, compute_map, no_recycling)
    963             try:
    964                 return self.make_c_thunk(node, storage_map, compute_map,
--> 965                                          no_recycling)
    966             except (NotImplementedError, utils.MethodNotDefined):
    967                 logger.debug('Falling back on perform')

/usr/local/lib/python2.7/dist-packages/theano/gof/op.pyc in make_c_thunk(self, node, storage_map, compute_map, no_recycling)
    877         logger.debug('Trying CLinker.make_thunk')
    878         outputs = cl.make_thunk(input_storage=node_input_storage,
--> 879                                 output_storage=node_output_storage)
    880         fill_storage, node_input_filters, node_output_filters = outputs
    881 

/usr/local/lib/python2.7/dist-packages/theano/gof/cc.pyc in make_thunk(self, input_storage, output_storage, storage_map, keep_lock)
   1205         cthunk, in_storage, out_storage, error_storage = self.__compile__(
   1206             input_storage, output_storage, storage_map,
-> 1207             keep_lock=keep_lock)
   1208 
   1209         res = _CThunk(cthunk, init_tasks, tasks, error_storage)

/usr/local/lib/python2.7/dist-packages/theano/gof/cc.pyc in __compile__(self, input_storage, output_storage, storage_map, keep_lock)
   1150                                     output_storage,
   1151                                     storage_map,
-> 1152                                     keep_lock=keep_lock)
   1153         return (thunk,
   1154                 [link.Container(input, storage) for input, storage in

/usr/local/lib/python2.7/dist-packages/theano/gof/cc.pyc in cthunk_factory(self, error_storage, in_storage, out_storage, storage_map, keep_lock)
   1600         else:
   1601             module = get_module_cache().module_from_key(
-> 1602                 key=key, lnk=self, keep_lock=keep_lock)
   1603 
   1604         vars = self.inputs + self.outputs + self.orphans

/usr/local/lib/python2.7/dist-packages/theano/gof/cmodule.pyc in module_from_key(self, key, lnk, keep_lock)
   1172             try:
   1173                 location = dlimport_workdir(self.dirname)
-> 1174                 module = lnk.compile_cmodule(location)
   1175                 name = module.__file__
   1176                 assert name.startswith(location)

/usr/local/lib/python2.7/dist-packages/theano/gof/cc.pyc in compile_cmodule(self, location)
   1511                 lib_dirs=self.lib_dirs(),
   1512                 libs=libs,
-> 1513                 preargs=preargs)
   1514         except Exception as e:
   1515             e.args += (str(self.fgraph),)

/usr/local/lib/python2.7/dist-packages/theano/gof/cmodule.pyc in compile_str(module_name, src_code, location, include_dirs, lib_dirs, libs, preargs, py_module, hide_symbols)
   2209             # difficult to read.
   2210             raise Exception('Compilation failed (return status=%s): %s' %
-> 2211                             (status, compile_stderr.replace('\n', '. ')))
   2212         elif config.cmodule.compilation_warning and compile_stderr:
   2213             # Print errors just below the command line.

Exception: ("Compilation failed (return status=1): /tmp/ccVQvbdJ.s: Assembler messages:. /tmp/ccVQvbdJ.s:1326: Error: no such instruction: `vfnmadd312sd .LC14(%rip),%xmm2,%xmm1'. /tmp/ccVQvbdJ.s:1327: Error: no such instruction: `vfnmadd312sd .LC15(%rip),%xmm2,%xmm1'. /tmp/ccVQvbdJ.s:1626: Error: no such instruction: `vfnmadd312sd .LC14(%rip),%xmm2,%xmm1'. /tmp/ccVQvbdJ.s:1627: Error: no such instruction: `vfnmadd312sd .LC15(%rip),%xmm2,%xmm1'. ", '[Elemwise{scalar_psi}(beta)]')

我尝试了各种办法:按照 https://stackoverflow.com/a/10894096/3413239 的回答尝试了各种 cxxflags,但都没有任何效果。

由于我不熟悉 Theano 和 C++,我无法理解究竟是什么问题。如能提供任何帮助,将不胜感激。

`vfnmadd312sd .LC14(%rip), %xmm2, %xmm1` 是什么意思?这些是什么指令?我在 /tmp/ 里看到很多目录,但没有找到类似 /tmp/cczHyg7w.s 这样的文件。这些文件的作用是什么?

我安装了PyMC3,Theano随之安装。通过PyMC3安装Theano会有问题吗?

修改

numpy 和 scipy 的配置存在问题。以下是我到目前为止所做的尝试:(未补充完整)

0 个答案:

没有答案