pandas, HDF5 not working

Time: 2013-12-22 19:41:37

Tags: python pandas hdf5

I am trying to save a pandas DataFrame to an HDF5 file, but I get a long error when I do so.

I am using pandas 0.11.0.

Here is a description of my DataFrame:

print( df )
<class 'pandas.core.frame.DataFrame'>
DatetimeIndex: 3269 entries, 2000-01-03 00:00:00 to 2012-12-31 00:00:00
Data columns (total 6 columns):
Open         3269  non-null values
High         3269  non-null values
Low          3269  non-null values
Close        3269  non-null values
Volume       3269  non-null values
Adj Close    3269  non-null values
dtypes: float64(5), int64(1)
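
For reference, here is a minimal sketch that builds a DataFrame with the same shape and dtypes (daily OHLCV-style data on a DatetimeIndex); the values below are made up purely for illustration, and the business-day frequency is an assumption:

import numpy as np
import pandas as pd

# Hypothetical reconstruction of the frame: business days over the same range,
# truncated to 3269 rows; five float64 columns and one int64 column.
idx = pd.date_range('2000-01-03', '2012-12-31', freq='B')[:3269]
n = len(idx)

df = pd.DataFrame({
    'Open':      np.random.rand(n),
    'High':      np.random.rand(n),
    'Low':       np.random.rand(n),
    'Close':     np.random.rand(n),
    'Volume':    np.random.randint(1, 10**6, n),   # the single int64 column
    'Adj Close': np.random.rand(n),
}, index=idx)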

I then try to write it out to HDF5:

df.to_hdf( 'c:/temp/test.h5','df', append=False )
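
As far as I can tell, this appears to be roughly equivalent to opening an HDFStore and calling put, which is the path the traceback below goes through; a sketch of that equivalent call:

import pandas as pd

# Roughly what to_hdf does internally: open a store, then put() the frame.
# With append=False and no table flag, this writes the default "fixed" format.
store = pd.HDFStore('c:/temp/test.h5')
store.put('df', df)
store.close()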

I then get a very long error message:

C:\Anaconda\envs\p33\lib\site-packages\pandas\core\generic.py in to_hdf(self, path_or_buf, key, **kwargs)
    486         """ activate the HDFStore """
    487         from pandas.io import pytables
--> 488         return pytables.to_hdf(path_or_buf, key, self, **kwargs)
    489 
    490 # install the indexerse

C:\Anaconda\envs\p33\lib\site-packages\pandas\io\pytables.py in to_hdf(path_or_buf, key, value, mode, complevel, complib, **kwargs)
    156     if isinstance(path_or_buf, str):
    157         with get_store(path_or_buf, mode=mode, complevel=complevel, complib=complib) as store:
--> 158             f(store)
    159     else:
    160         f(path_or_buf)

C:\Anaconda\envs\p33\lib\site-packages\pandas\io\pytables.py in <lambda>(store)
    152 def to_hdf(path_or_buf, key, value, mode=None, complevel=None, complib=None, **kwargs):
    153     """ store this object, close it if we opened it """
--> 154     f = lambda store: store.put(key, value, **kwargs)
    155 
    156     if isinstance(path_or_buf, str):

C:\Anaconda\envs\p33\lib\site-packages\pandas\io\pytables.py in put(self, key, value, table, append, **kwargs)
    534             table
    535         """
--> 536         self._write_to_group(key, value, table=table, append=append, **kwargs)
    537 
    538     def remove(self, key, where=None, start=None, stop=None):

C:\Anaconda\envs\p33\lib\site-packages\pandas\io\pytables.py in _write_to_group(self, key, value, index, table, append, complib, **kwargs)
    869             raise ValueError('Compression not supported on non-table')
    870 
--> 871         s.write(obj = value, append=append, complib=complib, **kwargs)
    872         if s.is_table and index:
    873             s.create_index(columns = index)

C:\Anaconda\envs\p33\lib\site-packages\pandas\io\pytables.py in write(self, obj, **kwargs)
   1996         self.attrs.ndim = data.ndim
   1997         for i, ax in enumerate(data.axes):
-> 1998             self.write_index('axis%d' % i, ax)
   1999 
   2000         # Supporting mixed-type DataFrame objects...nontrivial

C:\Anaconda\envs\p33\lib\site-packages\pandas\io\pytables.py in write_index(self, key, index)
   1633             setattr(self.attrs, '%s_variety' % key, 'regular')
   1634             converted = _convert_index(index).set_name('index')
-> 1635             self.write_array(key, converted.values)
   1636             node = getattr(self.group, key)
   1637             node._v_attrs.kind = converted.kind

C:\Anaconda\envs\p33\lib\site-packages\pandas\io\pytables.py in write_array(self, key, value)
   1805                 self.write_array_empty(key, value)
   1806             else:
-> 1807                 self._handle.createArray(self.group, key, value)
   1808 
   1809         getattr(self.group, key)._v_attrs.transposed = transposed

C:\Anaconda\envs\p33\lib\site-packages\tables\_past.py in oldfunc(*args, **kwargs)
     33     def oldfunc(*args, **kwargs):
     34         warn(warnmsg, PendingDeprecationWarning, stacklevel=2)
---> 35         return obj(*args, **kwargs)
     36     oldfunc.__doc__ = (
     37         obj.__doc__ or '') + "\n\n.. warning::\n\n    " + warnmsg + "\n"

C:\Anaconda\envs\p33\lib\site-packages\tables\file.py in create_array(self, where, name, obj, title, byteorder, createparents, atom, shape)
    890         parentnode = self._get_or_create_path(where, createparents)
    891         return Array(parentnode, name,
--> 892                      obj=obj, title=title, byteorder=byteorder)
    893 
    894     createArray = previous_api(create_array)

C:\Anaconda\envs\p33\lib\site-packages\tables\array.py in __init__(self, parentnode, name, obj, title, byteorder, _log, _atom)
    185         # Ordinary arrays have no filters: leaf is created with default ones.
    186         super(Array, self).__init__(parentnode, name, new, Filters(),
--> 187                                     byteorder, _log)
    188 
    189     def _g_create(self):

C:\Anaconda\envs\p33\lib\site-packages\tables\leaf.py in __init__(self, parentnode, name, new, filters, byteorder, _log)
    260         # is a lazy property that automatically handles their loading.
    261 
--> 262         super(Leaf, self).__init__(parentnode, name, _log)
    263 
    264     def __len__(self):

C:\Anaconda\envs\p33\lib\site-packages\tables\node.py in __init__(self, parentnode, name, _log)
    264             #   Create or open the node and get its object ID.
    265             if new:
--> 266                 self._v_objectid = self._g_create()
    267             else:
    268                 self._v_objectid = self._g_open()

C:\Anaconda\envs\p33\lib\site-packages\tables\array.py in _g_create(self)
    202         # Raise an error in case of unsupported object
    203         if nparr.dtype.kind in ['V', 'U', 'O']:  # in void, unicode, object
--> 204             raise TypeError("Array objects cannot currently deal with void, "
    205                             "unicode or object arrays")
    206 

TypeError: Array objects cannot currently deal with void, unicode or object arrays
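
The final TypeError seems to be raised by PyTables itself rather than pandas: its Array writer refuses void, unicode, and object dtype arrays. A minimal sketch that triggers the same check directly (assuming PyTables 3.x is installed; the file name is arbitrary):

import numpy as np
import tables

# PyTables' create_array rejects arrays whose dtype kind is 'V', 'U' or 'O',
# which is exactly the check shown in tables\array.py above.
h5 = tables.open_file('c:/temp/scratch.h5', mode='w')
try:
    h5.create_array(h5.root, 'x', obj=np.array(['a', 'b'], dtype=object))
except TypeError as e:
    print(e)   # "Array objects cannot currently deal with void, unicode or object arrays"
finally:
    h5.close()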

0 Answers:

No answers yet.