I am currently switching over to opening Python files from my terminal. In particular, I am testing the scripts I rely on to make sure everything runs as expected.
The file I am trying to open is giving me some trouble. It contains:
from os.path import isfile
import numpy as np
import h5py
def gcPath(basePath, snapNum, chunkNum=0):
    """ Return absolute path to a group catalog HDF5 file (modify as needed). """
    gcPath = basePath + '/groups_%03d/' % snapNum
    filePath1 = gcPath + 'groups_%03d.%d.hdf5' % (snapNum, chunkNum)
    filePath2 = gcPath + 'fof_subhalo_tab_%03d.%d.hdf5' % (snapNum, chunkNum)

    if isfile(filePath1):
        return filePath1
    return filePath2

def offsetPath(basePath, snapNum):
    """ Return absolute path to a separate offset file (modify as needed). """
    offsetPath = basePath + '../postprocessing/offsets/offsets_%03d.hdf5' % snapNum
    return offsetPath

def loadObjects(basePath, snapNum, gName, nName, fields):
    """ Load either halo or subhalo information from the group catalog. """
    result = {}

    # make sure fields is not a single element
    if isinstance(fields, basestring):
        fields = [fields]

    # load header from first chunk
    with h5py.File(gcPath(basePath, snapNum), 'r') as f:
        header = dict(f['Header'].attrs.items())
        result['count'] = f['Header'].attrs['N' + nName + '_Total']

        if not result['count']:
            print 'warning: zero groups, empty return (snap=' + str(snapNum) + ').'
            return result

        # if fields not specified, load everything
        if not fields:
            fields = f[gName].keys()

        for field in fields:
            # verify existence
            if field not in f[gName].keys():
                raise Exception("Group catalog does not have requested field [" + field + "]!")

            # replace local length with global
            shape = list(f[gName][field].shape)
            shape[0] = result['count']

            # allocate within return dict
            result[field] = np.zeros(shape, dtype=f[gName][field].dtype)

    # loop over chunks
    wOffset = 0
    for i in range(header['NumFiles']):
        f = h5py.File(gcPath(basePath, snapNum, i), 'r')

        if not f['Header'].attrs['N' + nName + '_ThisFile']:
            continue  # empty file chunk

        # loop over each requested field
        for field in fields:
            # shape and type
            shape = f[gName][field].shape

            # read data local to the current file
            if len(shape) == 1:
                result[field][wOffset:wOffset + shape[0]] = f[gName][field][0:shape[0]]
            else:
                result[field][wOffset:wOffset + shape[0], :] = f[gName][field][0:shape[0], :]

        wOffset += shape[0]
        f.close()

    # only a single field? then return the array instead of a single item dict
    if len(fields) == 1:
        return result[fields[0]]

    return result

def loadSubhalos(basePath, snapNum, fields=None):
    """ Load all subhalo information from the entire group catalog for one snapshot
        (optionally restrict to a subset given by fields). """
    return loadObjects(basePath, snapNum, "Subhalo", "subgroups", fields)

def loadHalos(basePath, snapNum, fields=None):
    """ Load all halo information from the entire group catalog for one snapshot
        (optionally restrict to a subset given by fields). """
    return loadObjects(basePath, snapNum, "Group", "groups", fields)

def loadHeader(basePath, snapNum):
    """ Load the group catalog header. """
    with h5py.File(gcPath(basePath, snapNum), 'r') as f:
        header = dict(f['Header'].attrs.items())
    return header

def load(basePath, snapNum):
    """ Load complete group catalog all at once. """
    r = {}
    r['subhalos'] = loadSubhalos(basePath, snapNum)
    r['halos'] = loadHalos(basePath, snapNum)
    r['header'] = loadHeader(basePath, snapNum)
    return r

def loadSingle(basePath, snapNum, haloID=-1, subhaloID=-1):
    """ Return complete group catalog information for one halo or subhalo. """
    if (haloID < 0 and subhaloID < 0) or (haloID >= 0 and subhaloID >= 0):
        raise Exception("Must specify either haloID or subhaloID (and not both).")

    gName = "Subhalo" if subhaloID >= 0 else "Group"
    searchID = subhaloID if subhaloID >= 0 else haloID

    # old or new format
    if 'fof_subhalo' in gcPath(basePath, snapNum):
        # use separate 'offsets_nnn.hdf5' files
        with h5py.File(offsetPath(basePath, snapNum), 'r') as f:
            offsets = f['FileOffsets/' + gName][()]
    else:
        # use header of group catalog
        with h5py.File(gcPath(basePath, snapNum), 'r') as f:
            offsets = f['Header'].attrs['FileOffsets_' + gName]

    offsets = searchID - offsets
    fileNum = np.max(np.where(offsets >= 0))
    groupOffset = offsets[fileNum]

    # load halo/subhalo fields into a dict
    result = {}
    with h5py.File(gcPath(basePath, snapNum, fileNum), 'r') as f:
        for haloProp in f[gName].keys():
            result[haloProp] = f[gName][haloProp][groupOffset]

    return result
Basically, what this file (groupcat.py) does is retrieve the specified HDF5 files from my user directory for analysis.
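For context, this is roughly how I use the script once it imports cleanly; the base path, snapshot number, field name, and halo ID below are just placeholders for my own setup:

import groupcat  # the script above, saved as groupcat.py

# hypothetical paths/IDs, adjust to your own directory layout
basePath = '/Users/me/sims/output'   # directory containing the groups_NNN/ subdirectories
snapNum = 135

# load one field for all subhalos of a snapshot (a single field returns a bare array)
masses = groupcat.loadSubhalos(basePath, snapNum, fields='SubhaloMass')
print(masses.shape)

# load every field of one particular halo into a dict
halo0 = groupcat.loadSingle(basePath, snapNum, haloID=0)
print(halo0.keys())

If I run the script through my Python environment, I get back: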
HDF5-DIAG: Error detected in HDF5 (1.8.13) thread 0:
#000: H5T.c line 4495 in H5T_path_find(): unable to initialize conversion function
major: Datatype
minor: Unable to initialize object
#001: H5Tconv.c line 8622 in H5T__conv_double_ulong(): disagreement about datatype size
major: Datatype
minor: Unable to initialize object
#002: H5T.c line 2340 in H5T_register(): unable to locate/allocate conversion path
major: Datatype
minor: Unable to initialize object
#003: H5T.c line 4495 in H5T_path_find(): unable to initialize conversion function
major: Datatype
minor: Unable to initialize object
#004: H5Tconv.c line 8568 in H5T__conv_float_ulong(): disagreement about datatype size
major: Datatype
minor: Unable to initialize object
#005: H5T.c line 2340 in H5T_register(): unable to locate/allocate conversion path
major: Datatype
minor: Unable to initialize object
#006: H5T.c line 4495 in H5T_path_find(): unable to initialize conversion function
major: Datatype
minor: Unable to initialize object
#007: H5Tconv.c line 8650 in H5T__conv_ldouble_long(): disagreement about datatype size
major: Datatype
minor: Unable to initialize object
#008: H5T.c line 2340 in H5T_register(): unable to locate/allocate conversion path
major: Datatype
minor: Unable to initialize object
#009: H5T.c line 4495 in H5T_path_find(): unable to initialize conversion function
major: Datatype
minor: Unable to initialize object
#010: H5Tconv.c line 8595 in H5T__conv_double_long(): disagreement about datatype size
major: Datatype
minor: Unable to initialize object
#011: H5T.c line 2340 in H5T_register(): unable to locate/allocate conversion path
major: Datatype
minor: Unable to initialize object
#012: H5T.c line 4495 in H5T_path_find(): unable to initialize conversion function
major: Datatype
minor: Unable to initialize object
#013: H5Tconv.c line 8541 in H5T__conv_float_long(): disagreement about datatype size
major: Datatype
minor: Unable to initialize object
#014: H5T.c line 2340 in H5T_register(): unable to locate/allocate conversion path
major: Datatype
minor: Unable to initialize object
#015: H5T.c line 4495 in H5T_path_find(): unable to initialize conversion function
major: Datatype
minor: Unable to initialize object
#016: H5Tconv.c line 7858 in H5T__conv_ulong_ldouble(): disagreement about datatype size
major: Datatype
minor: Unable to initialize object
#017: H5T.c line 2340 in H5T_register(): unable to locate/allocate conversion path
major: Datatype
minor: Unable to initialize object
#018: H5T.c line 4495 in H5T_path_find(): unable to initialize conversion function
major: Datatype
minor: Unable to initialize object
#019: H5Tconv.c line 7831 in H5T__conv_ulong_double(): disagreement about datatype size
major: Datatype
minor: Unable to initialize object
#020: H5T.c line 2340 in H5T_register(): unable to locate/allocate conversion path
major: Datatype
minor: Unable to initialize object
#021: H5T.c line 4495 in H5T_path_find(): unable to initialize conversion function
major: Datatype
minor: Unable to initialize object
#022: H5Tconv.c line 7804 in H5T__conv_ulong_float(): disagreement about datatype size
major: Datatype
minor: Unable to initialize object
#023: H5T.c line 2340 in H5T_register(): unable to locate/allocate conversion path
major: Datatype
minor: Unable to initialize object
#024: H5T.c line 4495 in H5T_path_find(): unable to initialize conversion function
major: Datatype
minor: Unable to initialize object
#025: H5Tconv.c line 7777 in H5T__conv_long_ldouble(): disagreement about datatype size
major: Datatype
minor: Unable to initialize object
#026: H5T.c line 2340 in H5T_register(): unable to locate/allocate conversion path
major: Datatype
minor: Unable to initialize object
#027: H5T.c line 4495 in H5T_path_find(): unable to initialize conversion function
major: Datatype
minor: Unable to initialize object
#028: H5Tconv.c line 7751 in H5T__conv_long_double(): disagreement about datatype size
major: Datatype
minor: Unable to initialize object
#029: H5T.c line 2340 in H5T_register(): unable to locate/allocate conversion path
major: Datatype
minor: Unable to initialize object
#030: H5T.c line 4495 in H5T_path_find(): unable to initialize conversion function
major: Datatype
minor: Unable to initialize object
#031: H5Tconv.c line 7726 in H5T__conv_long_float(): disagreement about datatype size
major: Datatype
minor: Unable to initialize object
Traceback (most recent call last):
File "groupcat.py", line 6, in <module>
import h5py
File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/h5py/__init__.py", line 23, in <module>
from . import _conv
File "h5py/h5t.pxd", line 14, in init h5py._conv (/Users/travis/build/MacPython/h5py-wheels/h5py/h5py/_conv.c:6961)
File "h5py/h5t.pyx", line 139, in init h5py.h5t (/Users/travis/build/MacPython/h5py-wheels/h5py/h5py/h5t.c:20285)
File "h5py/h5t.pyx", line 73, in h5py.h5t.lockid (/Users/travis/build/MacPython/h5py-wheels/h5py/h5py/h5t.c:2565)
File "h5py/h5t.pyx", line 42, in h5py.h5t.typewrap (/Users/travis/build/MacPython/h5py-wheels/h5py/h5py/h5t.c:2199)
TypeError: Unable to initialize conversion function (Disagreement about datatype size)
Segmentation fault: 11
I previously used Jupyter, which comes with a list of pre-installed packages, so the problem may be that I am missing a package. Looking at the traceback, however, the issue seems to be with h5py, yet h5py is already installed.
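To double-check that, this is roughly how I confirmed which h5py and numpy builds are installed, without triggering the failing import (just a sanity check using pkg_resources):

# query installed package metadata without importing h5py itself
import pkg_resources

for name in ('h5py', 'numpy'):
    try:
        dist = pkg_resources.get_distribution(name)
        print(name + ' ' + dist.version + ' at ' + dist.location)
    except pkg_resources.DistributionNotFound:
        print(name + ' is not installed in this environment')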
Answer (score: 2)
Your example code is quite long; a more concise example would make it easier to help you.
For HDF5 errors, the most important message is usually the first one to appear, here "unable to initialize conversion function". What types of data do you have in your HDF5 files? Can they be mapped to NumPy datatypes?
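If it helps, here is one rough way to list the stored dtype of every dataset, assuming an environment where h5py does import cleanly (for instance the Jupyter setup you mentioned); the file path is only a placeholder for one of your catalog chunks:

import h5py

def print_dtypes(name, obj):
    # print the on-disk dtype of each dataset encountered while walking the file
    if isinstance(obj, h5py.Dataset):
        print(name + ': ' + str(obj.dtype))

with h5py.File('groups_135/fof_subhalo_tab_135.0.hdf5', 'r') as f:
    f.visititems(print_dtypes)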