
Python - Crashing when running hdf5 files

I am currently switching over to opening Python files from the terminal. In particular, I am testing a script to make sure everything runs as expected.

I am running into trouble when trying to open one particular file. It contains:

from os.path import isfile
import numpy as np
import h5py

def gcPath(basePath,snapNum,chunkNum=0):
    """ Return absolute path to a group catalog HDF5 file (modify as needed). """
    gcPath = basePath + '/groups_%03d/' % snapNum
    filePath1 = gcPath + 'groups_%03d.%d.hdf5' % (snapNum, chunkNum)
    filePath2 = gcPath + 'fof_subhalo_tab_%03d.%d.hdf5' % (snapNum, chunkNum)

    if isfile(filePath1):
        return filePath1
    return filePath2

def offsetPath(basePath, snapNum):
    """ Return absolute path to a separate offset file (modify as needed). """
    offsetPath = basePath + '../postprocessing/offsets/offsets_%03d.hdf5' % snapNum

    return offsetPath

def loadObjects(basePath,snapNum,gName,nName,fields):
    """ Load either halo or subhalo information from the group catalog. """
    result = {}

    # make sure fields is not a single element
    if isinstance(fields, basestring):
        fields = [fields]

    # load header from first chunk
    with h5py.File(gcPath(basePath,snapNum),'r') as f:

        header = dict( f['Header'].attrs.items() )
        result['count'] = f['Header'].attrs['N'+nName+'_Total']

        if not result['count']:
            print 'warning: zero groups, empty return (snap='+str(snapNum)+').'
            return result

        # if fields not specified, load everything
        if not fields:
            fields = f[gName].keys()

        for field in fields:
            # verify existence
            if not field in f[gName].keys():
                raise Exception("Group catalog does not have requested field ["+field+"]!")

            # replace local length with global
            shape = list(f[gName][field].shape)
            shape[0] = result['count']

            # allocate within return dict
            result[field] = np.zeros( shape, dtype=f[gName][field].dtype )

    # loop over chunks
    wOffset = 0

    for i in range(header['NumFiles']):
        f = h5py.File(gcPath(basePath,snapNum,i),'r')

        if not f['Header'].attrs['N'+nName+'_ThisFile']:
            continue # empty file chunk

        # loop over each requested field
        for field in fields:
            # shape and type
            shape = f[gName][field].shape

            # read data local to the current file
            if len(shape) == 1:
                result[field][wOffset:wOffset+shape[0]] = f[gName][field][0:shape[0]]
            else:
                result[field][wOffset:wOffset+shape[0],:] = f[gName][field][0:shape[0],:]


        wOffset += shape[0]
        f.close()

    # only a single field? then return the array instead of a single item dict
    if len(fields) == 1:
        return result[fields[0]]

    return result

def loadSubhalos(basePath,snapNum,fields=None):
    """ Load all subhalo information from the entire group catalog for one snapshot
       (optionally restrict to a subset given by fields). """

    return loadObjects(basePath,snapNum,"Subhalo","subgroups",fields)

def loadHalos(basePath,snapNum,fields=None):
    """ Load all halo information from the entire group catalog for one snapshot
       (optionally restrict to a subset given by fields). """

    return loadObjects(basePath,snapNum,"Group","groups",fields)

def loadHeader(basePath,snapNum):
    """ Load the group catalog header. """
    with h5py.File(gcPath(basePath,snapNum),'r') as f:
        header = dict( f['Header'].attrs.items() )

    return header

def load(basePath,snapNum):
    """ Load complete group catalog all at once. """
    r = {}
    r['subhalos'] = loadSubhalos(basePath,snapNum)
    r['halos']    = loadHalos(basePath,snapNum)
    r['header']   = loadHeader(basePath,snapNum)
    return r

def loadSingle(basePath,snapNum,haloID=-1,subhaloID=-1):
    """ Return complete group catalog information for one halo or subhalo. """
    if (haloID < 0 and subhaloID < 0) or (haloID >= 0 and subhaloID >= 0):
        raise Exception("Must specify either haloID or subhaloID (and not both).")

    gName = "Subhalo" if subhaloID >= 0 else "Group"
    searchID = subhaloID if subhaloID >= 0 else haloID

    # old or new format
    if 'fof_subhalo' in gcPath(basePath,snapNum):
        # use separate 'offsets_nnn.hdf5' files
        with h5py.File(offsetPath(basePath,snapNum),'r') as f:
            offsets = f['FileOffsets/'+gName][()]
    else:
        # use header of group catalog
        with h5py.File(gcPath(basePath,snapNum),'r') as f:
            offsets = f['Header'].attrs['FileOffsets_'+gName]

    offsets = searchID - offsets
    fileNum = np.max( np.where(offsets >= 0) )
    groupOffset = offsets[fileNum]

    # load halo/subhalo fields into a dict
    result = {}

    with h5py.File(gcPath(basePath,snapNum,fileNum),'r') as f:
        for haloProp in f[gName].keys():
            result[haloProp] = f[gName][haloProp][groupOffset]

    return result

Basically, what this file does is retrieve specified HDF5 files from my user directory for analysis.
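A minimal sketch of how the functions above would be called, assuming the code is saved as groupcat.py (the base path, snapshot number, and field names are placeholders for illustration only, not the actual setup):

# Usage sketch; basePath, snapNum and the field names are placeholders.
import groupcat

basePath = '/Users/username/sims/Illustris-1/output'   # placeholder output directory
snapNum  = 135                                         # placeholder snapshot number

# single field: loadObjects() returns the bare array instead of a dict
halo_mass = groupcat.loadHalos(basePath, snapNum, fields=['GroupMass'])

# several fields: the return value is a dict keyed by field name, plus 'count'
subhalos = groupcat.loadSubhalos(basePath, snapNum, fields=['SubhaloMass', 'SubhaloPos'])
print(subhalos['count'])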

If I run it through my Python environment, it returns:

HDF5-DIAG: Error detected in HDF5 (1.8.13) thread 0:
  #000: H5T.c line 4495 in H5T_path_find(): unable to initialize conversion function
    major: Datatype
    minor: Unable to initialize object
  #001: H5Tconv.c line 8622 in H5T__conv_double_ulong(): disagreement about datatype size
    major: Datatype
    minor: Unable to initialize object
  #002: H5T.c line 2340 in H5T_register(): unable to locate/allocate conversion path
    major: Datatype
    minor: Unable to initialize object
  #003: H5T.c line 4495 in H5T_path_find(): unable to initialize conversion function
    major: Datatype
    minor: Unable to initialize object
  #004: H5Tconv.c line 8568 in H5T__conv_float_ulong(): disagreement about datatype size
    major: Datatype
    minor: Unable to initialize object
  #005: H5T.c line 2340 in H5T_register(): unable to locate/allocate conversion path
    major: Datatype
    minor: Unable to initialize object
  #006: H5T.c line 4495 in H5T_path_find(): unable to initialize conversion function
    major: Datatype
    minor: Unable to initialize object
  #007: H5Tconv.c line 8650 in H5T__conv_ldouble_long(): disagreement about datatype size
    major: Datatype
    minor: Unable to initialize object
  #008: H5T.c line 2340 in H5T_register(): unable to locate/allocate conversion path
    major: Datatype
    minor: Unable to initialize object
  #009: H5T.c line 4495 in H5T_path_find(): unable to initialize conversion function
    major: Datatype
    minor: Unable to initialize object
  #010: H5Tconv.c line 8595 in H5T__conv_double_long(): disagreement about datatype size
    major: Datatype
    minor: Unable to initialize object
  #011: H5T.c line 2340 in H5T_register(): unable to locate/allocate conversion path
    major: Datatype
    minor: Unable to initialize object
  #012: H5T.c line 4495 in H5T_path_find(): unable to initialize conversion function
    major: Datatype
    minor: Unable to initialize object
  #013: H5Tconv.c line 8541 in H5T__conv_float_long(): disagreement about datatype size
    major: Datatype
    minor: Unable to initialize object
  #014: H5T.c line 2340 in H5T_register(): unable to locate/allocate conversion path
    major: Datatype
    minor: Unable to initialize object
  #015: H5T.c line 4495 in H5T_path_find(): unable to initialize conversion function
    major: Datatype
    minor: Unable to initialize object
  #016: H5Tconv.c line 7858 in H5T__conv_ulong_ldouble(): disagreement about datatype size
    major: Datatype
    minor: Unable to initialize object
  #017: H5T.c line 2340 in H5T_register(): unable to locate/allocate conversion path
    major: Datatype
    minor: Unable to initialize object
  #018: H5T.c line 4495 in H5T_path_find(): unable to initialize conversion function
    major: Datatype
    minor: Unable to initialize object
  #019: H5Tconv.c line 7831 in H5T__conv_ulong_double(): disagreement about datatype size
    major: Datatype
    minor: Unable to initialize object
  #020: H5T.c line 2340 in H5T_register(): unable to locate/allocate conversion path
    major: Datatype
    minor: Unable to initialize object
  #021: H5T.c line 4495 in H5T_path_find(): unable to initialize conversion function
    major: Datatype
    minor: Unable to initialize object
  #022: H5Tconv.c line 7804 in H5T__conv_ulong_float(): disagreement about datatype size
    major: Datatype
    minor: Unable to initialize object
  #023: H5T.c line 2340 in H5T_register(): unable to locate/allocate conversion path
    major: Datatype
    minor: Unable to initialize object
  #024: H5T.c line 4495 in H5T_path_find(): unable to initialize conversion function
    major: Datatype
    minor: Unable to initialize object
  #025: H5Tconv.c line 7777 in H5T__conv_long_ldouble(): disagreement about datatype size
    major: Datatype
    minor: Unable to initialize object
  #026: H5T.c line 2340 in H5T_register(): unable to locate/allocate conversion path
    major: Datatype
    minor: Unable to initialize object
  #027: H5T.c line 4495 in H5T_path_find(): unable to initialize conversion function
    major: Datatype
    minor: Unable to initialize object
  #028: H5Tconv.c line 7751 in H5T__conv_long_double(): disagreement about datatype size
    major: Datatype
    minor: Unable to initialize object
  #029: H5T.c line 2340 in H5T_register(): unable to locate/allocate conversion path
    major: Datatype
    minor: Unable to initialize object
  #030: H5T.c line 4495 in H5T_path_find(): unable to initialize conversion function
    major: Datatype
    minor: Unable to initialize object
  #031: H5Tconv.c line 7726 in H5T__conv_long_float(): disagreement about datatype size
    major: Datatype
    minor: Unable to initialize object


Traceback (most recent call last):
  File "groupcat.py", line 6, in <module>
    import h5py
  File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/h5py/__init__.py", line 23, in <module>
    from . import _conv
  File "h5py/h5t.pxd", line 14, in init h5py._conv (/Users/travis/build/MacPython/h5py-wheels/h5py/h5py/_conv.c:6961)
  File "h5py/h5t.pyx", line 139, in init h5py.h5t (/Users/travis/build/MacPython/h5py-wheels/h5py/h5py/h5t.c:20285)
  File "h5py/h5t.pyx", line 73, in h5py.h5t.lockid (/Users/travis/build/MacPython/h5py-wheels/h5py/h5py/h5t.c:2565)
  File "h5py/h5t.pyx", line 42, in h5py.h5t.typewrap (/Users/travis/build/MacPython/h5py-wheels/h5py/h5py/h5t.c:2199)
TypeError: Unable to initialize conversion function (Disagreement about datatype size)
Segmentation fault: 11

I previously used Jupyter, which comes with a large list of pre-installed packages, so the problem may be that I have not installed some package. But looking at the traceback, it seems the problem lies with h5py, and yet h5py is already installed.
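One diagnostic sketch that might help narrow this down: the log mentions HDF5 1.8.13 and "disagreement about datatype size", which often suggests a mismatch between the HDF5 library h5py was built against and the one it picks up at runtime. Assuming the import succeeds in a clean environment (for example after reinstalling h5py), the versions h5py itself reports can be compared with the 1.8.13 named in the error:

# Diagnostic sketch (not a fix): print the versions h5py itself reports.
# This only works once `import h5py` stops segfaulting, e.g. in a fresh
# virtualenv or after reinstalling h5py, so treat it as a follow-up check.
import h5py

print(h5py.version.version)        # h5py package version
print(h5py.version.hdf5_version)   # HDF5 version h5py was built against
print(h5py.version.info)           # summary including Python and NumPy versions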

Your example code is rather long. A more concise example would make it easier to help.

For HDF5 errors, the most important one is usually the first to appear, here "unable to initialize conversion function". What kinds of data are in the HDF5 files? Can they be mapped to NumPy datatypes?
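A sketch of the kind of check the comment above is asking about (the file name is a placeholder for one of the chunk files gcPath() would return): walk one catalog file and print each dataset's dtype, which h5py exposes directly as a NumPy dtype.

# Sketch: list every dataset in one group catalog chunk along with its dtype.
# The file name is a placeholder; substitute a path returned by gcPath().
import h5py

def show_dtype(name, obj):
    if isinstance(obj, h5py.Dataset):
        print('%s: %s %s' % (name, obj.dtype, obj.shape))

with h5py.File('fof_subhalo_tab_135.0.hdf5', 'r') as f:
    f.visititems(show_dtype)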
