Persistent hashlib state

Time: 2010-01-25 07:54:24

Tags: python persistence hash pickle hashlib

I'd like to create a hashlib instance, update() it, and then persist its state in some way. Later, I'd like to recreate the object from this state data and continue to update() it. Finally, I'd like to get the hexdigest() of the total cumulative run of data. The state persistence has to survive across multiple runs.

Example:

import hashlib
m = hashlib.sha1()
m.update('one')
m.update('two')
# somehow, persist the state of m here

#later, possibly in another process
# recreate m from the persisted state
m.update('three')
m.update('four')
print m.hexdigest()
# at this point, m.hexdigest() should be equal to hashlib.sha1('onetwothreefour').hexdigest()

Edit

I couldn't find a good way to do this with Python back in 2010 and ended up writing a small helper application in C to accomplish it. However, there are some great answers below that weren't available to me, or that I didn't know about, at the time.

5 answers:

Answer 0 (score: 2)

You can do this with ctypes; no helper application in C is needed:

rehash.py

#! /usr/bin/env python

''' A resumable implementation of SHA-256 using ctypes with the OpenSSL crypto library

    Written by PM 2Ring 2014.11.13
'''

import os
from ctypes import *

SHA_LBLOCK = 16
SHA256_DIGEST_LENGTH = 32

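# Opaque mirror of OpenSSL's SHA256_CTX (struct SHA256state_st in openssl/sha.h);
# it is only ever passed back to OpenSSL by reference, so it just needs to be
# at least as large as the real struct.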
class SHA256_CTX(Structure):
    _fields_ = [
        ("h", c_long * 8),
        ("Nl", c_long),
        ("Nh", c_long),
        ("data", c_long * SHA_LBLOCK),
        ("num", c_uint),
        ("md_len", c_uint)
    ]

HashBuffType = c_ubyte * SHA256_DIGEST_LENGTH

#crypto = cdll.LoadLibrary("libcrypto.so")
crypto = cdll.LoadLibrary("libeay32.dll" if os.name == "nt" else "libssl.so")

class sha256(object):
    digest_size = SHA256_DIGEST_LENGTH

    def __init__(self, datastr=None):
        self.ctx = SHA256_CTX()
        crypto.SHA256_Init(byref(self.ctx))
        if datastr:
            self.update(datastr)

    def update(self, datastr):
        crypto.SHA256_Update(byref(self.ctx), datastr, c_int(len(datastr)))

    #Clone the current context
    def _copy_ctx(self):
        ctx = SHA256_CTX()
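        # Dereference a pointer to the new struct and assign, copying the
        # raw contents of self.ctx into it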
        pointer(ctx)[0] = self.ctx
        return ctx

    def copy(self):
        other = sha256()
        other.ctx = self._copy_ctx()
        return other

    def digest(self):
        #Preserve context in case we get called before hashing is
        # really finished, since SHA256_Final() clears the SHA256_CTX
        ctx = self._copy_ctx()
        hashbuff = HashBuffType()
        crypto.SHA256_Final(hashbuff, byref(self.ctx))
        self.ctx = ctx
        return str(bytearray(hashbuff))

    def hexdigest(self):
        return self.digest().encode('hex')

#Tests
def main():
    import cPickle
    import hashlib

    data = ("Nobody expects ", "the spammish ", "imposition!")

    print "rehash\n"

    shaA = sha256(''.join(data))
    print shaA.hexdigest()
    print repr(shaA.digest())
    print "digest size =", shaA.digest_size
    print

    shaB = sha256()
    shaB.update(data[0])
    print shaB.hexdigest()

    #Test pickling
    sha_pickle = cPickle.dumps(shaB, -1)
    print "Pickle length:", len(sha_pickle)
    shaC = cPickle.loads(sha_pickle)

    shaC.update(data[1])
    print shaC.hexdigest()

    #Test copying. Note that copy can be pickled
    shaD = shaC.copy()

    shaC.update(data[2])
    print shaC.hexdigest()


    #Verify against hashlib.sha256()
    print "\nhashlib\n"

    shaD = hashlib.sha256(''.join(data))
    print shaD.hexdigest()
    print repr(shaD.digest())
    print "digest size =", shaD.digest_size
    print

    shaE = hashlib.sha256(data[0])
    print shaE.hexdigest()

    shaE.update(data[1])
    print shaE.hexdigest()

    #Test copying. Note that hashlib copy can NOT be pickled
    shaF = shaE.copy()
    shaF.update(data[2])
    print shaF.hexdigest()


if __name__ == '__main__':
    main()

resumable_SHA-256.py

#! /usr/bin/env python

''' Resumable SHA-256 hash for large files using the OpenSSL crypto library

    The hashing process may be interrupted by Control-C (SIGINT) or SIGTERM.
    When a signal is received, hashing continues until the end of the
    current chunk, then the current file position, total file size, and
    the sha object is saved to a file. The name of this file is formed by
    appending '.hash' to the name of the file being hashed.

    Just re-run the program to resume hashing. The '.hash' file will be deleted
    once hashing is completed.

    Written by PM 2Ring 2014.11.14
'''

import cPickle as pickle
import os
import signal
import sys

import rehash

quit = False

blocksize = 1<<16   # 64kB
blocksperchunk = 1<<8

chunksize = blocksize * blocksperchunk

def handler(signum, frame):
    global quit
    print "\nGot signal %d, cleaning up." % signum
    quit = True


def do_hash(fname, filesize):
    hashname = fname + '.hash'
    if os.path.exists(hashname):
        with open(hashname, 'rb') as f:
            pos, fsize, sha = pickle.load(f)
        if fsize != filesize:
            print "Error: file size of '%s' doesn't match size recorded in '%s'" % (fname, hashname)
            print "%d != %d. Aborting" % (fsize, filesize)
            exit(1)
    else:
        pos, fsize, sha = 0, filesize, rehash.sha256()

    finished = False
    with open(fname, 'rb') as f:
        f.seek(pos)
        while not (quit or finished):
            for _ in xrange(blocksperchunk):
                block = f.read(blocksize)
                if block == '':
                    finished = True
                    break
                sha.update(block)

            pos += chunksize
            sys.stderr.write(" %6.2f%% of %d\r" % (100.0 * pos / fsize, fsize))
            if finished or quit:
                break

    if quit:
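        # The sha object pickles cleanly: its only state is the plain
        # (pointer-free) ctypes SHA256_CTX structure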
        with open(hashname, 'wb') as f:
            pickle.dump((pos, fsize, sha), f, -1)
    elif os.path.exists(hashname):
        os.remove(hashname)

    return (not quit), pos, sha.hexdigest()


def main():
    if len(sys.argv) != 2:
        print "Resumable SHA-256 hash of a file."
        print "Usage:\npython %s filename\n" % sys.argv[0]
        exit(1)

    fname = sys.argv[1]
    filesize = os.path.getsize(fname)

    signal.signal(signal.SIGINT, handler)
    signal.signal(signal.SIGTERM, handler)

    finished, pos, hexdigest = do_hash(fname, filesize)
    if finished:
        print "%s  %s" % (hexdigest, fname)
    else:
        print "sha-256 hash of '%s' incomplete" % fname
        print "%s" % hexdigest
        print "%d / %d bytes processed." % (pos, filesize)


if __name__ == '__main__':
    main()

Demo

import rehash
import pickle
sha=rehash.sha256("Hello ")
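# SHA256_CTX contains no pointer fields, so ctypes lets pickle serialize it directly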
s=pickle.dumps(sha.ctx)
sha=rehash.sha256()
sha.ctx=pickle.loads(s)
sha.update("World")
print sha.hexdigest()

Output

a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b277d9ad9f146e

Note: I would like to thank PM 2Ring for his wonderful code.

Answer 1 (score: 1)

hashlib.sha1 is a wrapper around a C library, so you won't be able to pickle it.

It would need to implement the __getstate__ and __setstate__ methods for Python to be able to access its internal state.

If it is fast enough for your needs, you could use a pure Python implementation of SHA-1 instead.
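
For illustration, here is a minimal check (written for Python 3, unlike the Python 2 snippets elsewhere on this page) showing that hashlib objects simply refuse to be pickled:

import hashlib
import pickle

m = hashlib.sha1()
m.update(b'one')
try:
    pickle.dumps(m)
except TypeError as e:
    # CPython reports something like: cannot pickle '_hashlib.HASH' object
    print(e)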

Answer 2 (score: 1)

I also ran into this problem and could not find an existing solution, so I ended up writing a library that does something very similar to what Devesh Saini described: https://github.com/kislyuk/rehash. For example:

import pickle, rehash
hasher = rehash.sha256(b"foo")
state = pickle.dumps(hasher)

hasher2 = pickle.loads(state)
hasher2.update(b"bar")

assert hasher2.hexdigest() == rehash.sha256(b"foobar").hexdigest()

Answer 3 (score: 0)

Answer 4 (score: -1)

You can easily build a wrapper object around the hash object that transparently persists the data.

The obvious drawback is that it needs to retain the hashed data in full in order to restore the state, so depending on the amount of data you are dealing with this may not suit your needs. It should work fine up to a few tens of MB, though.

Unfortunately, hashlib does not expose the hash algorithms as proper classes; it only provides factory functions that construct the hash objects, so we can't properly subclass them without poking at reserved symbols, which is a situation I'd rather avoid. It only means you have to build your wrapper class from scratch, which is not much overhead in Python anyway.

Here is some sample code that may well fulfill your needs:

import hashlib
from cStringIO import StringIO

class PersistentSha1(object):
    def __init__(self, salt=""):
        self.__setstate__(salt)

    def update(self, data):
        self.__data.write(data)
        self.hash.update(data)

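    # Delegate everything else (digest, hexdigest, ...) to the wrapped hashlib object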
    def __getattr__(self, attr):
        return getattr(self.hash, attr)

    def __setstate__(self, salt=""):
        self.__data = StringIO()
        self.__data.write(salt)
        self.hash = hashlib.sha1(salt)

    def __getstate__(self):
        return self.data

    def _get_data(self):
        self.__data.seek(0)
        return self.__data.read()

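    # Reading 'data' returns the accumulated input; assigning to it
    # re-initialises both the buffer and the hash from scratch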
    data = property(_get_data, __setstate__)

You can access the "data" member itself to get and set the state directly, or you can use Python's pickling facilities:

>>> a = PersistentSha1()
>>> a
<__main__.PersistentSha1 object at 0xb7d10f0c>
>>> a.update("lixo")
>>> a.data
'lixo'
>>> a.hexdigest()
'6d6332a54574aeb35dcde5cf6a8774f938a65bec'
>>> import pickle
>>> b = pickle.dumps(a)
>>>
>>> c = pickle.loads(b)
>>> c.hexdigest()
'6d6332a54574aeb35dcde5cf6a8774f938a65bec'

>>> c.data
'lixo'