Python: running multiple locks across multiple threads

Date: 2014-07-08 03:00:30

Tags: python multithreading

So the situation is that I have multiple methods which may run simultaneously, but each needs its own lock so that it cannot be re-run on another thread while it is still executing. They are set up by initializing a class with some data-processing options:

class InfrequentDataDaemon(object): pass
class FrequentDataDaemon(object): pass

def addMethod(name): 
    def wrapper(f):
        # Attach f to the given daemon class as a static method
        setattr(name, f.__name__, staticmethod(f))
        return f
    return wrapper

class DataProcessors(object): 
    lock = threading.Lock() 
    def __init__(self, options): 
        self.common_settings = options['common_settings']

        self.data_processing_configurations = options['data_processing_configurations'] #Configs for each processing method
        self.data_processing_types = options['data_processing_types'] 
        self.Data_Processing_Functions = {}

        #I __init__ each processing method as a separate function so that it can be locked
        for type in options['data_processing_types']: 
            def bindFunction1(name):
                def func1(self, data=None, lock=None):
                    config = self.data_processing_configurations[data['type']] #I get the right config for the datatype
                    with lock:
                        FetchDataBaseStuff(data['type'])
                        #I don't want this to be run more than once at a time per DataProcessing Type,
                        # but it's fine if multiple DoSomethings run at once, as long as each DataType is different!
                        DoSomething(data, config) 
                        WriteToDataBase(data['type'])
                func1.__name__ = "Processing_for_{}".format(type)
                self.Data_Processing_Functions[func1.__name__] = func1 #Add this function to the Dictionary object
            bindFunction1(type)

        #Then I add some methods to a daemon that are going to check if our Dataprocessors need to be called
        def fast_process_types(data): 
            if example_condition is not True: return
            if data['type'] not in self.data_processing_types: return #Check that we are doing something with this type of data
            threading.Thread(target=self.Data_Processing_Functions["Processing_for_{}".format(data['type'])], args=(self, data, lock)).start()

        def slow_process_types(data): 
            if some_other_condition is not True: return
            if data['type'] not in self.data_processing_types: return #Check that we are doing something with this type of data
            threading.Thread(target=self.Data_Processing_Functions["Processing_for_{}".format(data['type'])], args=(self, data, lock)).start()

        addMethod(InfrequentDataDaemon)(slow_process_types)
        addMethod(FrequentDataDaemon)(fast_process_types)

Sorry for the huge wall of code (I've changed it and trimmed it down as much as I could). Again, the idea is to lock each method in DataProcessors.Data_Processing_Functions, so that each method can only be run by one thread at a time (any other threads calling the same method queue up). How do I set up the locking to achieve this?

1 Answer:

Answer 0 (score: 1)

I'm not sure I completely follow what you're trying to do here, but could you just create a separate threading.Lock object for each type?

class DataProcessors(object): 
    def __init__(self, options): 
        self.common_settings = options['common_settings']

        self.data_processing_configurations = options['data_processing_configurations'] #Configs for each processing method
        self.data_processing_types = options['data_processing_types'] 
        self.Data_Processing_Functions = {}
        self.locks = {}

        #I __init__ each processing method as a separate function so that it can be locked
        for type in options['data_processing_types']: 
            self.locks[type] = threading.Lock()
            def bindFunction1(name):
                def func1(self, data=None):
                    config = self.data_processing_configurations[data['type']] #I get the right config for the datatype
                    with self.locks[data['type']]:
                        FetchDataBaseStuff(data['type'])
                        DoSomething(data, config) 
                        WriteToDataBase(data['type'])
                func1.__name__ = "Processing_for_{}".format(type)
                self.Data_Processing_Functions[func1.__name__] = func1 #Add this function to the Dictionary object
            bindFunction1(type)

        #Then I add some methods to a daemon that are going to check if our Dataprocessors need to be called
        def fast_process_types(data): 
            if example_condition is not True: return
            if data['type'] not in self.data_processing_types: return #Check that we are doing something with this type of data
            threading.Thread(target=self.Data_Processing_Functions["Processing_for_{}".format(data['type'])], args=(self, data)).start()

        def slow_process_types(data): 
            if some_other_condition is not True: return
            if data['type'] not in self.data_processing_types: return #Check that we are doing something with this type of data
            threading.Thread(target=self.Data_Processing_Functions["Processing_for_{}".format(data['type'])], args=(self, data)).start()

        addMethod(InfrequentDataDaemon)(slow_process_types)
        addMethod(FrequentDataDaemon)(fast_process_types)
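
For what it's worth, here's a minimal, self-contained sketch of the same one-lock-per-type idea; the names DATA_TYPES and process_payload, and the time.sleep stand-in for the database/processing calls, are made up for illustration:

import threading
import time

DATA_TYPES = ['fast', 'slow']  # hypothetical type names

# One Lock per data type: threads handling the same type queue up,
# while threads handling different types run in parallel.
locks = {t: threading.Lock() for t in DATA_TYPES}

def process_payload(data):
    with locks[data['type']]:
        # Only one thread per type gets past this point at a time
        time.sleep(0.1)  # stand-in for FetchDataBaseStuff / DoSomething / WriteToDataBase
        print("processed", data['type'])

threads = [threading.Thread(target=process_payload, args=({'type': t},))
           for t in DATA_TYPES * 3]
for th in threads: th.start()
for th in threads: th.join()

One caveat: if new types can show up at runtime, adding entries to the locks dictionary itself needs to be protected by another lock, otherwise two threads could end up creating different Lock objects for the same type.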