Asynchronous data processing by several functions

Date: 2017-10-02 04:17:50

Tags: asynchronous go

I receive data over HTTP, and it needs to be processed by two different functions. It is important that each function processes the data in order: the file gets, for example, 1, 2, 3, 4, 5, and the database also records 1, 2, 3, 4, 5, as a FIFO model.

Now I have run into a problem. Data arrives continuously, and sometimes the database takes a long time to service an update request, so I cannot update the file in time. All that matters to me is that the data ends up in both the file and the database, each in order.

I could use a buffered channel, but I do not know how much data may be waiting in the queue, and I do not want to just declare some buffer size and hope it is large enough. I tried adding more goroutines in the NewData function, but then my data is not written in order.

This code demonstrates the problem.

package main

import (
    "fmt"
    "time"
)

type procHandler interface {
    Start()
    NewData(newdata []byte)
}

type fileWriter struct {
    Data chan []byte
}

// Start launches a goroutine that consumes Data and writes each item
// immediately (the fast consumer).
func (proc *fileWriter) Start() {
    proc.Data = make(chan []byte)
    go func() {
        for {
            obj := <-proc.Data

            fmt.Printf("proc %T ", proc)
            fmt.Println(obj)
        }
    }()
}

func (proc *fileWriter) NewData(newdata []byte) {
    proc.Data <- newdata
}

type sqlWriter struct {
    Data chan []byte
}

// Start launches a goroutine that consumes Data but takes 5 seconds
// per item (the slow consumer).
func (proc *sqlWriter) Start() {
    proc.Data = make(chan []byte)
    go func() {
        for {
            obj := <-proc.Data
            time.Sleep(5 * time.Second) // simulate a slow database write
            fmt.Printf("proc %T ", proc)
            fmt.Println(obj)
        }
    }()
}

func (proc *sqlWriter) NewData(newdata []byte) {
    proc.Data <- newdata
}

var processors = []procHandler{}

// receiver imitates data arriving over HTTP and fans each item out
// to every processor in order.
func receiver() {
    newDataImitateByteRange := 30
    for i := 0; i < newDataImitateByteRange; i++ {
        pseudoData := []byte{byte(i)}

        for _, handler := range processors {
            handler.NewData(pseudoData)
        }
    }
}

func main() {
    // file writer
    fileUpdate := &fileWriter{}
    processors = append(processors, fileUpdate)

    // sql writer
    sqlUpdate := &sqlWriter{}
    processors = append(processors, sqlUpdate)

    sqlUpdate.Start()
    fileUpdate.Start()

    go receiver()

    fmt.Scanln()
}

Runnable code: https://play.golang.org/p/rSshsJYZ4h

Output:

proc *main.fileWriter [0]
proc *main.fileWriter [1]
proc *main.sqlWriter [0] (sleep)
proc *main.fileWriter [2] (printed 5 seconds later, once the previous sqlWriter item is processed)
proc *main.sqlWriter [1] (sleep)
proc *main.fileWriter [3] (printed 5 seconds later, once the previous sqlWriter item is processed)
proc *main.sqlWriter [2]
proc *main.fileWriter [4]
proc *main.sqlWriter [3]
proc *main.fileWriter [5]
proc *main.sqlWriter [4]
proc *main.fileWriter [6]

What I want:

proc *main.fileWriter [0]
proc *main.fileWriter [1]
proc *main.fileWriter [2]
proc *main.fileWriter [3]
proc *main.fileWriter [4]
proc *main.fileWriter [5]
proc *main.fileWriter [6]
proc *main.sqlWriter [0] (the handler begins executing after 5 seconds)
proc *main.sqlWriter [1] (sleep)
proc *main.sqlWriter [2] (sleep)
proc *main.sqlWriter [3] (sleep)
proc *main.sqlWriter [4] (sleep)
proc *main.sqlWriter [5] (sleep)
proc *main.sqlWriter [6] (sleep)

Any help would be appreciated, thanks!

1 Answer:

Answer 0 (score: 0)

It sounds like what you are looking for is a channel that resizes (grows or shrinks) with the data queued on it. In your code the send in NewData blocks on the sqlWriter's unbuffered channel while its goroutine sleeps, which stalls the receiver loop and delays the fileWriter as well. A resizable channel can be implemented by putting a queue between an input channel and an output channel and servicing both with a goroutine. Here is one solution: https://github.com/gammazero/bigchan#bigchan
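
To sketch the idea, here is a minimal illustration of that pattern, not the actual bigchan implementation; the name unboundedChan is made up for this example, and it uses []byte items to match your code:

// unboundedChan returns an input and an output channel. A goroutine
// shuttles items between them, holding overflow in a slice so that
// senders never block.
func unboundedChan() (chan<- []byte, <-chan []byte) {
    in := make(chan []byte)
    out := make(chan []byte)
    go func() {
        var queue [][]byte // overflow buffer; grows and shrinks with demand
        for {
            if len(queue) == 0 {
                // Nothing queued: wait for new input.
                v, ok := <-in
                if !ok {
                    close(out)
                    return
                }
                queue = append(queue, v)
                continue
            }
            // Items queued: accept new input or hand off the head,
            // whichever is ready first.
            select {
            case v, ok := <-in:
                if !ok {
                    for _, v := range queue { // drain before closing
                        out <- v
                    }
                    close(out)
                    return
                }
                queue = append(queue, v)
            case out <- queue[0]:
                queue = queue[1:]
            }
        }
    }()
    return in, out
}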

I used a BigChan as the Data channel in both your fileWriter and sqlWriter, and it appears to produce the results you are looking for. Here is your reworked code:

package main

import (
    "fmt"
    "time"

    "github.com/gammazero/bigchan"
)

// Maximum number of items to buffer. Set to -1 for unlimited.
const limit = 65536

type procHandler interface {
    Start()
    NewData(newdata []byte)
}

type fileWriter struct {
    Data *bigchan.BigChan
}

func (proc *fileWriter) Start() {
    proc.Data = bigchan.New(limit)
    go func() {
        for {
            _obj := <-proc.Data.Out()
            obj := _obj.([]byte) // Out() yields interface{} values, so assert back to []byte

            fmt.Printf("proc %T ", proc)
            fmt.Println(obj)
        }
    }()
}

func (proc *fileWriter) NewData(newdata []byte) {
    proc.Data.In() <- newdata
}

type sqlWriter struct {
    Data *bigchan.BigChan
}

func (proc *sqlWriter) Start() {
    proc.Data = bigchan.New(limit)

    go func() {
        for {
            _obj := <-proc.Data.Out()
            obj := _obj.([]byte)
            time.Sleep(5 * time.Second) // simulate a slow database write
            fmt.Printf("proc %T ", proc)
            fmt.Println(obj)
        }
    }()
}

func (proc *sqlWriter) NewData(newdata []byte) {
    proc.Data.In() <- newdata
}

var processors = []procHandler{}

func receiver() {
    newDataImitateByteRange := 30
    for i := 0; i < newDataImitateByteRange; i++ {
        pseudoData := []byte{byte(i)}

        for _, handler := range processors {
            handler.NewData(pseudoData)
        }
    }
}

func main() {
    // file writer
    fileUpdate := &fileWriter{}
    processors = append(processors, fileUpdate)

    // sql writer
    sqlUpdate := &sqlWriter{}
    processors = append(processors, sqlUpdate)

    sqlUpdate.Start()
    fileUpdate.Start()

    go receiver()

    fmt.Scanln()
}
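
With the resizable buffer in place, the send in NewData returns immediately instead of waiting on a consumer, so receiver hands all 30 items to both writers without being stalled by the slow sqlWriter. The fileWriter prints everything right away, and the sqlWriter drains its own queue in order at one item per 5 seconds, which matches the ordering you want.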