Currently using: https://github.com/Azure/azure-sdk-for-go
Overview: I am downloading blobs from an Azure blob store, parsing them, and uploading the transcribed blobs back to the same store under another folder called filtered.
Problem: The uploaded blob does not end up in the filtered folder but in the root of the container, and it is 0 B with no data. The upload also appears to destroy the blob I just downloaded, leaving it at 0 B with no data as well. Downloading the blob works fine and I am able to get a []byte of the data.
Code:
import (
	"bufio"
	"bytes"
	"compress/gzip"
	"crypto/md5"
	"encoding/base64"
	"fmt"
	"io/ioutil"
	"math/big"
	"os"
	"strconv"
	"strings"

	"github.com/Azure/azure-sdk-for-go/storage"
)
func main() {
	var filter bool = true //check smart filter
	test := 0
	configfile, err := os.Open("config.txt") //open config file
	check(err)                               //check file opened
	ConfigScanner := bufio.NewScanner(configfile) //open buffer
	ConfigScanner.Scan()                          //get serial number
	serialnum := ConfigScanner.Text()
	configfile.Close()             //close the config file
	CanLUT := ParseDBC("file.dbc") //parse the associated DBC file
	check(err)                     //check file opened
	m := make(map[int64]string)    //map of last seen message
	//Azure API
	client, err := storage.NewBasicClient(accountName, accountKey) //get client from azure
	check(err)
	bsc := client.GetBlobService()                    //access blob service
	cnt := bsc.GetContainerReference("containerblob") //get container of the blob
	LBP := storage.ListBlobsParameters{}
	LBP.Prefix = "dev4/dev4"        //only get blobs with dev4/dev4 prefix
	list, err := cnt.ListBlobs(LBP) //get list of all matching blobs
	check(err)
	for _, b := range list.Blobs { //read all blobs from azure with prefix dev4/dev4
		oa := make([]byte, 0)
		fmt.Println("getting blob: ", b.Name)
		readCloser, err := b.Get(nil) //get blob data
		check(err)
		bytesRead, err := ioutil.ReadAll(readCloser) //read blob data into a []byte
		check(err)
		if len(bytesRead) < 1 {
			continue
		}
		br := bytes.NewReader(bytesRead)
		zr, err := gzip.NewReader(br) //use gzip reader for zipped data
		check(err)
		uz, err := ioutil.ReadAll(zr) //uz is the []byte of the unzipped file
		check(err)
		readCloser.Close() //close the reader
		zr.Close()         //close gzip reader
		r := bytes.NewReader(uz)
		scanner := bufio.NewScanner(r)
		for scanner.Scan() { //loop over each line in the input file
			temp := ParseToFrame(scanner.Text()) //parse the line into a usable struct
			_, exists := m[temp.nodeid]          //check if the frame has already been seen and is stored in the hashmap
			if exists { //already in the map
				if ChkDuplicate(m, temp) { //returns true when the message is not a duplicate, so store it
					m[temp.nodeid] = temp.data                        //update the data in the hashmap
					DecodeFrame(temp, &oa, CanLUT, filter, serialnum) //decode the frame and output it to another file
				}
			} else { //does not exist in the map, so add it
				m[temp.nodeid] = temp.data
				DecodeFrame(temp, &oa, CanLUT, filter, serialnum) //decode the frame and output it to another file
			}
		} //end blob file
		filestr := strings.Split(b.Name, "_")[1]
		filestr = "filtered/filtered_" + filestr
		var buffout bytes.Buffer
		gz := gzip.NewWriter(&buffout)
		_, err = gz.Write(oa)
		check(err)
		gz.Flush()
		gz.Close()
		compressedData := buffout.Bytes()
		//push block blob to azure
		fmt.Println("uploading: ", filestr)
		clientnew, err := storage.NewBasicClient(accountName, accountKey) //get client from azure
		check(err)
		senderbsc := clientnew.GetBlobService()                   //access blob service
		sendercnt := senderbsc.GetContainerReference("storeblob") //get container of the store blob
		bblob := sendercnt.GetBlobReference("filtered_" + strings.Split(b.Name, "/")[1])
		err = bblob.CreateBlockBlob(nil)
		check(err)
		blockID := base64.StdEncoding.EncodeToString([]byte("00000"))
		err = bblob.PutBlock(blockID, compressedData, nil)
		check(err)
		list, err := b.GetBlockList(storage.BlockListTypeUncommitted, nil)
		check(err)
		uncommittedBlocksList := make([]storage.Block, len(list.UncommittedBlocks))
		for i := range list.UncommittedBlocks {
			uncommittedBlocksList[i].ID = list.UncommittedBlocks[i].Name
			uncommittedBlocksList[i].Status = storage.BlockStatusUncommitted
		}
		err = b.PutBlockList(uncommittedBlocksList, nil)
		//check if the upload was good
		CheckHash(&compressedData, filestr, sendercnt)
		check(err)
		if test == 0 {
			break //test: only read one file
		}
		test++
	} //end for blobs
} //end main
Answer 0 (score: 1)
As @DavidMakogon said, you can use the CreateBlockBlobFromReader API of the Azure Storage SDK for Go to upload to Azure Blob Storage from any reader that implements the io.Reader interface.
Here is my sample code.
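A minimal sketch of how CreateBlockBlobFromReader can be used, assuming the same accountName/accountKey credentials and the "storeblob" container from the question; the blob name and payload below are placeholders, not part of the original code:

package main

import (
	"bytes"
	"fmt"

	"github.com/Azure/azure-sdk-for-go/storage"
)

func main() {
	accountName := "<account-name>" // placeholder credentials
	accountKey := "<account-key>"

	client, err := storage.NewBasicClient(accountName, accountKey)
	if err != nil {
		panic(err)
	}
	cnt := client.GetBlobService().GetContainerReference("storeblob")

	data := []byte("example payload") // e.g. your gzipped output bytes
	blob := cnt.GetBlobReference("filtered/filtered_example.gz")

	// CreateBlockBlobFromReader uploads the whole reader in one call,
	// so no separate PutBlock/PutBlockList is needed afterwards.
	if err := blob.CreateBlockBlobFromReader(bytes.NewReader(data), nil); err != nil {
		panic(err)
	}
	fmt.Println("uploaded", blob.Name)
}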
Hope it helps.
Answer 1 (score: 0)
compressedData := buffout.Bytes()
//push block blob to azure
fmt.Println("uploading: ", filestr)
blockID := base64.StdEncoding.EncodeToString([]byte("00001"))
newblob := cnt.GetBlobReference(filestr) //reference the new blob by its full "filtered/filtered_..." path so it lands in the filtered folder
err = newblob.CreateBlockBlobFromReader(bytes.NewReader(compressedData), nil) //upload the compressed data straight from a reader
check(err)
err = newblob.PutBlock(blockID, compressedData, nil)
check(err)
list, err := newblob.GetBlockList(storage.BlockListTypeUncommitted, nil) //block-list calls now target newblob, not the downloaded blob b
check(err)
uncommittedBlocksList := make([]storage.Block, len(list.UncommittedBlocks))
for i := range list.UncommittedBlocks {
	uncommittedBlocksList[i].ID = list.UncommittedBlocks[i].Name
	uncommittedBlocksList[i].Status = storage.BlockStatusUncommitted
}
err = newblob.PutBlockList(uncommittedBlocksList, nil)
check(err)
This solved my problem; it turns out I had made a typo. The line
list, err := b.GetBlockList(storage.BlockListTypeUncommitted, nil)
was calling the block-list operations on the original blob b instead of the new blob, which caused Azure to overwrite the original blob rather than committing the data to the new blob named filestr.
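In other words, the block-list calls have to target the newly created blob reference; a minimal before/after sketch of the offending lines (variable names as in the code above):

// buggy: the calls went to the downloaded blob b, so (likely because b had no
// uncommitted blocks) an empty block list was committed to it, wiping it to 0 B
list, err := b.GetBlockList(storage.BlockListTypeUncommitted, nil)
err = b.PutBlockList(uncommittedBlocksList, nil)

// fixed: the calls go to the new blob created under filtered/
list, err = newblob.GetBlockList(storage.BlockListTypeUncommitted, nil)
err = newblob.PutBlockList(uncommittedBlocksList, nil)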