Currently using: https://github.com/Azure/azure-sdk-for-go
overview: I'm currently downloading a blob from the azure blob store, parsing the blob and uploading a transcribed blob back to the store in another folder called filtered.
Problem: The blob uploaded is not in the folder filtered but in the root directory and the blob is 0B with no data. The blob upload also seems to destroy the blob i just downloaded resulting in the blob being 0B with no data. Downloading the blob works fine and i'm able to get the []byte array of the data.
Code:
import (
"bufio"
"fmt"
"os"
"strings"
"strconv"
"math/big"
"bytes"
"io/ioutil"
"github.com/Azure/azure-sdk-for-go/storage"
"compress/gzip"
"encoding/base64"
"crypto/md5"
)
func main() {
var filter bool = true //check smart filter
test := 0
configfile, err := os.Open("config.txt") //open configfile
check(err) //check file opened
ConfigScanner := bufio.NewScanner(configfile) //open buffer
ConfigScanner.Scan() //get serial number
serialnum := ConfigScanner.Text()
configfile.Close() //close the config file
CanLUT := ParseDBC("file.dbc") //parse the associated DBC file
check(err) //check file opened
m := make(map[int64]string) //map of last seen message
//Azure API
client, err := storage.NewBasicClient(accountName, accountKey) //get client from azure
check(err)
bsc := client.GetBlobService() //access blob service
cnt := bsc.GetContainerReference("containerblob") //get container of the blob
LBP := storage.ListBlobsParameters{}
LBP.Prefix = "dev4/dev4" //only get blobs with dev4/dev4 prefix
list, err := cnt.ListBlobs(LBP) //get list of all matching blobs
check(err)
for _, b := range list.Blobs { //read all blobs from azure with prefix dev4/dev4
oa := make([]byte,0)
fmt.Println("getting blob: ",b.Name)
readCloser, err := b.Get(nil) //get blob data
check(err)
bytesRead, err := ioutil.ReadAll(readCloser) //read blob data to byte[]
check(err)
if len(bytesRead) < 1 {
continue
}
br := bytes.NewReader(bytesRead)
zr, err := gzip.NewReader(br) //use gzip reader for zipped data
check(err)
uz, err := ioutil.ReadAll(zr) //uz byte[] of unzipped file
check(err)
readCloser.Close() //close the reader
zr.Close() //close gzip reader
r := bytes.NewReader(uz)
scanner := bufio.NewScanner(r)
for scanner.Scan() { //loop on each line in the input file
temp := ParseToFrame(scanner.Text()) //parse the line into a usable struct
_, exists := m[temp.nodeid] //check if the frame has alread been seen and is stored in the hashmap
if exists { //if exists in the map
if ChkDuplicate(m, temp) { //is the msg a duplicate? if true the message isnt so add it
m[temp.nodeid] = temp.data //update the data to the hashmap
DecodeFrame(temp, &oa, CanLUT, filter, serialnum) //decode the frame and output it to another file
}
} else { //DNE in map so add it
m[temp.nodeid] = temp.data
DecodeFrame(temp, &oa, CanLUT,filter, serialnum) //decode the frame and output it to another file
}
}//end blob file
filestr := strings.Split(b.Name, "_")[1]
filestr = "filtered/filtered_" + filestr
var buffout bytes.Buffer
gz := gzip.NewWriter(&buffout)
_, err = gz.Write(oa)
check(err)
gz.Flush()
gz.Close()
compressedData := buffout.Bytes()
//push block blob to azure
fmt.Println("uploading: ",filestr)
clientnew, err := storage.NewBasicClient(accountName, accountKey) //get client from azure
check(err)
senderbsc := clientnew.GetBlobService() //access blob service
sendercnt := senderbsc.GetContainerReference("storeblob") //get container of store blob
bblob := sendercnt.GetBlobReference("filtered_" + strings.Split(b.Name, "/")[1])
err = bblob.CreateBlockBlob(nil)
check(err)
blockID := base64.StdEncoding.EncodeToString([]byte("00000"))
err = bblob.PutBlock(blockID, compressedData, nil)
check(err)
list, err := b.GetBlockList(storage.BlockListTypeUncommitted, nil)
check(err)
uncommittedBlocksList := make([]storage.Block, len(list.UncommittedBlocks))
for i := range list.UncommittedBlocks {
uncommittedBlocksList[i].ID = list.UncommittedBlocks[i].Name
uncommittedBlocksList[i].Status = storage.BlockStatusUncommitted
}
err = b.PutBlockList(uncommittedBlocksList, nil)
//check if upload was good.
CheckHash(&compressedData,filestr,sendercnt)
check(err)
if(test == 0){
break //test only read one file
}
test++
}//end for blobs
}//end main