I followed the example on the AWS site for gzipping files and streaming them to S3, found here: http://docs.aws.amazon.com/sdk-for-go/latest/v1/developerguide/common-examples.title.html
I am having an issue where the only thing landing in my S3 bucket are files with basically just the GZIP headers. Every single file is 23b in size.
Any idea what would cause this?
My code:
func (t *Table) Upload() {
year := time.Now().Format("2006")
month := time.Now().Format("01")
day := time.Now().Format("02")
reader, writer := io.Pipe()
go func() {
gw := gzip.NewWriter(writer)
io.Copy(gw, t.File)
t.File.Close()
gw.Close()
writer.Close()
}()
uploader := s3manager.NewUploader(session.New(&aws.Config{Region: aws.String(os.Getenv("AWS_REGION"))}))
result, err := uploader.Upload(&s3manager.UploadInput{
Body: reader,
Bucket: aws.String(os.Getenv("S3_BUCKET")),
Key: aws.String(fmt.Sprintf("%s/%s/%s/%s/%s", os.Getenv("S3_KEY"), year, month, day, t.Name+".csv.gz")),
})
if err != nil {
log.WithField("error", err).Fatal("Failed to upload file.")
}
log.WithField("location", result.Location).Info("Successfully uploaded to")
}