File not uploaded to directory using Go

I am trying to upload files to a directory using the Go code below.
The issue I am having is that when I run the code, it prints that the file was uploaded successfully,
but when I check the directory, no file has been uploaded there. Any solution will be appreciated. Thanks.
package main

import (
    "fmt"
    "io"
    "net/http"
    "os"
)

func uploadHandler(w http.ResponseWriter, r *http.Request) {
    // the FormFile function takes in the POST input id file
    file, header, err := r.FormFile("file")
    if err != nil {
        fmt.Fprintln(w, err)
        return
    }
    defer file.Close()

    out, err := os.Create("/upload")
    if err != nil {
        fmt.Fprintf(w, "Unable to create the file for writing. Check your write access privilege")
        return
    }
    defer out.Close()

    // write the content from POST to the file
    _, err = io.Copy(out, file)
    if err != nil {
        fmt.Fprintln(w, err)
    }

    fmt.Fprintf(w, "File uploaded successfully : ")
    fmt.Fprintf(w, header.Filename)
}

func main() {
    http.HandleFunc("/", uploadHandler)
    http.ListenAndServe(":8080", nil)
}
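
A likely cause, judging only from the code above: os.Create("/upload") does not write into a directory called upload; it creates (or truncates) a single file named upload at the root of the filesystem, and the uploaded file's original name is never used, so nothing appears in the directory you expect. A minimal sketch of that step, assuming a local ./uploads directory (the directory name is my own choice, not from the original post):

// inside uploadHandler, after r.FormFile succeeds; needs "path/filepath" in the import block
if err := os.MkdirAll("./uploads", 0755); err != nil {
    fmt.Fprintln(w, err)
    return
}
// filepath.Base strips any directory components from the client-supplied name
out, err := os.Create(filepath.Join("./uploads", filepath.Base(header.Filename)))
if err != nil {
    fmt.Fprintln(w, "Unable to create the file for writing. Check your write access privilege")
    return
}
defer out.Close()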

Below is how I get my files uploaded to the server.
package main

import (
    "fmt"
    "io/ioutil"
    "net/http"
)

func uploadFile(w http.ResponseWriter, r *http.Request) {
    fmt.Println("File Upload Endpoint Hit")

    // Parse our multipart form, 10 << 20 specifies a maximum
    // upload of 10 MB files.
    r.ParseMultipartForm(10 << 20)

    // FormFile returns the first file for the given key `myFile`
    // it also returns the FileHeader so we can get the Filename,
    // the Header and the size of the file
    file, handler, err := r.FormFile("myFile")
    if err != nil {
        fmt.Println("Error Retrieving the File")
        fmt.Println(err)
        return
    }
    defer file.Close()

    fmt.Printf("Uploaded File: %+v\n", handler.Filename)
    fmt.Printf("File Size: %+v\n", handler.Size)
    fmt.Printf("MIME Header: %+v\n", handler.Header)

    // Create a temporary file within our temp-images directory that follows
    // a particular naming pattern
    tempFile, err := ioutil.TempFile("temp-images", "upload-*.png")
    if err != nil {
        fmt.Println(err)
    }
    defer tempFile.Close()

    // read all of the contents of our uploaded file into a
    // byte array
    fileBytes, err := ioutil.ReadAll(file)
    if err != nil {
        fmt.Println(err)
    }

    // write this byte array to our temporary file
    tempFile.Write(fileBytes)

    // return that we have successfully uploaded our file!
    fmt.Fprintf(w, "Successfully Uploaded File\n")
}

func setupRoutes() {
    http.HandleFunc("/upload", uploadFile)
    http.ListenAndServe(":8080", nil)
}

func main() {
    fmt.Println("Hello World")
    setupRoutes()
}
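
To exercise either handler without a browser form, here is a minimal Go client sketch. It assumes the handler above is running on localhost:8080 with route /upload and field name myFile, and picture.png is a hypothetical local file:

package main

import (
    "bytes"
    "fmt"
    "io"
    "mime/multipart"
    "net/http"
    "os"
)

func main() {
    // build a multipart body in memory with a single file field named "myFile"
    var body bytes.Buffer
    mw := multipart.NewWriter(&body)

    f, err := os.Open("picture.png") // hypothetical local file
    if err != nil {
        panic(err)
    }
    defer f.Close()

    fw, err := mw.CreateFormFile("myFile", "picture.png")
    if err != nil {
        panic(err)
    }
    if _, err := io.Copy(fw, f); err != nil {
        panic(err)
    }
    mw.Close() // writes the closing multipart boundary

    resp, err := http.Post("http://localhost:8080/upload", mw.FormDataContentType(), &body)
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()

    fmt.Println("server replied:", resp.Status)
    io.Copy(os.Stdout, resp.Body)
}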

Related

Golang: Facing error while creating .tar.gz file having large name

I am trying to create a .tar.gz file from a folder that contains multiple files and folders. Once the .tar.gz file is created, the files are not extracted properly when the archive is extracted. I think this is mostly because of long names or paths exceeding some number of characters, because the same code works when the filename/path is short. I referred to https://github.com/golang/go/issues/17630 and tried adding the code below, but it did not help.
header.Uid = 0
header.Gid = 0
I am using the simple code below to create the .tar.gz. The approach is: I create a temp folder, do some processing on the files, and from that temp path I create the .tar.gz file, hence the pre-defined temp folder path used below.
package main

import (
    "archive/tar"
    "compress/gzip"
    "fmt"
    "io"
    "log"
    "os"
    fp "path/filepath"
)

func main() {
    // Create output file
    out, err := os.Create("output.tar.gz")
    if err != nil {
        log.Fatalln("Error writing archive:", err)
    }
    defer out.Close()

    // Create the archive and write the output to the "out" Writer
    tmpDir := "C:/Users/USERNAME~1/AppData/Local/Temp/temp-241232063"
    err = createArchive1(tmpDir, out)
    if err != nil {
        log.Fatalln("Error creating archive:", err)
    }
    fmt.Println("Archive created successfully")
}

func createArchive1(path string, targetFile *os.File) error {
    gw := gzip.NewWriter(targetFile)
    defer gw.Close()
    tw := tar.NewWriter(gw)
    defer tw.Close()

    // walk through every file in the folder
    err := fp.Walk(path, func(filePath string, info os.FileInfo, err error) error {
        // ensure the src actually exists before trying to tar it
        if _, err := os.Stat(filePath); err != nil {
            return err
        }
        if err != nil {
            return err
        }
        if info.IsDir() {
            return nil
        }

        file, err := os.Open(filePath)
        if err != nil {
            return err
        }
        defer file.Close()

        // generate tar header
        header, err := tar.FileInfoHeader(info, info.Name())
        header.Uid = 0
        header.Gid = 0
        if err != nil {
            return err
        }
        header.Name = filePath //strings.TrimPrefix(filePath, fmt.Sprintf("%s/", fp.Dir(path))) //info.Name()

        // write header
        if err := tw.WriteHeader(header); err != nil {
            return err
        }
        if _, err := io.Copy(tw, file); err != nil {
            return err
        }
        return nil
    })
    return err
}
Please let me know what I am doing wrong.
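
One direction worth trying (a guess, not something confirmed in the original post): header.Name is set to the full path under the temp directory, which puts a drive letter and OS-specific separators into the archive entries, and very long entries are safest in the PAX tar format. A sketch of the header handling inside the fp.Walk callback, replacing the corresponding lines above and keeping the rest of createArchive1 unchanged:

// generate tar header
header, err := tar.FileInfoHeader(info, info.Name())
if err != nil {
    return err
}
header.Uid = 0
header.Gid = 0

// store a path relative to the archived folder, with forward slashes,
// instead of the absolute path returned by Walk
rel, err := fp.Rel(path, filePath)
if err != nil {
    return err
}
header.Name = fp.ToSlash(rel)

// PAX headers have no practical name-length limit; forcing the format here
// is a precaution, not something the original thread confirmed
header.Format = tar.FormatPAX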

Writing to file from cmd output

I am trying to write a small program in Go that will collect and save stats from IPFS.
My Go code will execute an IPFS command, save its output in a .txt file, and keep updating that .txt file.
I am having trouble doing that.
This is my code:
package main

import (
    "fmt"
    "io"
    "log"
    "os"
    "os/exec"
    "time"
)

func ipfsCommand() (ipfsOutput string) {
    // output and error
    out, err := exec.Command("ipfs", "stats", "bitswap", "--human").Output()
    // if there are errors, print/log them
    if err != nil {
        log.Printf("error!")
        log.Fatal(err)
    } else {
        log.Printf("no error, printing output")
        fmt.Printf("%s", out)
    }
    return
}

func writeToFile(message string) error {
    f, err := os.Create("outputTest2_2.txt")
    if err != nil {
        return err
    }
    defer f.Close()
    l, err := io.WriteString(f, message)
    if err != nil {
        fmt.Println(err)
        f.Close()
        return err
    }
    fmt.Println(l, "bytes written successfully")
    return f.Sync()
}

func main() {
    // get current time
    currentTime := time.Now()
    fmt.Println("YYYY.MM.DD : ", currentTime.Format("2006.01.02 15:04:05"))
    writeToFile(currentTime)

    // get output from ipfs command
    msg := ipfsCommand()
    // write the output to file
    writeToFile(msg)
    fmt.Println("file written!!!")

    /* // write to file many times
    for i := 0; i < 3; i++ {
        // get output from ipfs command
        msg := ipfsCommand()
        // write the output to file
        writeToFile(msg)
    }*/
}
When the above code is run, this is the error:
# command-line-arguments
.\test2.go:49:13: cannot use currentTime (type time.Time) as type string in argument to writeToFile
Again, I want to get the output from IPFS and save it to a .txt file along with the current time. I want to do this in a loop because I want to save the output from IPFS over a long period of time.
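
The compile error itself is just a type mismatch: writeToFile takes a string, while currentTime is a time.Time. Formatting the time first fixes that one line (a minimal sketch of only that change):

writeToFile(currentTime.Format("2006.01.02 15:04:05"))
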
I tried to fix your script as is, but it just has too many issues. Here is a
rewrite, maybe you can use it as a new starting point:
package main

import (
    "os"
    "os/exec"
    "time"
)

func main() {
    f, err := os.Create("outputTest2_2.txt")
    if err != nil {
        panic(err)
    }
    defer f.Close()

    currentTime, err := time.Now().MarshalText()
    if err != nil {
        panic(err)
    }
    f.Write(append(currentTime, '\n'))

    msg, err := exec.Command("go", "env").Output()
    if err != nil {
        panic(err)
    }
    f.Write(msg)
}
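
The rewrite above captures a single snapshot. For the long-running collection the question asks about, here is a sketch that appends a timestamped sample at a fixed interval; the ipfs command line comes from the question, while the file name ipfs_stats.txt and the 10-second interval are my own choices:

package main

import (
    "fmt"
    "os"
    "os/exec"
    "time"
)

func main() {
    // O_APPEND so every sample is added to the end instead of truncating the file
    f, err := os.OpenFile("ipfs_stats.txt", os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0644)
    if err != nil {
        panic(err)
    }
    defer f.Close()

    for i := 0; i < 3; i++ { // change to `for {` to collect indefinitely
        out, err := exec.Command("ipfs", "stats", "bitswap", "--human").Output()
        if err != nil {
            fmt.Fprintf(f, "%s error: %v\n", time.Now().Format("2006.01.02 15:04:05"), err)
        } else {
            fmt.Fprintf(f, "%s\n%s\n", time.Now().Format("2006.01.02 15:04:05"), out)
        }
        time.Sleep(10 * time.Second)
    }
}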

Uploading a file to an internet site

With the code below I can download a file from the internet while monitoring the download progress (percentage complete).
How can I do something similar to upload a file to the internet while monitoring the upload progress? I want to upload an executable file to GitHub assets.
package main

import (
    "fmt"
    "io"
    "net/http"
    "os"
    "strings"

    "github.com/dustin/go-humanize"
)

// WriteCounter counts the number of bytes written to it. It implements the io.Writer interface
// and we can pass this into io.TeeReader() which will report progress on each write cycle.
type WriteCounter struct {
    Total uint64
}

func (wc *WriteCounter) Write(p []byte) (int, error) {
    n := len(p)
    wc.Total += uint64(n)
    wc.PrintProgress()
    return n, nil
}

func (wc WriteCounter) PrintProgress() {
    // Clear the line by using a carriage return to go back to the start and remove
    // the remaining characters by filling it with spaces
    fmt.Printf("\r%s", strings.Repeat(" ", 35))
    // Return again and print current status of download
    // We use the humanize package to print the bytes in a meaningful way (e.g. 10 MB)
    fmt.Printf("\rDownloading... %s complete", humanize.Bytes(wc.Total))
}

func main() {
    fmt.Println("Download Started")
    fileUrl := "https://upload.wikimedia.org/wikipedia/commons/d/d6/Wp-w4-big.jpg"
    err := DownloadFile("avatar.jpg", fileUrl)
    if err != nil {
        panic(err)
    }
    fmt.Println("Download Finished")
}

// DownloadFile will download a url to a local file. It's efficient because it will
// write as it downloads and not load the whole file into memory. We pass an io.TeeReader
// into Copy() to report progress on the download.
func DownloadFile(filepath string, url string) error {
    // Create the file, but give it a tmp file extension, this means we won't overwrite a
    // file until it's downloaded, but we'll remove the tmp extension once downloaded.
    out, err := os.Create(filepath + ".tmp")
    if err != nil {
        return err
    }

    // Get the data
    resp, err := http.Get(url)
    if err != nil {
        out.Close()
        return err
    }
    defer resp.Body.Close()

    // Create our progress reporter and pass it to be used alongside our writer
    counter := &WriteCounter{}
    if _, err = io.Copy(out, io.TeeReader(resp.Body, counter)); err != nil {
        out.Close()
        return err
    }

    // The progress output uses the same line, so print a new line once the download is finished
    fmt.Print("\n")

    // Close the file without defer so it can happen before Rename()
    out.Close()

    if err = os.Rename(filepath+".tmp", filepath); err != nil {
        return err
    }
    return nil
}
I just modified your code. It works for my file server.
func UploadFile(filepath string, url string) error {
    // Open the file we want to upload
    out, err := os.Open(filepath)
    if err != nil {
        return err
    }

    // Create our progress reporter and pass it to be used alongside our reader
    counter := &WriteCounter{}

    // Send the data; the file is streamed as the request body
    resp, err := http.Post(url, "multipart/form-data", io.TeeReader(out, counter))
    if err != nil {
        out.Close()
        log.Println(err.Error()) // needs "log" in the import block
        return err
    }
    defer resp.Body.Close()

    // The progress output uses the same line, so print a new line once the upload is finished
    fmt.Print("\n")

    // Close the file explicitly rather than with defer
    out.Close()
    return nil
}
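
Note that the snippet above sets a multipart/form-data content type but sends the raw file bytes as the body, with no multipart boundary, so it only works against a server that reads the request body directly. For a server that expects a real multipart form (like the upload handlers earlier on this page), here is a sketch that builds the form and still reports progress through the same WriteCounter; the field name "file", the function name UploadFileMultipart, and the in-memory buffering are my own choices:

// UploadFileMultipart is a sketch, not from the original answer: it encodes the
// file as a multipart form field named "file" and reports progress through the
// WriteCounter type defined above. Needs "bytes", "mime/multipart" and
// "path/filepath" added to the import block. The body is buffered in memory
// first, which is fine for small files but not for very large ones.
func UploadFileMultipart(path string, url string) error {
    f, err := os.Open(path)
    if err != nil {
        return err
    }
    defer f.Close()

    // build the multipart body
    var body bytes.Buffer
    mw := multipart.NewWriter(&body)
    fw, err := mw.CreateFormFile("file", filepath.Base(path))
    if err != nil {
        return err
    }
    if _, err := io.Copy(fw, f); err != nil {
        return err
    }
    if err := mw.Close(); err != nil { // writes the closing boundary
        return err
    }

    // stream the encoded body through the progress counter
    counter := &WriteCounter{}
    resp, err := http.Post(url, mw.FormDataContentType(), io.TeeReader(&body, counter))
    if err != nil {
        return err
    }
    defer resp.Body.Close()
    fmt.Print("\n")
    return nil
}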

Can't find a public file from URL in Go

I am trying to get the content of a publicly available file using ioutil.ReadFile() but it doesn't find the file: panic: open http://www.pdf995.com/samples/pdf.pdf: No such file or directory
Here's my code:
// Reading and writing files are basic tasks needed for
// many Go programs. First we'll look at some examples of
// reading files.
package main

import (
    "fmt"
    "io/ioutil"
)

// Reading files requires checking most calls for errors.
// This helper will streamline our error checks below.
func check(e error) {
    if e != nil {
        panic(e)
    }
}

func main() {
    fileInUrl, err := ioutil.ReadFile("http://www.pdf995.com/samples/pdf.pdf")
    if err != nil {
        panic(err)
    }
    fmt.Printf("HERE --- fileInUrl: %+v", fileInUrl)
}
Here's a go playground example
ioutil.ReadFile() does not support HTTP.
If you look at the source code (https://golang.org/src/io/ioutil/ioutil.go?s=1503:1549#L42), it opens the file with os.Open, so it only works with local files. You can do it like this instead:
package main

import (
    "io"
    "net/http"
    "os"
)

func main() {
    fileUrl := "http://www.pdf995.com/samples/pdf.pdf"
    if err := DownloadFile("example.pdf", fileUrl); err != nil {
        panic(err)
    }
}

func DownloadFile(filepath string, url string) error {
    // Get the data
    resp, err := http.Get(url)
    if err != nil {
        return err
    }
    defer resp.Body.Close()

    // Create the file
    out, err := os.Create(filepath)
    if err != nil {
        return err
    }
    defer out.Close()

    // Write the body to file
    _, err = io.Copy(out, resp.Body)
    return err
}
However, this does not work on the Go Playground, which blocks outgoing network connections (you get an error like dial tcp: Protocol not available), so you have to run it on your own PC.
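
If you only need the bytes in memory, which is what the original ioutil.ReadFile call was after, a minimal sketch that reads the HTTP response body instead of a local file:

package main

import (
    "fmt"
    "io/ioutil"
    "net/http"
)

func main() {
    resp, err := http.Get("http://www.pdf995.com/samples/pdf.pdf")
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()

    // read the whole response into memory, the way ioutil.ReadFile does for local files
    fileInUrl, err := ioutil.ReadAll(resp.Body)
    if err != nil {
        panic(err)
    }
    fmt.Printf("downloaded %d bytes\n", len(fileInUrl))
}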

Golang AWS S3manager multipartreader w/ Goroutines

I'm creating an endpoint that allows a user to upload several files at the same time and store them in S3. Currently I'm able to achieve this using MultipartReader and s3manager, but not concurrently.
I'm trying to use goroutines to speed this up and have multiple files uploaded to S3 concurrently, but a data race error is causing trouble. I suspect *s3manager might not be goroutine-safe, even though the docs say it is.
(The code works synchronously if the go statement is replaced with a plain function call.)
Could implementing mutex locks possibly fix my error?
func uploadHandler(w http.ResponseWriter, r *http.Request) {
    counter := 0
    switch r.Method {
    // GET to display the upload form.
    case "GET":
        err := templates.Execute(w, nil)
        if err != nil {
            log.Print(err)
        }
    // POST uploads each file and sends them to S3
    case "POST":
        c := make(chan string)
        // grab the request.MultipartReader
        reader, err := r.MultipartReader()
        if err != nil {
            http.Error(w, err.Error(), http.StatusInternalServerError)
            return
        }
        // copy each part to destination.
        for {
            part, err := reader.NextPart()
            if err == io.EOF {
                break
            }
            // if part.FileName() is empty, skip this iteration.
            if part.FileName() == "" {
                continue
            }
            counter++
            go S3Upload(c, part)
        }
        for i := 0; i < counter; i++ {
            fmt.Println(<-c)
        }
        // displaying a success message.
        err = templates.Execute(w, "Upload successful.")
        if err != nil {
            log.Print(err)
        }
    default:
        w.WriteHeader(http.StatusMethodNotAllowed)
    }
}

func S3Upload(c chan string, part *multipart.Part) {
    bucket := os.Getenv("BUCKET")
    sess, err := session.NewSession(&aws.Config{
        Region: aws.String(os.Getenv("REGION")),
    })
    if err != nil {
        c <- "error occured creating session"
        return
    }
    uploader := s3manager.NewUploader(sess)

    _, err = uploader.Upload(&s3manager.UploadInput{
        Bucket: aws.String(bucket),
        Key:    aws.String(part.FileName()),
        Body:   part,
    })
    if err != nil {
        c <- "Error occurred attempting to upload to S3"
        return
    }
    // successful upload
    c <- "successful upload"
}
See all the comments above. Here is a modified code example; channels are not really useful here.
package main

import (
    "bytes"
    "io"
    "log"
    "net/http"
    "os"
    "strings"
    "sync"

    "github.com/aws/aws-sdk-go/aws"
    "github.com/aws/aws-sdk-go/aws/session"
    "github.com/aws/aws-sdk-go/service/s3/s3manager"
)

var (
    setupUploaderOnce sync.Once
    uploader          *s3manager.Uploader
    bucket            string
    region            string
)

// ensure sessions and uploader are setup only once using a Singleton pattern
func setupUploader() {
    setupUploaderOnce.Do(func() {
        bucket = os.Getenv("BUCKET")
        region = os.Getenv("REGION")
        sess, err := session.NewSession(&aws.Config{Region: aws.String(region)})
        if err != nil {
            log.Fatal(err)
        }
        // assign to the package-level uploader; using := here would shadow it
        uploader = s3manager.NewUploader(sess)
    })
}

// normally singleton stuff is packaged out and called before starting the server,
// but to keep the example a single file, load it up here
func init() {
    setupUploader()
}

// templates is assumed to be defined elsewhere, as in the question's original code
func uploadHandler(w http.ResponseWriter, r *http.Request) {
    switch r.Method {
    // GET to display the upload form.
    case "GET":
        err := templates.Execute(w, nil)
        if err != nil {
            log.Print(err)
        }
    // POST uploads each file and sends them to S3
    case "POST":
        var buf bytes.Buffer
        // "file" is defined by the form field, change it to whatever your form sets it to
        file, header, err := r.FormFile("file")
        if err != nil {
            http.Error(w, err.Error(), http.StatusInternalServerError)
            return
        }
        // close the file
        defer file.Close()

        fileName := strings.Split(header.Filename, ".")

        // load the entire file data to the buffer
        _, err = io.Copy(&buf, file)
        if err != nil {
            http.Error(w, err.Error(), http.StatusInternalServerError)
            return
        }

        // hand the buffered file off to S3 in the background
        go S3Upload(buf, fileName[0])

        // displaying a success message.
        err = templates.Execute(w, "Upload successful.")
        if err != nil {
            log.Print(err)
        }
    default:
        w.WriteHeader(http.StatusMethodNotAllowed)
    }
}

// keeping this simple, do something with the err, like log
// if the uploader fails in the goroutine, there is potential
// for false positive uploads... channels are not really good here
// either, for that, bubble the error up,
// and don't spin up a goroutine.. same thing as waiting for the channel to return.
func S3Upload(body bytes.Buffer, fileName string) {
    _, err := uploader.Upload(&s3manager.UploadInput{
        Bucket: aws.String(bucket),
        Key:    aws.String(fileName),
        Body:   bytes.NewReader(body.Bytes()),
    })
    if err != nil {
        log.Println("S3 upload failed:", err)
    }
}
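
The answer above handles one file per request. If the goal is still several files in one request, as in the original question, the data race most likely comes from handing *multipart.Part values to goroutines: every part reads from the request's single underlying stream, so a part has to be fully consumed before the next NextPart call and cannot be read concurrently. Here is a sketch of one way around that, buffering each part inside the loop and only running the S3 upload itself in a goroutine; uploadManyHandler is my own name, and it reuses the uploader and S3Upload from the answer above:

// uploadManyHandler is a sketch, not part of the original answer: it accepts
// several files in one multipart request and uploads them to S3 concurrently,
// reusing the package-level uploader and the S3Upload helper defined above.
func uploadManyHandler(w http.ResponseWriter, r *http.Request) {
    if r.Method != http.MethodPost {
        w.WriteHeader(http.StatusMethodNotAllowed)
        return
    }

    reader, err := r.MultipartReader()
    if err != nil {
        http.Error(w, err.Error(), http.StatusInternalServerError)
        return
    }

    var wg sync.WaitGroup
    for {
        part, err := reader.NextPart()
        if err == io.EOF {
            break
        }
        if err != nil {
            http.Error(w, err.Error(), http.StatusInternalServerError)
            return
        }
        if part.FileName() == "" {
            continue
        }

        // Drain this part into its own buffer *before* calling NextPart again:
        // every part reads from the same request body stream, so a part must
        // not be consumed from a goroutine while the loop keeps going.
        var buf bytes.Buffer
        if _, err := io.Copy(&buf, part); err != nil {
            http.Error(w, err.Error(), http.StatusInternalServerError)
            return
        }

        wg.Add(1)
        go func(b bytes.Buffer, name string) {
            defer wg.Done()
            S3Upload(b, name) // only the network upload runs concurrently
        }(buf, part.FileName())
    }

    // wait for every upload goroutine before reporting success
    wg.Wait()
    w.Write([]byte("Upload successful.\n"))
}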
