Execute template to file - go

I have a template file template.html as follows:
Hello {{.Name}}, welcome!
and the following code:
package main

import (
    "fmt"
    "os"
    "text/template"
)

func main() {
    type person struct {
        Name string
    }
    p := &person{"clinyong"}
    t := template.Must(template.New("template.html").ParseFiles("template.html"))
    f, err := os.OpenFile("test", os.O_CREATE, 0777)
    if err != nil {
        fmt.Println(err)
        return
    }
    defer f.Close()
    err = t.Execute(f, p)
    if err != nil {
        fmt.Println(err)
    }
}
t.Execute(f, p) returns an error saying that f is a bad file descriptor.
Is it possible to execute a template to a file as shown above? In the examples I have seen, the writer passed to Execute is almost always an http.ResponseWriter or os.Stdout.

As @TimCooper said, I need to include os.O_WRONLY in the flags passed to os.OpenFile.
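A minimal sketch of the corrected call; os.O_CREATE alone yields a descriptor that is not open for writing (adding os.O_TRUNC as well is an assumption, useful if the file should be overwritten on each run):
// Open (or create) the file for writing; without os.O_WRONLY the
// descriptor is not writable and Execute fails with "bad file descriptor".
f, err := os.OpenFile("test", os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0777)
if err != nil {
    fmt.Println(err)
    return
}
defer f.Close()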

Related

Writing to file from cmd output

I am trying to write a small program in Go that will collect and save stats from IPFS.
My Go code will execute an IPFS command, save its output to a .txt file, and keep updating that file.
I am having trouble doing that.
This is my code:
package main

import (
    "fmt"
    "io"
    "log"
    "os"
    "os/exec"
    "time"
)

func ipfsCommand() (ipfsOutput string) {
    // output and error
    out, err := exec.Command("ipfs", "stats", "bitswap", "--human").Output()
    // if there are errors, print/log them
    if err != nil {
        log.Printf("error!")
        log.Fatal(err)
    } else {
        log.Printf("no error, printing output")
        fmt.Printf("%s", out)
    }
    return
}

func writeToFile(message string) error {
    f, err := os.Create("outputTest2_2.txt")
    if err != nil {
        return err
    }
    defer f.Close()
    l, err := io.WriteString(f, message)
    if err != nil {
        fmt.Println(err)
        f.Close()
        return err
    }
    fmt.Println(l, "bytes written successfully")
    return f.Sync()
}

func main() {
    // get current time
    currentTime := time.Now()
    fmt.Println("YYYY.MM.DD : ", currentTime.Format("2006.01.02 15:04:05"))
    writeToFile(currentTime)
    // get output from ipfs command
    msg := ipfsCommand()
    // write the output to file
    writeToFile(msg)
    fmt.Println("file written!!!")
    /* // write to file many times
    for i := 0; i < 3; i++ {
        // get output from ipfs command
        msg := ipfsCommand()
        // write the output to file
        writeToFile(msg)
    }*/
}
When the above code is run, this is the error:
# command-line-arguments
.\test2.go:49:13: cannot use currentTime (type time.Time) as type string in argument to writeToFile
To restate: I want to get the output from IPFS and save it to a .txt file along with the current time. I want to do this in a loop because I want to collect output from IPFS over a long period of time.
I tried to fix your script as is, but it just has too many issues. Here is a
rewrite, maybe you can use it as a new starting point:
package main

import (
    "os"
    "os/exec"
    "time"
)

func main() {
    f, err := os.Create("outputTest2_2.txt")
    if err != nil {
        panic(err)
    }
    defer f.Close()

    currentTime, err := time.Now().MarshalText()
    if err != nil {
        panic(err)
    }
    f.Write(append(currentTime, '\n'))

    msg, err := exec.Command("go", "env").Output()
    if err != nil {
        panic(err)
    }
    f.Write(msg)
}
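Since you mentioned collecting the output repeatedly, here is a hedged sketch of a polling loop that appends a timestamped entry on each iteration; the command, file name, iteration count, and interval are placeholders you would adjust:
package main

import (
    "fmt"
    "os"
    "os/exec"
    "time"
)

func main() {
    // O_APPEND keeps adding to the end of the file instead of truncating it.
    f, err := os.OpenFile("outputTest2_2.txt", os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
    if err != nil {
        panic(err)
    }
    defer f.Close()

    for i := 0; i < 3; i++ { // or `for {}` to run indefinitely
        out, err := exec.Command("ipfs", "stats", "bitswap", "--human").Output()
        if err != nil {
            panic(err)
        }
        // Write a timestamp line followed by the command output.
        fmt.Fprintf(f, "%s\n%s\n", time.Now().Format(time.RFC3339), out)
        time.Sleep(10 * time.Second) // polling interval: an arbitrary choice
    }
}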

Generate .pb file without using protoc in golang

I'm trying to generate a .pb.go file from service.proto as input, in Go.
Is there a way to do it without using the protoc binary (for example, by directly using the package github.com/golang/protobuf/protoc-gen-go)?
If you have a detail.proto like this:
message AppDetails {
    optional string version = 4;
}
You can parse it into a message like this:
package main

import (
    "fmt"

    "github.com/golang/protobuf/proto"
    "github.com/jhump/protoreflect/desc/protoparse"
    "github.com/jhump/protoreflect/dynamic"
)

func parse(file, msg string) (*dynamic.Message, error) {
    var p protoparse.Parser
    fd, err := p.ParseFiles(file)
    if err != nil {
        return nil, err
    }
    md := fd[0].FindMessage(msg)
    return dynamic.NewMessage(md), nil
}

func main() {
    b := []byte("\"\vhello world")
    m, err := parse("detail.proto", "AppDetails")
    if err != nil {
        panic(err)
    }
    if err := proto.Unmarshal(b, m); err != nil {
        panic(err)
    }
    fmt.Println(m) // version:"hello world"
}
However, as you may notice, this package still uses the old Protobuf V1 API. I did
find a pull request for V2 support:
https://github.com/jhump/protoreflect/pull/354
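As a follow-up, if you also want to build and serialize a message at runtime, a sketch using the same dynamic package should look roughly like this (the field value "1.2.3" is just illustrative, and it assumes the parse helper from the example above):
m, err := parse("detail.proto", "AppDetails")
if err != nil {
    panic(err)
}
// TrySetFieldByName returns an error if the field name or value type is wrong.
if err := m.TrySetFieldByName("version", "1.2.3"); err != nil {
    panic(err)
}
out, err := m.Marshal() // wire-format bytes for the dynamic message
if err != nil {
    panic(err)
}
fmt.Printf("%q\n", out)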

The process cannot access the file because it is being used by another process in Golang

The process cannot access the file ... because it is being used by another process
I can't remove the zip file with the code below.
Is it possible to extract and then delete the file in one program?
Code
package main

import (
    "archive/zip"
    "fmt"
    "io"
    "log"
    "net/http"
    "os"
    "path/filepath"
    "strings"
)

func main() {
    url := "https://230c07c8-77b2-4c0d-9b82-8c6501a5bc45.filesusr.com/archives/b7572a_9ec985e0031042ef912cb40cafbe6376.zip?dn=7.zip"
    out, _ := os.Create("E:\\experi\\1234567890.zip")
    defer out.Close()
    resp, _ := http.Get(url)
    defer resp.Body.Close()
    _, _ = io.Copy(out, resp.Body)
    files, err := Unzip("E:\\experi\\1234567890.zip", "E:\\experi\\1234567890")
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println("Unzipped the following files:\n" + strings.Join(files, "\n"))
}

func Unzip(src string, destination string) ([]string, error) {
    var filenames []string
    r, err := zip.OpenReader(src)
    if err != nil {
        return filenames, err
    }
    defer r.Close()
    for _, f := range r.File {
        fpath := filepath.Join(destination, f.Name)
        if !strings.HasPrefix(fpath, filepath.Clean(destination)+string(os.PathSeparator)) {
            return filenames, fmt.Errorf("%s is an illegal filepath", fpath)
        }
        filenames = append(filenames, fpath)
        if f.FileInfo().IsDir() {
            os.MkdirAll(fpath, os.ModePerm)
            continue
        }
        if err = os.MkdirAll(filepath.Dir(fpath), os.ModePerm); err != nil {
            return filenames, err
        }
        outFile, err := os.OpenFile(fpath,
            os.O_WRONLY|os.O_CREATE|os.O_TRUNC|os.O_RDWR,
            f.Mode())
        if err != nil {
            return filenames, err
        }
        rc, err := f.Open()
        if err != nil {
            return filenames, err
        }
        _, err = io.Copy(outFile, rc)
        outFile.Close()
        rc.Close()
        if err != nil {
            return filenames, err
        }
    }
    removeFile()
    return filenames, nil
}

func removeFile() {
    error := os.Remove("E:\\experi\\1234567890.zip")
    if error != nil {
        log.Fatal(error)
    }
}
Output
2020/10/28 13:09:04 remove E:\experi\1234567890.zip: The process cannot access the file because it is being used by another process.
Process finished with exit code 1
Is there another way to do the same thing?
Did I go wrong anywhere?
Help would be much appreciated. Thanks in advance. :)
out, _ := os.Create("E:\\experi\\1234567890.zip") creates or truncates the file and returns a *os.File (so the file is open).
defer out.Close() closes the file "the moment the surrounding function returns" (spec).
So at the time you call Unzip, the file is still open. To fix this, call out.Close() before the call to Unzip (and please don't assume that calls complete without error; check them).
If you close the file using defer, it is only closed when the surrounding function returns. You must explicitly close the file before removing it.
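A sketch of that fix against the code above, closing the download explicitly before unzipping and removing it (and checking the errors that were previously discarded):
// Download the archive, then close it before unzipping and removing it.
out, err := os.Create("E:\\experi\\1234567890.zip")
if err != nil {
    log.Fatal(err)
}
resp, err := http.Get(url)
if err != nil {
    out.Close()
    log.Fatal(err)
}
defer resp.Body.Close()
if _, err := io.Copy(out, resp.Body); err != nil {
    out.Close()
    log.Fatal(err)
}
// Close explicitly instead of deferring, so the file is no longer
// held open when Unzip and os.Remove run.
if err := out.Close(); err != nil {
    log.Fatal(err)
}
files, err := Unzip("E:\\experi\\1234567890.zip", "E:\\experi\\1234567890")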

How to read a text file? [duplicate]

This question already has answers here:
How can I read a whole file into a string variable
(7 answers)
Closed 4 years ago.
I'm trying to read "file.txt" and put the contents into a variable using Golang. Here is what I've tried...
package main

import (
    "fmt"
    "log"
    "os"
)

func main() {
    file, err := os.Open("file.txt")
    if err != nil {
        log.Fatal(err)
    }
    fmt.Print(file)
}
The file opens successfully, and os.Open returns a value of type *os.File, but printing it does not give me the file's contents.
It depends on what you are trying to do.
file, err := os.Open("file.txt")
fmt.Print(file)
The reason this outputs something like &{0xc082016240} is that you are printing the pointer value of a file descriptor (*os.File), not the file content. To obtain the file content, you need to read from the file descriptor.
To read all the file content (as bytes) into memory, use ioutil.ReadAll:
package main

import (
    "fmt"
    "io/ioutil"
    "log"
    "os"
)

func main() {
    file, err := os.Open("file.txt")
    if err != nil {
        log.Fatal(err)
    }
    defer func() {
        if err = file.Close(); err != nil {
            log.Fatal(err)
        }
    }()

    b, err := ioutil.ReadAll(file)
    fmt.Print(b)
}
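Note that b is a []byte here, so fmt.Print(b) prints raw byte values; wrap it as string(b) if you want to see the text.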
But if the file is large, it can be more memory-efficient to read it in fixed-size chunks, using the io.Reader implementation of *os.File:
func main() {
    file, err := os.Open("file.txt")
    if err != nil {
        log.Fatal(err)
    }
    defer func() {
        if err = file.Close(); err != nil {
            log.Fatal(err)
        }
    }()

    buf := make([]byte, 32*1024) // define your buffer size here.
    for {
        n, err := file.Read(buf)
        if n > 0 {
            fmt.Print(buf[:n]) // your read buffer.
        }
        if err == io.EOF {
            break
        }
        if err != nil {
            log.Printf("read %d bytes: %v", n, err)
            break
        }
    }
}
Otherwise, you could also use the standard bufio package and its Scanner. A Scanner reads your file token by token, split on a separator.
By default the scanner advances the token by newline; you can customise how the scanner tokenises your file (the bufio tests are a good place to learn from), as the sketch after the example below shows.
package main

import (
    "bufio"
    "fmt"
    "log"
    "os"
)

func main() {
    file, err := os.Open("file.txt")
    if err != nil {
        log.Fatal(err)
    }
    defer func() {
        if err = file.Close(); err != nil {
            log.Fatal(err)
        }
    }()

    scanner := bufio.NewScanner(file)
    for scanner.Scan() { // internally, it advances the token based on the separator
        fmt.Println(scanner.Text())  // token as a string
        fmt.Println(scanner.Bytes()) // token as bytes
    }
}
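For instance, a hedged sketch of customising the split function to read word tokens instead of lines (bufio.ScanWords is one of the stock split functions):
scanner := bufio.NewScanner(file)
scanner.Split(bufio.ScanWords) // other stock options: bufio.ScanLines, bufio.ScanRunes, bufio.ScanBytes
for scanner.Scan() {
    fmt.Println(scanner.Text()) // one word per iteration
}
if err := scanner.Err(); err != nil {
    log.Fatal(err)
}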
Lastly, I would also like to point you to this site: the go-lang file cheatsheet. It covers pretty much everything related to working with files in Go; I hope you'll find it useful.

Read text file into string array (and write)

The ability to read (and write) a text file into and out of a string array is, I believe, a fairly common requirement. It is also quite useful when starting with a language, since it removes the initial need to access a database. Does such a helper exist in Go?
e.g.
func ReadLines(sFileName string, iMinLines int) ([]string, bool) {
and
func WriteLines(saBuff []string, sFilename string) bool {
I would prefer to use an existing one rather than duplicate.
As of the Go 1.1 release, there is a bufio.Scanner API that can easily read lines from a file. Consider the following example from above, rewritten with Scanner:
package main

import (
    "bufio"
    "fmt"
    "log"
    "os"
)

// readLines reads a whole file into memory
// and returns a slice of its lines.
func readLines(path string) ([]string, error) {
    file, err := os.Open(path)
    if err != nil {
        return nil, err
    }
    defer file.Close()

    var lines []string
    scanner := bufio.NewScanner(file)
    for scanner.Scan() {
        lines = append(lines, scanner.Text())
    }
    return lines, scanner.Err()
}

// writeLines writes the lines to the given file.
func writeLines(lines []string, path string) error {
    file, err := os.Create(path)
    if err != nil {
        return err
    }
    defer file.Close()

    w := bufio.NewWriter(file)
    for _, line := range lines {
        fmt.Fprintln(w, line)
    }
    return w.Flush()
}

func main() {
    lines, err := readLines("foo.in.txt")
    if err != nil {
        log.Fatalf("readLines: %s", err)
    }
    for i, line := range lines {
        fmt.Println(i, line)
    }
    if err := writeLines(lines, "foo.out.txt"); err != nil {
        log.Fatalf("writeLines: %s", err)
    }
}
Note: ioutil is deprecated as of Go 1.16.
If the file isn't too large, this can be done with the ioutil.ReadFile and strings.Split functions like so:
content, err := ioutil.ReadFile(filename)
if err != nil {
    // Do something
}
lines := strings.Split(string(content), "\n")
You can read the documentation on the ioutil and strings packages.
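Since ioutil is deprecated as of Go 1.16 (as noted above), a minimal sketch of the equivalent with os.ReadFile:
content, err := os.ReadFile(filename) // replaces ioutil.ReadFile in Go 1.16+
if err != nil {
    // handle the error
}
lines := strings.Split(string(content), "\n")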
I cannot update the first answer. Anyway, after the Go 1 release there were some breaking changes, so I updated it as shown below:
package main

import (
    "bufio"
    "bytes"
    "fmt"
    "io"
    "os"
    "strings"
)

// Read a whole file into memory and store it as an array of lines.
func readLines(path string) (lines []string, err error) {
    var (
        file   *os.File
        part   []byte
        prefix bool
    )
    if file, err = os.Open(path); err != nil {
        return
    }
    defer file.Close()

    reader := bufio.NewReader(file)
    buffer := bytes.NewBuffer(make([]byte, 0))
    for {
        if part, prefix, err = reader.ReadLine(); err != nil {
            break
        }
        buffer.Write(part)
        if !prefix {
            lines = append(lines, buffer.String())
            buffer.Reset()
        }
    }
    if err == io.EOF {
        err = nil
    }
    return
}

func writeLines(lines []string, path string) (err error) {
    var file *os.File

    if file, err = os.Create(path); err != nil {
        return
    }
    defer file.Close()

    //writer := bufio.NewWriter(file)
    for _, item := range lines {
        //fmt.Println(item)
        _, err = file.WriteString(strings.TrimSpace(item) + "\n")
        //file.Write([]byte(item))
        if err != nil {
            //fmt.Println("debug")
            fmt.Println(err)
            break
        }
    }
    /*content := strings.Join(lines, "\n")
    _, err = writer.WriteString(content)*/
    return
}

func main() {
    lines, err := readLines("foo.txt")
    if err != nil {
        fmt.Printf("Error: %s\n", err)
        return
    }
    for _, line := range lines {
        fmt.Println(line)
    }
    //array := []string{"7.0", "8.5", "9.1"}
    err = writeLines(lines, "foo2.txt")
    fmt.Println(err)
}
You can use os.File (which implements the io.Reader interface) with the bufio package for that. However, those packages are built with fixed memory usage in mind (no matter how large the file is) and are quite fast.
Unfortunately this makes reading the whole file into memory a bit more complicated. You can use a bytes.Buffer to join the parts of a line if they exceed the line limit. Anyway, I recommend you try to use the line reader directly in your project (especially if you do not know how large the text file is!). But if the file is small, the following example might be sufficient for you:
package main

import (
    "bufio"
    "bytes"
    "fmt"
    "os"
)

// Read a whole file into the memory and store it as array of lines
func readLines(path string) (lines []string, err os.Error) {
    var (
        file   *os.File
        part   []byte
        prefix bool
    )
    if file, err = os.Open(path); err != nil {
        return
    }
    reader := bufio.NewReader(file)
    buffer := bytes.NewBuffer(make([]byte, 1024))
    for {
        if part, prefix, err = reader.ReadLine(); err != nil {
            break
        }
        buffer.Write(part)
        if !prefix {
            lines = append(lines, buffer.String())
            buffer.Reset()
        }
    }
    if err == os.EOF {
        err = nil
    }
    return
}

func main() {
    lines, err := readLines("foo.txt")
    if err != nil {
        fmt.Println("Error: %s\n", err)
        return
    }
    for _, line := range lines {
        fmt.Println(line)
    }
}
Another alternative might be to use ioutil.ReadAll to read in the complete file at once and do the slicing by line afterwards. I don't give an explicit example of how to write the lines back to the file, but that's basically an os.Create() followed by a loop similar to the one in the example (see main()); a sketch follows below.
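A minimal sketch of that write-back loop in present-day Go, assuming the lines slice produced by readLines above:
// Create (or truncate) the output file and write each line back,
// re-adding the newline that readLines stripped.
func writeLines(lines []string, path string) error {
    file, err := os.Create(path)
    if err != nil {
        return err
    }
    defer file.Close()
    for _, line := range lines {
        if _, err := file.WriteString(line + "\n"); err != nil {
            return err
        }
    }
    return nil
}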
func readToDisplayUsingFile1(f *os.File) {
    defer f.Close()
    reader := bufio.NewReader(f)
    contents, _ := ioutil.ReadAll(reader)
    lines := strings.Split(string(contents), "\n") // Split takes a string separator, not a rune
    fmt.Println(lines)
}
or
func readToDisplayUsingFile1(f *os.File) {
    defer f.Close()
    slice := make([]string, 0)
    reader := bufio.NewReader(f)
    for {
        str, err := reader.ReadString('\n')
        if err == io.EOF {
            break
        }
        slice = append(slice, str)
    }
    fmt.Println(slice) // display the collected lines
}
