How to create a slice with filenames - Go

There is a program which creates a file every second. I want to append the file names to a slice and print them. Right now my program behaves incorrectly: it appends names, but only the name of a single file. I expect to get []string{"1","2","3"}, but instead I get []string{"1","1","1"}, []string{"2","2","2"}, []string{"3","3","3"}. How can I correct my program to get the expected result?
package main

import (
    "encoding/csv"
    "fmt"
    "os"
    "strconv"
    "time"
)

func main() {
    for {
        time.Sleep(1 * time.Second)
        createFile()
    }
}

func createFile() {
    rowFile := time.Now().Second()
    fileName := strconv.Itoa(rowFile)
    file, err := os.OpenFile(fileName, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0644)
    if err != nil {
        fmt.Println(err)
    }
    defer file.Close()
    writer := csv.NewWriter(file)
    writer.Comma = '|'
    err = writer.Write([]string{""})
    if err != nil {
        fmt.Println(err)
    }
    countFiles(fileName)
}

func countFiles(fileName string) {
    arrFiles := make([]string, 0, 3)
    for i := 0; i < 3; i++ {
        arrFiles = append(arrFiles, fileName)
    }
    fmt.Println(arrFiles) // here I expect ["1","2","3"] then ["4","5","6"] and so on. But now there is ["1","1","1"] then ["2","2","2"] and so on
}

createFile() does not persist the created file names in any way. You can do something like this:
package main

import (
    "encoding/csv"
    "fmt"
    "os"
    "strconv"
    "time"
)

func main() {
    files := []string{}
    for {
        time.Sleep(1 * time.Second)
        files = append(files, createFile())
        fmt.Println(files)
    }
}

func createFile() string {
    rowFile := time.Now().Second()
    fileName := strconv.Itoa(rowFile)
    file, err := os.OpenFile(fileName, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0644)
    if err != nil {
        fmt.Println(err)
        return fileName
    }
    defer file.Close()
    writer := csv.NewWriter(file)
    writer.Comma = '|'
    err = writer.Write([]string{""})
    if err != nil {
        fmt.Println(err)
    }
    writer.Flush() // csv.Writer is buffered; without Flush nothing actually reaches the file
    return fileName
}
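If you specifically want the names printed in groups of three, as in the expected ["1","2","3"], then ["4","5","6"], a small variation of main (just a sketch, reusing the same createFile) collects names until three have accumulated:

func main() {
    batch := make([]string, 0, 3)
    for {
        time.Sleep(1 * time.Second)
        batch = append(batch, createFile())
        if len(batch) == 3 {
            fmt.Println(batch) // e.g. [1 2 3], then [4 5 6], ...
            batch = batch[:0]  // keep the backing array, start the next batch empty
        }
    }
}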

Related

Golang segment stdin stream to file

Instead of writing a pipe to one huge file, I want to segment the stream into chunks on signal USR1. I think I got the basics working, but the app just hangs and nothing happens. Any clues or best practices for handling an uncontrollable input stream with byte-perfect segmentation?
package main

import (
    "bufio"
    "fmt"
    "io"
    "os"
    "os/signal"
    "syscall"
    "time"
)

var done bool

func handle(c chan os.Signal) {
    for {
        sig := <-c
        switch sig {
        case syscall.SIGUSR1:
            fmt.Println("###Sink temporarily_closed###")
            done = true
        case syscall.SIGUSR2:
            fmt.Println("###Sink closed###")
            done = true
        case syscall.SIGHUP:
            fmt.Println("###Sink running###")
        }
    }
}

func check(e error) {
    if e != nil {
        panic(e)
    }
}

func main() {
    c := make(chan os.Signal, 1)
    signal.Notify(c, syscall.SIGUSR1, syscall.SIGUSR2, syscall.SIGHUP)
    go handle(c)
    reader := bufio.NewReaderSize(os.Stdin, 1024*10)
    for true {
        if done {
            file, err := os.Create("./temp.file")
            check(err)
            writer := bufio.NewWriter(file)
            written, err := io.Copy(writer, reader)
            check(err)
            fmt.Println(written)
            writer.Flush()
            file.Close()
            reader.Reset(os.Stdin)
            done = false
        }
        time.Sleep(time.Millisecond)
    }
}
You need to call io.CopyN(dst, src, 4096) in a loop and rotate the file once in a while. See the example below. I rotate by size here, but it is easy to add signal handling instead.
package main

import (
    "fmt"
    "io"
    "log"
    "os"
    "time"
)

var count int
var f *os.File

func rotate() *os.File {
    if f != nil {
        if err := f.Close(); err != nil {
            log.Fatal(err)
        }
    }
    fname := fmt.Sprintf("./dump-%d.bin", count)
    count++
    f, err := os.Create(fname)
    if err != nil {
        log.Fatal(err)
    }
    log.Println("rotated:", fname)
    return f
}

func main() {
    var written int64 // io.CopyN reports an int64 byte count
    reader := os.Stdin
    for {
        if written == 0 || written >= 4096*10 {
            f = rotate()
            written = 0
        }
        n, err := io.CopyN(f, reader, 4096)
        written += n
        if err == io.EOF {
            log.Println("stdin closed, total written:", written)
            return
        }
        if err != nil {
            log.Fatal(err)
        }
        log.Println("written:", written)
        time.Sleep(time.Millisecond * 500)
    }
}
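The question asked for rotation on SIGUSR1 rather than by size; a minimal sketch of that variant (untested, reusing the rotate helper above and adding the os/signal and syscall imports) could look like this:

func main() {
    sig := make(chan os.Signal, 1)
    signal.Notify(sig, syscall.SIGUSR1)
    var written int64
    f = rotate()
    for {
        // non-blocking check: rotate when SIGUSR1 has been received
        select {
        case <-sig:
            f = rotate()
            written = 0
        default:
        }
        // note: CopyN blocks until a full 4096-byte chunk arrives,
        // so rotation takes effect at the next chunk boundary
        n, err := io.CopyN(f, os.Stdin, 4096)
        written += n
        if err == io.EOF {
            return
        }
        if err != nil {
            log.Fatal(err)
        }
    }
}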

Go udp connection dial once, and send many

Please consider the code below; it is a simplified version of a service. I launch a number of goroutines as needed during its lifetime, and as they go about doing things, they need to send UDP messages to a set destination.
package main

import (
    "fmt"
    "log"
    "net"
    "time"
)

const (
    udp_dest = "192.168.1.200:514"
)

func main() {
    fmt.Println("Hello")
    message := "this is a test"
    log_message(&message)
    go worker(1)
    go worker(2)
    go worker(3)
    go worker(4)
    time.Sleep(3009 * time.Second)
}

func log_message(message *string) {
    RemoteAddr, err := net.ResolveUDPAddr("udp", udp_dest)
    if err != nil {
        //fmt.Println("Err, net.ResolveUDPAddr", err)
        return
    }
    conn, err := net.DialUDP("udp", nil, RemoteAddr)
    if err != nil {
        return
    }
    udp_message := fmt.Sprintf("<30> %s", *message)
    Bytes, _ := conn.Write([]byte(udp_message))
    log.Printf("Sent %d Bytes to %s\n", Bytes, udp_dest)
}

func worker(tag int) {
    i := 0
    for {
        worker_message := fmt.Sprintf("Some message from worker%d, loop: %d", tag, i)
        log_message(&worker_message)
        // do some work..
        time.Sleep(300 * time.Second)
        i += 1
    }
}
In my log_message, every time it gets called we call net.DialUDP, which I feel is wasteful. I tried experimenting with a global *net.UDPConn variable and the like, but could not get it to work.
How can I achieve/optimize this? There is only one UDP destination, and I'd like the daemon to dial once at start-up and then just Write as needed.
Thanks!
Here's what I've got so far:
package main

import (
    "fmt"
    "log"
    "net"
    "time"
)

const (
    udp_dest = "192.168.1.200:514"
)

var (
    myconn *net.UDPConn
)

func main() {
    fmt.Println("Hello")
    message := "this is a test"
    log_message(&message)
    go worker(1)
    go worker(2)
    go worker(3)
    go worker(4)
    time.Sleep(3009 * time.Second)
}

func log_message(message *string) {
    if myconn == nil {
        fmt.Println("Setting up myconn!")
        RemoteAddr, err := net.ResolveUDPAddr("udp", udp_dest)
        if err != nil {
            //fmt.Println("Err, net.ResolveUDPAddr", err)
            return
        }
        myconn, err = net.DialUDP("udp", nil, RemoteAddr)
        if err != nil {
            return
        }
    }
    udp_message := fmt.Sprintf("<30> %s", *message)
    Bytes, _ := myconn.Write([]byte(udp_message))
    log.Printf("Sent %d Bytes to %s\n", Bytes, udp_dest)
}

func worker(tag int) {
    i := 0
    for {
        worker_message := fmt.Sprintf("Some message from worker%d, loop: %d", tag, i)
        log_message(&worker_message)
        // do some work..
        time.Sleep(10 * time.Second)
        i += 1
    }
}
You are almost there. Move the setup code to a function and call it before starting the goroutines.
func main() {
    if err := setupLog(); err != nil {
        log.Fatal(err)
    }
    fmt.Println("Hello")
    // ... same as before
}

func setupLog() error {
    fmt.Println("Setting up myconn!")
    RemoteAddr, err := net.ResolveUDPAddr("udp", udp_dest)
    if err != nil {
        return err
    }
    myconn, err = net.DialUDP("udp", nil, RemoteAddr)
    return err
}

func log_message(message *string) {
    udp_message := fmt.Sprintf("<30> %s", *message)
    Bytes, _ := myconn.Write([]byte(udp_message))
    log.Printf("Sent %d Bytes to %s\n", Bytes, udp_dest)
}
The code in the question does not work reliably because there is a data race on myconn: several goroutines can check it for nil and dial concurrently. Setting the connection up once, before the goroutines start, removes the race.
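If you do want lazy initialization instead of dialing in main, a sync.Once-based variant avoids that race. This is just a sketch (it keeps the udp_dest constant and myconn variable from above and needs an extra sync import):

var (
    myconn   *net.UDPConn
    dialOnce sync.Once
)

func log_message(message *string) {
    // Dial exactly once, no matter how many goroutines call this concurrently.
    dialOnce.Do(func() {
        addr, err := net.ResolveUDPAddr("udp", udp_dest)
        if err != nil {
            log.Println("resolve:", err)
            return
        }
        myconn, err = net.DialUDP("udp", nil, addr)
        if err != nil {
            log.Println("dial:", err)
        }
    })
    if myconn == nil {
        return // dialing failed
    }
    udp_message := fmt.Sprintf("<30> %s", *message)
    n, _ := myconn.Write([]byte(udp_message))
    log.Printf("Sent %d bytes to %s\n", n, udp_dest)
}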

Get permanent MAC address

Is there an easy way to get the permanent MAC Address using Go?
package main

import (
    "fmt"
    "log"
    "net"
)

func getMacAddr() ([]string, error) {
    ifas, err := net.Interfaces()
    if err != nil {
        return nil, err
    }
    var as []string
    for _, ifa := range ifas {
        a := ifa.HardwareAddr.String()
        if a != "" {
            as = append(as, a)
        }
    }
    return as, nil
}

func main() {
    as, err := getMacAddr()
    if err != nil {
        log.Fatal(err)
    }
    for _, a := range as {
        fmt.Println(a)
    }
}
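If you only care about one particular interface, net.InterfaceByName avoids iterating over all of them; a small sketch (the interface name "eth0" is only an assumption, adjust it to your system):

func getMacByName(name string) (string, error) {
    ifa, err := net.InterfaceByName(name) // e.g. getMacByName("eth0")
    if err != nil {
        return "", err
    }
    return ifa.HardwareAddr.String(), nil
}

Note that both approaches report the address currently configured on the interface; if the MAC has been overridden, reading the factory-programmed ("permanent") address requires OS-specific calls.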

How to generate multiple UUID and MD5 files in Go

Hi, I've generated an MD5 and a UUID in Go, but now I want to generate them for multiple files using command-line arguments. What exactly do I need to do? This is how I've generated my MD5 and UUID:
package main

import (
    "crypto/md5"
    "crypto/rand"
    "fmt"
    "io"
    "log"
    "os"
    "text/template"
)

type Data struct {
    Uuid string
    Md5  string
}

func main() {
    uuid, err := newUUID()
    if err != nil {
        fmt.Printf("error: %v\n", err)
    }
    fmt.Printf("UUID: %s\n", uuid)
    md5 := Getmd5(uuid)
    fmt.Printf("Checksum: %s\n", md5)
    fillData := Data{uuid, md5}
    file, err := os.Create("text.txt")
    if err != nil {
        return
    }
    defer file.Close()
    templ, err := template.ParseFiles("template.html")
    if err != nil {
        log.Fatalln(err)
    }
    err = templ.Execute(file, fillData)
    if err != nil {
        log.Fatalln(err)
    }
}

// newUUID generates a random UUID according to RFC 4122
func newUUID() (string, error) {
    uuid := make([]byte, 16)
    n, err := io.ReadFull(rand.Reader, uuid)
    if n != len(uuid) || err != nil {
        return "", err
    }
    // variant bits
    uuid[8] = uuid[8]&^0xc0 | 0x80
    // version 4 (pseudo-random)
    uuid[6] = uuid[6]&^0xf0 | 0x40
    return fmt.Sprintf("%x-%x-%x-%x-%x", uuid[0:4], uuid[4:6], uuid[6:8], uuid[8:10], uuid[10:]), nil
}

func Getmd5(uuid string) string {
    data := []byte(uuid)
    //md5_buffer := fmt.Sprintf("%x", md5.Sum(data))
    md5_buffer := md5.Sum(data)
    return fmt.Sprintf("{0x%x,0x%x,0x%x,0x%x,0x%x,0x%x,0x%x,0x%x,0x%x,0x%x,0x%x,0x%x,0x%x,0x%x,0x%x,0x%x};\n", md5_buffer[0:1],
        md5_buffer[1:2], md5_buffer[2:3], md5_buffer[3:4], md5_buffer[4:5], md5_buffer[5:6], md5_buffer[6:7], md5_buffer[7:8],
        md5_buffer[8:9], md5_buffer[9:10], md5_buffer[10:11], md5_buffer[11:12], md5_buffer[12:13], md5_buffer[13:14], md5_buffer[14:15],
        md5_buffer[15:16])
}
Can anyone help me out?
You can use os.Args to accept command-line arguments.
os.Args provides access to the raw command-line arguments. Note that the first value in this slice is the path to the program, and os.Args[1:] holds the arguments passed to the program.
Your program will look like this; have a look at the createFile and getNumberOfFiles functions and at main:
package main

import (
    "crypto/md5"
    "crypto/rand"
    "errors"
    "fmt"
    "io"
    "log"
    "os"
    "strconv"
    "text/template"
)

type Data struct {
    Uuid string
    Md5  string
}

func createFile(uuid string) {
    md5 := Getmd5(uuid)
    fmt.Printf("Checksum: %s\n", md5)
    fillData := Data{uuid, md5}
    file, err := os.Create(uuid + ".txt")
    if err != nil {
        return
    }
    defer file.Close()
    templ, err := template.ParseFiles("template.html")
    if err != nil {
        log.Fatalln(err)
    }
    err = templ.Execute(file, fillData)
    if err != nil {
        log.Fatalln(err)
    }
}

func getNumberOfFiles() (num int, err error) {
    if len(os.Args) == 1 {
        return 0, errors.New("not enough arguments")
    }
    if num, err = strconv.Atoi(os.Args[1]); err != nil {
        return
    }
    return num, nil
}

func main() {
    numberOfFiles, err := getNumberOfFiles()
    if err != nil {
        fmt.Println(err.Error())
        return
    }
    fmt.Printf("Creating %d files\n", numberOfFiles)
    for i := 0; i < numberOfFiles; i++ {
        uuid, err := newUUID()
        if err != nil {
            fmt.Printf("error: %v\n", err)
        }
        createFile(uuid)
    }
}

// newUUID generates a random UUID according to RFC 4122
func newUUID() (string, error) {
    uuid := make([]byte, 16)
    n, err := io.ReadFull(rand.Reader, uuid)
    if n != len(uuid) || err != nil {
        return "", err
    }
    // variant bits
    uuid[8] = uuid[8]&^0xc0 | 0x80
    // version 4 (pseudo-random)
    uuid[6] = uuid[6]&^0xf0 | 0x40
    return fmt.Sprintf("%x-%x-%x-%x-%x", uuid[0:4], uuid[4:6], uuid[6:8], uuid[8:10], uuid[10:]), nil
}

func Getmd5(uuid string) string {
    data := []byte(uuid)
    //md5_buffer := fmt.Sprintf("%x", md5.Sum(data))
    md5_buffer := md5.Sum(data)
    return fmt.Sprintf("{0x%x,0x%x,0x%x,0x%x,0x%x,0x%x,0x%x,0x%x,0x%x,0x%x,0x%x,0x%x,0x%x,0x%x,0x%x,0x%x};\n", md5_buffer[0:1],
        md5_buffer[1:2], md5_buffer[2:3], md5_buffer[3:4], md5_buffer[4:5], md5_buffer[5:6], md5_buffer[6:7], md5_buffer[7:8],
        md5_buffer[8:9], md5_buffer[9:10], md5_buffer[10:11], md5_buffer[11:12], md5_buffer[12:13], md5_buffer[13:14], md5_buffer[14:15],
        md5_buffer[15:16])
}
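If you prefer a named flag over a positional argument, the standard flag package is an alternative. A small sketch (the flag name -n is an assumption, and it needs the flag import instead of strconv):

func getNumberOfFiles() (int, error) {
    num := flag.Int("n", 0, "number of files to generate")
    flag.Parse()
    if *num <= 0 {
        return 0, errors.New("pass -n with a positive number of files")
    }
    return *num, nil
}

The program would then be invoked as ./program -n 5 instead of ./program 5.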

Read text file into string array (and write)

The ability to read (and write) a text file into and out of a string array is, I believe, a fairly common requirement. It is also quite useful when starting with a language, removing the need to access a database right away. Does such a function exist in Go?
e.g.
func ReadLines(sFileName string, iMinLines int) ([]string, bool) {
and
func WriteLines(saBuff []string, sFilename string) bool {
I would prefer to use an existing one rather than duplicate.
As of the Go 1.1 release, there is a bufio.Scanner API that can easily read lines from a file. Consider the readLines/writeLines example from the older answers below, rewritten with Scanner:
package main

import (
    "bufio"
    "fmt"
    "log"
    "os"
)

// readLines reads a whole file into memory
// and returns a slice of its lines.
func readLines(path string) ([]string, error) {
    file, err := os.Open(path)
    if err != nil {
        return nil, err
    }
    defer file.Close()

    var lines []string
    scanner := bufio.NewScanner(file)
    for scanner.Scan() {
        lines = append(lines, scanner.Text())
    }
    return lines, scanner.Err()
}

// writeLines writes the lines to the given file.
func writeLines(lines []string, path string) error {
    file, err := os.Create(path)
    if err != nil {
        return err
    }
    defer file.Close()

    w := bufio.NewWriter(file)
    for _, line := range lines {
        fmt.Fprintln(w, line)
    }
    return w.Flush()
}

func main() {
    lines, err := readLines("foo.in.txt")
    if err != nil {
        log.Fatalf("readLines: %s", err)
    }
    for i, line := range lines {
        fmt.Println(i, line)
    }
    if err := writeLines(lines, "foo.out.txt"); err != nil {
        log.Fatalf("writeLines: %s", err)
    }
}
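One caveat worth adding (not part of the original answer): bufio.Scanner has a default maximum token size of 64 KB, so a file with extremely long lines will make Scan stop with bufio.ErrTooLong. If that can happen, the buffer can be enlarged before scanning, for example:

scanner := bufio.NewScanner(file)
// allow lines of up to 1 MiB instead of the 64 KiB default
scanner.Buffer(make([]byte, 0, 64*1024), 1024*1024)
for scanner.Scan() {
    lines = append(lines, scanner.Text())
}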
Note: ioutil is deprecated as of Go 1.16; its functions now live in the os and io packages (for example, ioutil.ReadFile is now os.ReadFile).
If the file isn't too large, this can be done with the ioutil.ReadFile and strings.Split functions like so:
content, err := ioutil.ReadFile(filename)
if err != nil {
    // do something
}
lines := strings.Split(string(content), "\n")
You can read the documentation on ioutil and strings packages.
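On Go 1.16 and later the same idea works without the deprecated package; a minimal sketch:

content, err := os.ReadFile(filename)
if err != nil {
    // handle the error
}
lines := strings.Split(string(content), "\n")

// and writing a string slice back out:
err = os.WriteFile(filename, []byte(strings.Join(lines, "\n")), 0644)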
I cannot update the first answer (see below). Anyway, after the Go 1 release there were some breaking changes, so I updated it as shown here:
package main

import (
    "bufio"
    "bytes"
    "fmt"
    "io"
    "os"
    "strings"
)

// Read a whole file into memory and store it as an array of lines.
func readLines(path string) (lines []string, err error) {
    var (
        file   *os.File
        part   []byte
        prefix bool
    )
    if file, err = os.Open(path); err != nil {
        return
    }
    defer file.Close()
    reader := bufio.NewReader(file)
    buffer := bytes.NewBuffer(make([]byte, 0))
    for {
        if part, prefix, err = reader.ReadLine(); err != nil {
            break
        }
        buffer.Write(part)
        if !prefix {
            lines = append(lines, buffer.String())
            buffer.Reset()
        }
    }
    if err == io.EOF {
        err = nil
    }
    return
}

func writeLines(lines []string, path string) (err error) {
    var (
        file *os.File
    )
    if file, err = os.Create(path); err != nil {
        return
    }
    defer file.Close()
    //writer := bufio.NewWriter(file)
    for _, item := range lines {
        //fmt.Println(item)
        _, err = file.WriteString(strings.TrimSpace(item) + "\n") // assign, don't shadow, so the error is returned
        //file.Write([]byte(item))
        if err != nil {
            //fmt.Println("debug")
            fmt.Println(err)
            break
        }
    }
    /*content := strings.Join(lines, "\n")
      _, err = writer.WriteString(content)*/
    return
}

func main() {
    lines, err := readLines("foo.txt")
    if err != nil {
        fmt.Printf("Error: %s\n", err)
        return
    }
    for _, line := range lines {
        fmt.Println(line)
    }
    //array := []string{"7.0", "8.5", "9.1"}
    err = writeLines(lines, "foo2.txt")
    fmt.Println(err)
}
You can use os.File (which implements the io.Reader interface) together with the bufio package for that. However, those packages are built with fixed memory usage in mind (no matter how large the file is) and are quite fast.
Unfortunately, this makes reading the whole file into memory a bit more complicated. You can use a bytes.Buffer to join the parts of a line if they exceed the line limit. Anyway, I recommend trying to use the line reader directly in your project (especially if you do not know how large the text file is!). But if the file is small, the following example might be sufficient for you:
package main

import (
    "bufio"
    "bytes"
    "fmt"
    "os"
)

// Read a whole file into memory and store it as an array of lines.
// (This is the original pre-Go 1 version; see the updated answer above.)
func readLines(path string) (lines []string, err os.Error) {
    var (
        file   *os.File
        part   []byte
        prefix bool
    )
    if file, err = os.Open(path); err != nil {
        return
    }
    reader := bufio.NewReader(file)
    buffer := bytes.NewBuffer(make([]byte, 1024))
    for {
        if part, prefix, err = reader.ReadLine(); err != nil {
            break
        }
        buffer.Write(part)
        if !prefix {
            lines = append(lines, buffer.String())
            buffer.Reset()
        }
    }
    if err == os.EOF {
        err = nil
    }
    return
}

func main() {
    lines, err := readLines("foo.txt")
    if err != nil {
        fmt.Printf("Error: %s\n", err)
        return
    }
    for _, line := range lines {
        fmt.Println(line)
    }
}
Another alternative might be to use ioutil.ReadAll to read the complete file at once and do the slicing by line afterwards. I don't give you an explicit example of how to write the lines back to the file, but that's basically an os.Create() followed by a loop similar to the one in the example (see main()).
func readToDisplayUsingFile1(f *os.File) []string {
    defer f.Close()
    reader := bufio.NewReader(f)
    contents, _ := ioutil.ReadAll(reader)
    return strings.Split(string(contents), "\n") // Split takes a string separator, not a rune
}
or
func readToDisplayUsingFile1(f *os.File) []string {
    defer f.Close()
    slice := make([]string, 0)
    reader := bufio.NewReader(f)
    for {
        str, err := reader.ReadString('\n')
        if err == io.EOF {
            break
        }
        slice = append(slice, str)
    }
    return slice
}
