Go is not writing complete data to text file - go

I am trying to explore Go concurrency. Here Grabber() prints and writes the result of the execution. The program prints the expected result, but does not write it to urls.txt. Can anyone explain to me what i am missing here?
main.go
package main
import (
"bufio"
"fmt"
"io/ioutil"
"log"
"net/http"
"os"
"regexp"
"strings"
"sync"
)
var wg sync.WaitGroup
var mt sync.Mutex
// Final Literation
func main() {
file, err := os.Open("ip.txt")
if err != nil {
log.Fatal(err)
}
defer file.Close()
scanner := bufio.NewScanner(file)
for scanner.Scan() {
go Grabber(scanner.Text())
wg.Add(1)
}
wg.Wait()
if err := scanner.Err(); err != nil {
log.Fatal(err)
}
}
// stringInArray do If string in list return true false otherwise.
func stringInArray(a string, list []string) bool {
for _, b := range list {
if b == a {
return true
}
}
return false
}
// Grabber Do Search the bing and collect array of sitelist
func Grabber(ip string) {
defer wg.Done()
var output []string
outfile, err := os.Create("urls.txt")
if err != nil {
log.Fatal(err)
}
defer outfile.Close()
if ip == "" {
}
page := 1
for page < 251 {
client := &http.Client{}
req, err := http.NewRequest(
http.MethodGet,
fmt.Sprintf(
"http://www.bing.com/search?q=ip:%s+&count=50&first=1",
ip,
),
nil,
)
if err != nil {
}
req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 6.1; rv:57.0) Gecko/20100101 Firefox/57.0")
res, err := client.Do(req)
if err != nil {
fmt.Println("Invalid Request")
}
defer res.Body.Close()
body, err := ioutil.ReadAll(res.Body)
if err != nil {
fmt.Println("Couldn't Read")
}
re := regexp.MustCompile(`<h2><a href="(.*?)"`)
links := re.FindAllString(string(body), -1)
if links != nil {
for l := range links {
o := strings.Split(links[l], `"`)
d := strings.Split(o[1], "/")
s := d[0] + "//" + d[2]
if !stringInArray(s, output) {
output = append(output, s)
}
}
}
page = page + 50
}
for _, links := range output {
fmt.Println(links)
fmt.Fprintln(outfile, links)
}
}
Ip.txt as input
103.253.145.129
103.253.146.125
103.253.146.239
103.253.147.72
146.185.176.79
146.185.176.45
146.185.179.250
146.185.180.35
146.185.180.185
146.185.180.113
146.185.181.51
146.185.183.107
146.185.183.202
146.185.183.248
146.185.183.219
146.185.184.69
146.185.185.169
git repo URLGrabber

You are calling create in each goroutine, which will truncate the file. Instead, create the file outside, and serialize the writes to it using another goroutine:
outfile, err := os.Create("urls.txt")
results:=make(chan []string)
go func() {
for output:=range results {
for _, links := range output {
fmt.Fprintln(outfile, links)
}
}
}()
scanner := bufio.NewScanner(file)
for scanner.Scan() {
go Grabber(scanner.Text(), results)
wg.Add(1)
}
wg.Wait()
close(results)
When you get the results in Grabber, instead of writing it to the file, write it to the channel:
results<-output
for _, links := range output {
fmt.Println(links)
}

Related

go use fsnotify watch file ischanged not working

I use the library (http://github.com/fsnotify/fsnotify) to monitor the file system.
I m trying to adjust the repository example to match my requirements, but when i do so the program is not working anymore.
I commented the done channel within the ExampleNewWatcher function
done := make(chan bool)
<-done
As a result, now when i run the example, this channel does not output anything anymore.
event, ok := <-watcher.Events
Complete code:
package main
import (
"github.com/fsnotify/fsnotify"
"log"
"os"
"strconv"
"time"
)
func ExampleNewWatcher() {
watcher, err := fsnotify.NewWatcher()
if err != nil {
log.Fatal(err)
}
defer watcher.Close()
done := make(chan bool) // if i commen this line the `panic` not norking
go func() {
for {
select {
case event, ok := <-watcher.Events:
if !ok {
return
}
log.Println("event:", event)
if event.Op&fsnotify.Write == fsnotify.Write {
log.Println("modified file:", event.Name)
}
panic("just for test") // new output this line
case err, ok := <-watcher.Errors:
if !ok {
return
}
log.Println("error:", err)
}
}
}()
err = watcher.Add("/tmp/foo")
if err != nil {
log.Fatal(err)
}
<-done // comment this
}
func check(err error) {
if err != nil {
panic(err)
}
}
func main() {
// create the test file
var err error
file, err := os.Create("/tmp/foo")
check(err)
_, err = file.Write([]byte("hello world"))
check(err)
stopchan := make(chan struct{})
// test change file
go func() {
for i := 0; i < 10; i++ {
file1, err := os.OpenFile("/tmp/foo", os.O_RDWR, 0644)
check(err)
d := strconv.Itoa(i) + "hello world"
_, err = file1.Write([]byte(d))
check(err)
err = file1.Close()
check(err)
time.Sleep(2 * time.Second) // wait the context writed to the file
}
}()
ExampleNewWatcher() // monitor file
stopchan <- struct{}{}
}

Client stuck when trying to read with io.CopyN() in golang

I am trying to make TCP server for transferring files. I am suing io.CopyN for reading and writing. From server side, I am sending files to client so from server side, it sends perfectly all bytes but Client side after reading a couple of 1000000 bytes it stuck. sometimes it works fine and sometimes it gets stuck. I am using 300 MB pdf to test. Any help, code, and output is like below.
server
package main
import (
"fmt"
"io"
"log"
"net"
"os"
"strconv"
"strings"
)
func main() {
ls, err := net.Listen("tcp", ":1234")
errFunc(err)
defer ls.Close()
conn, _ := ls.Accept()
defer conn.Close()
for {
file, err := os.Open(strings.TrimSpace("./" + "Mag" + ".pdf"))
errFunc(err)
defer file.Close()
fileInfo, err := file.Stat()
errFunc(err)
size := fileInfo.Size()
numberOfTime := size / 1000000
leftByte := size - numberOfTime*1000000
numberOfTimeString := strconv.Itoa(int(numberOfTime))
leftByteString := strconv.Itoa(int(leftByte))
fmt.Println("1000000 times : ", numberOfTimeString)
fmt.Println("Left Bytes : ", leftByteString)
_, err = fmt.Fprintf(conn, numberOfTimeString+"\n")
errFunc(err)
_, err = fmt.Fprintf(conn, leftByteString+"\n")
errFunc(err)
fileWriter := io.Writer(conn)
for i := 0; i < int(numberOfTime); i++ {
n, err := io.CopyN(conn, file, 1000000)
if i >= 30 {
fmt.Println(err, n)
}
}
n, err := io.CopyN(fileWriter, file, leftByte+1)
if err == io.EOF {
fmt.Println(err, n)
}
fmt.Printf("Succefully bytes sent : %v \n\n\n\n\n", n)
file.Close()
}
}
func errFunc(err error) {
if err != nil {
log.Fatal(err)
}
}
client
package main
import (
"bufio"
"fmt"
"io"
"net"
"os"
"os/signal"
"strconv"
"strings"
"syscall"
)
func main() {
c := make(chan os.Signal, 15)
signal.Notify(c, syscall.SIGINT)
go func() {
for {
s := <-c
switch s {
case syscall.SIGINT:
os.Exit(1)
}
}
}()
conn, _ := net.Dial("tcp", ":1234")
defer conn.Close()
connReadWrite := bufio.NewReader(io.Reader(conn))
var i int
var filename string
for {
i++
nu := strconv.Itoa(i)
filename = "image" + nu + ".pdf"
file, err := os.Create(filename)
defer file.Close()
numberOfTimeString, err := connReadWrite.ReadString('\n')
if err != nil {
fmt.Println(err)
}
println("1000000 times :", numberOfTimeString)
numberOfTimeString = strings.TrimSuffix(numberOfTimeString, "\n")
numberOfTime, err := strconv.Atoi(numberOfTimeString)
if err != nil {
fmt.Println(err)
}
leftByteString, err := connReadWrite.ReadString('\n')
if err != nil {
println(err)
}
println("Left Bytes :", leftByteString)
leftByteString = strings.TrimSuffix(leftByteString, "\n")
leftByte, err := strconv.Atoi(leftByteString)
if err != nil {
panic(err)
}
fmt.Println("After convert in Num :", numberOfTime, leftByte)
newFileWriter := io.Writer(file)
newFileReader := io.Reader(conn)
for i := 0; i < numberOfTime; i++ {
n, err := io.CopyN(newFileWriter, newFileReader, 1000000)
if i >= 30 {
errFun(err, n)
}
}
n, err := io.CopyN(newFileWriter, newFileReader, int64(leftByte))
errFun(err, n)
fmt.Printf("sucessfully Transfered ---> \n\n\n\n\n\n")
}
}
func errFun(err error, n int64) {
if err == io.EOF {
fmt.Println("End of file : ", n)
return
} else if n == 0 {
fmt.Println("n is : ", n)
return
} else if err != nil {
fmt.Println(err)
return
}
fmt.Println(err, " : ", n)
}
input/output
from server side first we are sending number of bytes it need to readand then client side it gets a number of bytes it needs to read and then I am sending the file and then it read. In the picture, I was able to send one-time second time it got stuck sometimes it stuck first time too.I am able to send number of byte from server side second time too but as you can see it don't read that numeber, it read something "%PDF..." and it even don't print "100000 times : " correctly it prints "%???00 times :" I just don’t understand this
enter image description here
I believe the issue is that you're using a bytes.Buffer in the client:
connReadWrite := bufio.NewReader(io.Reader(conn))
But you aren't using it later with the CopyN:
newFileWriter := io.Writer(file)
newFileReader := io.Reader(conn)
for i := 0; i < numberOfTime; i++ {
_, err := io.CopyN(newFileWriter, newFileReader, 1000000)
if err != nil {
log.Fatalln(err)
}
}
Using:
newFileWriter := io.Writer(file)
for i := 0; i < numberOfTime; i++ {
_, err := io.CopyN(file, connReadWrite, 1000000)
if err != nil {
log.Fatalln(err)
}
}
May fix it.
If you have control over the protocol you are using to send the file, I recommend doing something simpler. For example using the big-endian int64 length prefix.
Send:
func sendFile(name string, conn net.Conn) error {
f, err := os.Open(name)
if err != nil {
return err
}
defer f.Close()
fi, err := f.Stat()
if err != nil {
return err
}
sz := fi.Size()
buf := bufio.NewWriter(conn)
err = binary.Write(buf, binary.BigEndian, sz)
if err != nil {
return err
}
_, err = io.CopyN(buf, f, sz)
if err != nil {
return err
}
return buf.Flush()
}
Receive:
func recvFile(name string, conn net.Conn) error {
f, err := os.Create(name)
if err != nil {
return err
}
defer f.Close()
buf := bufio.NewReader(conn)
var sz int64
err = binary.Read(buf, binary.BigEndian, &sz)
if err != nil {
return err
}
_, err = io.CopyN(f, buf, sz)
if err != nil {
return err
}
return nil
}

No output to error file

I'm coding a little Go program.
It reads files in a directory line by line, it only reads lines with a certain prefix, normalizes the data and outputs to one of two files, depending on whether the normalized record has certain number of elements.
Data is being outputted to the Data file, but errors are not being outputted to the Errors file.
Debugging I see no issue.
Any help is much appreciated.
Thanks,
Martin
package main
import (
"bufio"
"fmt"
"io/ioutil"
"log"
"os"
"strings"
)
func main() {
//Output file - Data
if _, err := os.Stat("allData.txt"); os.IsNotExist(err) {
var file, err = os.Create("allData.txt")
if err != nil {
fmt.Println(err)
return
}
defer file.Close()
}
file, err := os.OpenFile("allData.txt", os.O_WRONLY|os.O_APPEND, 0644)
if err != nil {
panic(err)
}
w := bufio.NewWriter(file)
//Output file - Errors
if _, err := os.Stat("errorData.txt"); os.IsNotExist(err) {
var fileError, err = os.Create("errorData.txt")
if err != nil {
fmt.Println(err)
return
}
defer fileError.Close()
}
fileError, err := os.OpenFile("errorData.txt", os.O_WRONLY|os.O_APPEND, 0644)
if err != nil {
panic(err)
}
z := bufio.NewWriter(fileError)
//Read Directory
files, err := ioutil.ReadDir("../")
if err != nil {
log.Fatal(err)
}
//Build file path
for _, f := range files {
fName := string(f.Name())
sPath := string("../" + fName)
sFile, err := os.Open(sPath)
if err != nil {
fmt.Println(err)
return
}
//Create scanner
scanner := bufio.NewScanner(sFile)
scanner.Split(bufio.ScanLines)
var lines []string
// This is the buffer now
for scanner.Scan() {
lines = append(lines, scanner.Text())
}
for _, line := range lines {
sRecordC := strings.HasPrefix((line), "DATA:")
if sRecordC {
splitted := strings.Split(line, " ")
splittedNoSpaces := deleteEmpty(splitted)
if len(splittedNoSpaces) == 11 {
splittedString := strings.Join(splittedNoSpaces, " ")
sFinalRecord := string(splittedString + "\r\n")
if _, err = fmt.Fprintf(w, sFinalRecord); err != nil {
}
}
if len(splittedNoSpaces) < 11 {
splitted := strings.Split(line, " ")
splittedNoSpaces := deleteEmpty(splitted)
splittedString := strings.Join(splittedNoSpaces, " ")
sFinalRecord := string(splittedString + "\r\n")
if _, err = fmt.Fprintf(z, sFinalRecord); err != nil {
}
err = fileError.Sync()
if err != nil {
log.Fatal(err)
}
}
}
}
}
err = file.Sync()
if err != nil {
log.Fatal(err)
}
}
//Delete Empty array elements
func deleteEmpty(s []string) []string {
var r []string
for _, str := range s {
if str != "" {
r = append(r, str)
}
}
return r
}
Don't open the file multiple times, and don't check for the file's existence before creating it, just use the os.O_CREATE flag. You're also not deferring the correct os.File.Close call, because it's opened multiple times.
When using a bufio.Writer, you should always call Flush() to ensure that all data has been written to the underlying io.Writer.

golang sync.WaitGroup never completes

I have the below code that fetches a list of URL's and then conditionally downloads a file and saves it to the filesystem. The files are fetched concurrently and the main goroutine waits for all the files to be fetched. But, the program never exits (and there are no errors) after completing all the requests.
What I think is happening is that somehow the amount of go routines in the WaitGroup is either incremented by too many to begin with (via Add) or not decremented by enough (a Done call is not happening).
Is there something I am obviously doing wrong? How would I inspect how many go routines are presently in the WaitGroup so I can better debug what's happening?
package main
import (
"fmt"
"io"
"io/ioutil"
"net/http"
"os"
"strings"
"sync"
)
func main() {
links := parseLinks()
var wg sync.WaitGroup
for _, url := range links {
if isExcelDocument(url) {
wg.Add(1)
go downloadFromURL(url, wg)
} else {
fmt.Printf("Skipping: %v \n", url)
}
}
wg.Wait()
}
func downloadFromURL(url string, wg sync.WaitGroup) error {
tokens := strings.Split(url, "/")
fileName := tokens[len(tokens)-1]
fmt.Printf("Downloading %v to %v \n", url, fileName)
content, err := os.Create("temp_docs/" + fileName)
if err != nil {
fmt.Printf("Error while creating %v because of %v", fileName, err)
return err
}
resp, err := http.Get(url)
if err != nil {
fmt.Printf("Could not fetch %v because %v", url, err)
return err
}
defer resp.Body.Close()
_, err = io.Copy(content, resp.Body)
if err != nil {
fmt.Printf("Error while saving %v from %v", fileName, url)
return err
}
fmt.Printf("Download complete for %v \n", fileName)
defer wg.Done()
return nil
}
func isExcelDocument(url string) bool {
return strings.HasSuffix(url, ".xlsx") || strings.HasSuffix(url, ".xls")
}
func parseLinks() []string {
linksData, err := ioutil.ReadFile("links.txt")
if err != nil {
fmt.Printf("Trouble reading file: %v", err)
}
links := strings.Split(string(linksData), ", ")
return links
}
There are two problems with this code. First, you have to pass a pointer to the WaitGroup to downloadFromURL(), otherwise the object will be copied and Done() will not be visible in main().
See:
func main() {
...
go downloadFromURL(url, &wg)
...
}
Second, defer wg.Done() should be one of the first statements in downloadFromURL(), otherwise if you return from the function before that statement, it won't get "registered" and won't get called.
func downloadFromURL(url string, wg *sync.WaitGroup) error {
defer wg.Done()
...
}
Arguments in Go are always passed by value. Use a pointer when an argument may be modified. Also, make sure that you always execute wg.Done().For example,
package main
import (
"fmt"
"io"
"io/ioutil"
"net/http"
"os"
"strings"
"sync"
)
func main() {
links := parseLinks()
wg := new(sync.WaitGroup)
for _, url := range links {
if isExcelDocument(url) {
wg.Add(1)
go downloadFromURL(url, wg)
} else {
fmt.Printf("Skipping: %v \n", url)
}
}
wg.Wait()
}
func downloadFromURL(url string, wg *sync.WaitGroup) error {
defer wg.Done()
tokens := strings.Split(url, "/")
fileName := tokens[len(tokens)-1]
fmt.Printf("Downloading %v to %v \n", url, fileName)
content, err := os.Create("temp_docs/" + fileName)
if err != nil {
fmt.Printf("Error while creating %v because of %v", fileName, err)
return err
}
resp, err := http.Get(url)
if err != nil {
fmt.Printf("Could not fetch %v because %v", url, err)
return err
}
defer resp.Body.Close()
_, err = io.Copy(content, resp.Body)
if err != nil {
fmt.Printf("Error while saving %v from %v", fileName, url)
return err
}
fmt.Printf("Download complete for %v \n", fileName)
return nil
}
func isExcelDocument(url string) bool {
return strings.HasSuffix(url, ".xlsx") || strings.HasSuffix(url, ".xls")
}
func parseLinks() []string {
linksData, err := ioutil.ReadFile("links.txt")
if err != nil {
fmt.Printf("Trouble reading file: %v", err)
}
links := strings.Split(string(linksData), ", ")
return links
}
As #Bartosz mentioned, you will need to pass a reference to your WaitGroup object. He did a great job discussing the importance of defer ws.Done()
I like WaitGroup's simplicity. However, I do not like that we need to pass the reference to the goroutine because that would mean that the concurrency logic would be mixed with your business logic.
So I came up with this generic function to solve this problem for me:
// Parallelize parallelizes the function calls
func Parallelize(functions ...func()) {
var waitGroup sync.WaitGroup
waitGroup.Add(len(functions))
defer waitGroup.Wait()
for _, function := range functions {
go func(copy func()) {
defer waitGroup.Done()
copy()
}(function)
}
}
So your example could be solved this way:
func main() {
links := parseLinks()
functions := []func(){}
for _, url := range links {
if isExcelDocument(url) {
function := func(url string){
return func() { downloadFromURL(url) }
}(url)
functions = append(functions, function)
} else {
fmt.Printf("Skipping: %v \n", url)
}
}
Parallelize(functions...)
}
func downloadFromURL(url string) {
...
}
If you would like to use it, you can find it here https://github.com/shomali11/util

Reading CSV file in Go

Here is a code snippet that reads CSV file:
func parseLocation(file string) (map[string]Point, error) {
f, err := os.Open(file)
defer f.Close()
if err != nil {
return nil, err
}
lines, err := csv.NewReader(f).ReadAll()
if err != nil {
return nil, err
}
locations := make(map[string]Point)
for _, line := range lines {
name := line[0]
lat, laterr := strconv.ParseFloat(line[1], 64)
if laterr != nil {
return nil, laterr
}
lon, lonerr := strconv.ParseFloat(line[2], 64)
if lonerr != nil {
return nil, lonerr
}
locations[name] = Point{lat, lon}
}
return locations, nil
}
Is there a way to improve readability of this code? if and nil noise.
Go now has a csv package for this. Its is encoding/csv. You can find the docs here: https://golang.org/pkg/encoding/csv/
There are a couple of good examples in the docs. Here is a helper method I created to read a csv file and returns its records.
package main
import (
"encoding/csv"
"fmt"
"log"
"os"
)
func readCsvFile(filePath string) [][]string {
f, err := os.Open(filePath)
if err != nil {
log.Fatal("Unable to read input file " + filePath, err)
}
defer f.Close()
csvReader := csv.NewReader(f)
records, err := csvReader.ReadAll()
if err != nil {
log.Fatal("Unable to parse file as CSV for " + filePath, err)
}
return records
}
func main() {
records := readCsvFile("../tasks.csv")
fmt.Println(records)
}
Go is a very verbose language, however you could use something like this:
// predeclare err
func parseLocation(file string) (locations map[string]*Point, err error) {
f, err := os.Open(file)
if err != nil {
return nil, err
}
defer f.Close() // this needs to be after the err check
lines, err := csv.NewReader(f).ReadAll()
if err != nil {
return nil, err
}
//already defined in declaration, no need for :=
locations = make(map[string]*Point, len(lines))
var lat, lon float64 //predeclare lat, lon
for _, line := range lines {
// shorter, cleaner and since we already have lat and err declared, we can do this.
if lat, err = strconv.ParseFloat(line[1], 64); err != nil {
return nil, err
}
if lon, err = strconv.ParseFloat(line[2], 64); err != nil {
return nil, err
}
locations[line[0]] = &Point{lat, lon}
}
return locations, nil
}
//edit
A more efficient and proper version was posted by #Dustin in the comments, I'm adding it here for completeness sake:
func parseLocation(file string) (map[string]*Point, error) {
f, err := os.Open(file)
if err != nil {
return nil, err
}
defer f.Close()
csvr := csv.NewReader(f)
locations := map[string]*Point{}
for {
row, err := csvr.Read()
if err != nil {
if err == io.EOF {
err = nil
}
return locations, err
}
p := &Point{}
if p.lat, err = strconv.ParseFloat(row[1], 64); err != nil {
return nil, err
}
if p.lon, err = strconv.ParseFloat(row[2], 64); err != nil {
return nil, err
}
locations[row[0]] = p
}
}
playground
I basically copied my answer from here: https://www.dotnetperls.com/csv-go. For me, this was a better answer than what I found on stackoverflow.
import (
"bufio"
"encoding/csv"
"os"
"fmt"
"io"
)
func ReadCsvFile(filePath string) {
// Load a csv file.
f, _ := os.Open(filePath)
// Create a new reader.
r := csv.NewReader(f)
for {
record, err := r.Read()
// Stop at EOF.
if err == io.EOF {
break
}
if err != nil {
panic(err)
}
// Display record.
// ... Display record length.
// ... Display all individual elements of the slice.
fmt.Println(record)
fmt.Println(len(record))
for value := range record {
fmt.Printf(" %v\n", record[value])
}
}
}
I also dislike the verbosity of the default Reader, so I made a new type that is
similar to bufio#Scanner:
package main
import "encoding/csv"
import "io"
type Scanner struct {
Reader *csv.Reader
Head map[string]int
Row []string
}
func NewScanner(o io.Reader) Scanner {
csv_o := csv.NewReader(o)
a, e := csv_o.Read()
if e != nil {
return Scanner{}
}
m := map[string]int{}
for n, s := range a {
m[s] = n
}
return Scanner{Reader: csv_o, Head: m}
}
func (o *Scanner) Scan() bool {
a, e := o.Reader.Read()
o.Row = a
return e == nil
}
func (o Scanner) Text(s string) string {
return o.Row[o.Head[s]]
}
Example:
package main
import "strings"
func main() {
s := `Month,Day
January,Sunday
February,Monday`
o := NewScanner(strings.NewReader(s))
for o.Scan() {
println(o.Text("Month"), o.Text("Day"))
}
}
https://golang.org/pkg/encoding/csv
You can also read contents of a directory to load all the CSV files. And then read all those CSV files 1 by 1 with goroutines
csv file:
101,300.00,11000901,1155686400
102,250.99,11000902,1432339200
main.go file:
const sourcePath string = "./source"
func main() {
dir, _ := os.Open(sourcePath)
files, _ := dir.Readdir(-1)
for _, file := range files {
fmt.Println("SINGLE FILE: ")
fmt.Println(file.Name())
filePath := sourcePath + "/" + file.Name()
f, _ := os.Open(filePath)
defer f.Close()
// os.Remove(filePath)
//func
go func(file io.Reader) {
records, _ := csv.NewReader(file).ReadAll()
for _, row := range records {
fmt.Println(row)
}
}(f)
time.Sleep(10 * time.Millisecond)// give some time to GO routines for execute
}
}
And the OUTPUT will be:
$ go run main.go
SINGLE FILE:
batch01.csv
[101 300.00 11000901 1155686400]
[102 250.99 11000902 1432339200]
----------------- -------------- ---------------------- -------
---------------- ------------------- ----------- --------------
Below example with the Invoice struct
func main() {
dir, _ := os.Open(sourcePath)
files, _ := dir.Readdir(-1)
for _, file := range files {
fmt.Println("SINGLE FILE: ")
fmt.Println(file.Name())
filePath := sourcePath + "/" + file.Name()
f, _ := os.Open(filePath)
defer f.Close()
go func(file io.Reader) {
records, _ := csv.NewReader(file).ReadAll()
for _, row := range records {
invoice := new(Invoice)
invoice.InvoiceNumber = row[0]
invoice.Amount, _ = strconv.ParseFloat(row[1], 64)
invoice.OrderID, _ = strconv.Atoi(row[2])
unixTime, _ := strconv.ParseInt(row[3], 10, 64)
invoice.Date = time.Unix(unixTime, 0)
fmt.Printf("Received invoice `%v` for $ %.2f \n", invoice.InvoiceNumber, invoice.Amount)
}
}(f)
time.Sleep(10 * time.Millisecond)
}
}
type Invoice struct {
InvoiceNumber string
Amount float64
OrderID int
Date time.Time
}

Resources