I want to get a string from stdin with func (b *Reader) ReadLine() (line []byte, isPrefix bool, err error), but my code doesn't work correctly.
I'm learning Go. I want to know how to get a string from standard input with ReadLine().
I know, fmt.Scan or Scanner help me, but I want to use ReadLine()
package main
import (
"bufio"
"fmt"
"io"
"os"
"strconv"
)
var sc = bufio.NewScanner(os.Stdin)
var rd = bufio.NewReaderSize(os.Stdin, 1000000)

// nextInt reads the next line from rd and parses it as a base-10 int,
// panicking on a read or parse error.
//
// BUG FIX: the original read through the package-level Scanner sc while
// nextLine read through the buffered Reader rd. Both buffer far ahead of
// what they return, so with redirected input nextLine's first read pulled
// BOTH lines into rd's buffer and the Scanner then saw EOF — sc.Text()
// returned "" and Atoi panicked. All input now flows through the single
// Reader rd. (sc is retained so the declaration set is unchanged; it is
// no longer used.)
func nextInt() int {
	// isPrefix is ignored: a numeric line should never exceed the
	// 1 MB reader buffer — TODO confirm for your inputs.
	line, _, err := rd.ReadLine()
	if err != nil {
		panic(err)
	}
	i, err := strconv.Atoi(string(line))
	if err != nil {
		panic(err)
	}
	return i
}
// nextLine reads one full logical line from rd, stitching together the
// partial chunks ReadLine returns when a line exceeds the reader's
// buffer. At EOF it returns whatever has been accumulated so far; any
// other read error panics.
func nextLine() string {
	var out []byte
	for {
		chunk, more, err := rd.ReadLine()
		if err == io.EOF {
			break
		}
		if err != nil {
			panic(err)
		}
		out = append(out, chunk...)
		if !more {
			break
		}
	}
	return string(out)
}
// main reads a line and then an integer from stdin and echoes both.
func main() {
	line := nextLine()
	number := nextInt()
	fmt.Println(line)
	fmt.Println(number)
}
Result
$ ./a.out
test # input
334 # input
test
334
$ cat in.txt
test
334
$ ./a.out < in.txt
panic: strconv.Atoi: parsing "": invalid syntax
goroutine 1 [running]:
main.nextInt(0xc042056088)
I expected the two outputs to be the same, but when I use redirection it doesn't work and I get different output.
Get rid of the scanner (you already said you prefer ReadLine()) and change your nextInt() function to call nextLine() like this:
// nextInt reads the next input line via nextLine and converts it to an
// int, panicking if the line is not a valid base-10 integer.
func nextInt() int {
	n, err := strconv.Atoi(nextLine())
	if err != nil {
		panic(err)
	}
	return n
}
(BTW It's not a good idea to panic on bad user input but I assume this is just a test and you wouldn't do that for production code :)
You could try not using a scanner, perhaps like this
ReadLine will get the number for you, just convert it:
package main
import (
"bufio"
"fmt"
"io"
"os"
"strconv"
)
// rd buffers os.Stdin with a 1 MB buffer so ReadLine can return long
// lines in at most a few chunks.
var rd = bufio.NewReaderSize(os.Stdin, 1000000)

// nextLine returns the next line of standard input without its trailing
// newline. At EOF it returns whatever has been accumulated so far; any
// other read error panics.
func nextLine() string {
	var acc []byte
	for {
		part, isPrefix, err := rd.ReadLine()
		if err == io.EOF {
			break
		}
		if err != nil {
			panic(err)
		}
		acc = append(acc, part...)
		if !isPrefix {
			break
		}
	}
	return string(acc)
}
// main echoes the first stdin line verbatim, then parses the second
// line as an integer and prints it, panicking on a parse failure.
func main() {
	text := nextLine()
	fmt.Println(text)
	num, err := strconv.Atoi(nextLine())
	if err != nil {
		panic(err)
	}
	fmt.Println(num)
}
Related
Instead of writing a pipe to a huge file, I want to segment the stream into chunks on signal USR1. I think I got the basics working, but the app just hangs and nothing happens. Any clues or best practices for handling an uncontrollable input stream with byte-perfect segmentation?
package main
import (
"bufio"
"fmt"
"io"
"os"
"os/signal"
"sync/atomic"
"syscall"
"time"
)
// done signals main (from the signal-handler goroutine) that the current
// sink should be flushed into a file.
//
// BUG FIX: done was a plain bool written by the handler goroutine and
// polled by main with no synchronization — a data race (the race
// detector flags it, and the write may never become visible to main).
// atomic.Bool makes the flag safe.
var done atomic.Bool

// handle reacts to the segmentation control signals delivered on c.
func handle(c chan os.Signal) {
	for {
		sig := <-c
		switch sig {
		case syscall.SIGUSR1:
			fmt.Println("###Sink temporarily_closed###")
			done.Store(true)
		case syscall.SIGUSR2:
			fmt.Println("###Sink closed###")
			done.Store(true)
		case syscall.SIGHUP:
			fmt.Println("###Sink running###")
		}
	}
}

// check aborts the program on any error.
func check(e error) {
	if e != nil {
		panic(e)
	}
}

// main polls the done flag and, when set, copies the buffered stdin
// stream into ./temp.file.
func main() {
	c := make(chan os.Signal, 1)
	signal.Notify(c, syscall.SIGUSR1, syscall.SIGUSR2, syscall.SIGHUP)
	go handle(c)
	reader := bufio.NewReaderSize(os.Stdin, 1024*10)
	for {
		if done.Load() {
			file, err := os.Create("./temp.file")
			check(err)
			writer := bufio.NewWriter(file)
			// NOTE(review): io.Copy blocks until the source reaches
			// EOF, so with an endless stdin stream this never returns
			// — the "hang" described above. Segmentation needs a
			// bounded copy (io.CopyN), as shown in the answer below.
			written, err := io.Copy(writer, reader)
			check(err)
			fmt.Println(written)
			writer.Flush()
			file.Close()
			reader.Reset(os.Stdin)
			done.Store(false)
		}
		time.Sleep(time.Millisecond)
	}
}
So you need to io.CopyN(dst, src, 4096) in the loop and rotate the file once in a while. See example. I made rotation by size but it is easy to add signal handling.
package main
import (
"fmt"
"io"
"log"
"os"
"time"
)
// count numbers the dump files created so far.
var count int

// f is the currently open dump file; main replaces it via rotate().
var f *os.File

// rotate closes the current dump file (if one is open) and creates the
// next numbered ./dump-N.bin, returning the fresh handle. Any error is
// fatal.
func rotate() *os.File {
	if f != nil {
		if err := f.Close(); err != nil {
			log.Fatal(err)
		}
	}
	name := fmt.Sprintf("./dump-%d.bin", count)
	count++
	// use a distinct local name instead of shadowing the package-level f
	next, err := os.Create(name)
	if err != nil {
		log.Fatal(err)
	}
	log.Println("rotated:", name)
	return next
}
// main streams os.Stdin into size-rotated dump files, 4 KiB at a time,
// rotating roughly every 40 KiB.
//
// BUG FIX: the original declared `var n, written int` and then shadowed
// n with the int64 result of io.CopyN inside the loop, so
// `written += n` mixed int and int64 and the program did not compile
// (the outer n was also unused — another compile error). written is now
// int64 throughout, and EOF of the source ends the program cleanly
// instead of via log.Fatal.
func main() {
	var written int64
	reader := os.Stdin
	for {
		if written == 0 || written >= 4096*10 {
			f = rotate()
			written = 0
		}
		n, err := io.CopyN(f, reader, 4096)
		written += n
		if err == io.EOF {
			// source exhausted: report the final count and stop
			log.Println("written:", written)
			return
		}
		if err != nil {
			log.Fatal(err)
		}
		log.Println("written:", written)
		time.Sleep(time.Millisecond * 500)
	}
}
I'm trying to write a wrapper for Go's built-in logger.
This is to have compatibility.
package main
import (
"log"
"os"
)
var (
	// mylog is the example application logger.
	mylog *log.Logger
)

// main wires mylog to standard output (no prefix, no flags) and emits
// one test record through it.
func main() {
	logger := log.New(os.Stdout, "", 0)
	mylog = logger
	mylog.Printf("test")
}
Instead of using os.Stdout, I want to create one something. Similar to os.Stdout but prints with prefix like below.
package main
import(
"log"
"mylibrary"
)
var(
// mylog should stay a *log.Logger so existing call sites keep working.
mylog *log.Logger
)
// NOTE(review): this is the DESIRED usage, not working code —
// "mylibrary" does not exist yet. mylibrary.Prefix stands for some
// io.Writer that decorates everything written through it with a prefix
// before it reaches stdout.
func main() {
mylog = log.New(mylibrary.Prefix, "", 0)
mylog.Printf("test")
}
Basically, I still want to have *log.Logger while having a custom log. Can someone give me a hint how I can make this work?
Currently, I'm using following to do that. But I bet there's a better way.
// NewIoWriter adapts a line-oriented callback into an io.Writer.
// Everything written to the returned *io.PipeWriter is split on
// newlines by a background goroutine and handed to f one line at a
// time; a scanner error is reported through f as well. A finalizer
// closes the writer so the goroutine terminates once the writer becomes
// unreachable.
func NewIoWriter(f func(string)) *io.PipeWriter {
	pr, pw := io.Pipe()
	go func() {
		defer pr.Close()
		sc := bufio.NewScanner(pr)
		for sc.Scan() {
			f(sc.Text())
		}
		if err := sc.Err(); err != nil {
			f(err.Error())
		}
	}()
	runtime.SetFinalizer(pw, (*io.PipeWriter).Close)
	return pw
}
What would be the better way to make it work?
Thank you
How about something like this:
// myLogWriter is an io.Writer that forwards complete lines to logFunc,
// buffering any trailing partial line until its newline arrives.
type myLogWriter struct {
	logFunc func(string)
	line    string
}

// Write splits b on '\n', emitting each completed line (prepended with
// any previously buffered partial line) through logFunc. Bytes after
// the last newline are kept in w.line for the next call. It always
// reports the full input length as consumed and never returns an error.
func (w *myLogWriter) Write(b []byte) (int, error) {
	total := len(b)
	for len(b) > 0 {
		nl := bytes.IndexByte(b, '\n')
		if nl < 0 {
			// no newline left: stash the remainder for later
			w.line += string(b)
			break
		}
		w.logFunc(w.line + string(b[:nl]))
		w.line = ""
		b = b[nl+1:]
	}
	return total, nil
}

// NewLogWriter returns a myLogWriter that passes each completed line to f.
func NewLogWriter(f func(string)) *myLogWriter {
	return &myLogWriter{
		logFunc: f,
	}
}
See https://play.golang.org/p/L6PG1gCK1er.
I have folder with .go files and functions defined inside them.
Is it possible to list in command line all function declarations in current folder, probably with godoc?
godoc list functions /path/to/fileOrFolder
To have such output:
func Foo(a, b int) int
func Bar(c, d int) int
Definitely Peter's answer is very much sufficient, but if you want to go down the rabbit hole... and for the fun of it. Using powers of golang std lib ast.
package main
import (
"fmt"
"go/ast"
"go/parser"
"go/token"
"io/ioutil"
"log"
"os"
)
// main loads one Go source file, parses it with go/parser, and walks
// the AST with ast.Inspect, printing the signature of every function
// and method declaration it finds.
// NOTE(review): the input path is hard-coded to protobuf's lib.go under
// $GOPATH and will fail if that package is not checked out locally —
// adjust fname (or walk a directory) for real use.
func main() {
// read file
// here you can filepath.Walk() for your go files
gopath := os.ExpandEnv("$GOPATH")
fname := gopath + "/src/github.com/golang/protobuf/proto/lib.go"
// read file
file, err := os.Open(fname)
if err != nil {
log.Println(err)
return
}
defer file.Close()
// read the whole file in
srcbuf, err := ioutil.ReadAll(file)
if err != nil {
log.Println(err)
return
}
src := string(srcbuf)
// file set
fset := token.NewFileSet()
f, err := parser.ParseFile(fset, "lib.go", src, 0)
if err != nil {
log.Println(err)
return
}
// main inspection: the callback returns true so Inspect keeps
// descending into every node of the tree
ast.Inspect(f, func(n ast.Node) bool {
switch fn := n.(type) {
// catching all function declarations
// other interesting things to catch: FuncLit and FuncType
case *ast.FuncDecl:
fmt.Print("func ")
// if a method, explore and print receiver
if fn.Recv != nil {
fmt.Printf("(%s)", fields(*fn.Recv))
}
// print actual function name
fmt.Printf("%v", fn.Name)
// print function parameters
if fn.Type.Params != nil {
fmt.Printf("(%s)", fields(*fn.Type.Params))
}
// print return params
if fn.Type.Results != nil {
fmt.Printf("(%s)", fields(*fn.Type.Results))
}
fmt.Println()
}
return true
})
}
func expr(e ast.Expr) (ret string) {
switch x := e.(type) {
case *ast.StarExpr:
return fmt.Sprintf("%s*%v", ret, x.X)
case *ast.Ident:
return fmt.Sprintf("%s%v", ret, x.Name)
case *ast.ArrayType:
if x.Len != nil {
log.Println("OH OH looks like homework")
return "TODO: HOMEWORK"
}
res := expr(x.Elt)
return fmt.Sprintf("%s[]%v", ret, res)
case *ast.MapType:
return fmt.Sprintf("map[%s]%s", expr(x.Key), expr(x.Value))
case *ast.SelectorExpr:
return fmt.Sprintf("%s.%s", expr(x.X), expr(x.Sel))
default:
fmt.Printf("\nTODO HOMEWORK: %#v\n", x)
}
return
}
func fields(fl ast.FieldList) (ret string) {
pcomma := ""
for i, f := range fl.List {
// get all the names if present
var names string
ncomma := ""
for j, n := range f.Names {
if j > 0 {
ncomma = ", "
}
names = fmt.Sprintf("%s%s%s ", names, ncomma, n)
}
if i > 0 {
pcomma = ", "
}
ret = fmt.Sprintf("%s%s%s%s", ret, pcomma, names, expr(f.Type))
}
return ret
}
Building on #Peter's idea on an approach, you extract out all the exported functions using a simple grep + regex like this:
grep -rP '^func\s(?:\([^\)]+\)\s)?[A-Z].*' *.go
I'm trying to parse a large image dataset. I'm using filepath.Walk and processing each file I find there. I'd like to process each filepath concurrently.
package main
import (
"fmt"
"image/color"
"image/png"
"math/rand"
"os"
)
var (
	// Black is the gray value pixels are classified against.
	Black = color.Gray{0}
)

// getRandFloatNumber returns a uniformly distributed random float32 in
// [min, max).
//
// BUG FIX: the original computed (rand.Float32()*2 - min) * max, which
// for e.g. (0.7, 0.95) yields values in roughly [-0.67, 1.24) — far
// outside the requested range. The standard min + r*(max-min) mapping
// is used instead.
func getRandFloatNumber(min, max float32) float32 {
	return min + rand.Float32()*(max-min)
}
// openImage is a filepath.Walk callback: it decodes the PNG at path,
// compares the top-left 128x128 pixels against Black, and fills a
// row-major float32 array with jittered high values for black pixels
// and low values for everything else. Unreadable or non-PNG files are
// skipped silently so the walk continues.
func openImage(path string, info os.FileInfo, err error) error {
	// FIX: the original discarded the os.Open error with _
	infile, err := os.Open(path)
	if err != nil {
		// unreadable file: skip it, do not abort the walk
		return nil
	}
	defer infile.Close()
	img, err := png.Decode(infile)
	if err != nil {
		// not a PNG (or corrupt): skip
		return nil
	}
	array := make([]float32, 128*128)
	for y := 0; y < 128; y++ {
		for x := 0; x < 128; x++ {
			c := color.GrayModel.Convert(img.At(x, y)).(color.Gray)
			// BUG FIX: the original indexed array[x*y], which maps the
			// whole first row AND column to index 0 and collides
			// elsewhere (2*3 == 3*2). Row-major y*128+x addresses each
			// pixel exactly once.
			if c == Black {
				array[y*128+x] = getRandFloatNumber(0.7, 0.95)
			} else {
				array[y*128+x] = getRandFloatNumber(0.1, 0.25)
			}
		}
	}
	fmt.Println(info.Name())
	return nil
}
How can I run openImage as a goroutine?
Or how to optimize this code?
You can't get filepath.Walk to call your function in a goroutine, but you can simply start a goroutine in your WalkFunc.
package main
import (
"os"
"path/filepath"
)
// main walks /my/dir and hands every regular file to openImage in its
// own goroutine.
func main() {
	walkFn := func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		if info.IsDir() {
			return nil
		}
		// Check more criteria if necessary. Also consider limiting
		// the number of concurrent goroutines.
		go openImage(path, info)
		return nil
	}
	filepath.Walk("/my/dir", walkFn)
}

// openImage processes a single file; the body is intentionally left
// empty in this sketch.
func openImage(path string, info os.FileInfo) {
}
I have a task written in Go to get a unique list from a bunch of text files. I put in some parallelization using channels and am having inconsistent results now - a variance of 5 records output/not output each time with the same input files.
I am testing it with go run process.go | wc -l on Fedora x86_64, go1.1.2, 8 core amd.
The code is:
package main
import (
"fmt"
"os"
"io"
"encoding/csv"
"regexp"
"log"
)
var (
	// cleanRe strips every non-digit character from a value.
	cleanRe *regexp.Regexp = regexp.MustCompile("[^0-9]+")
	// comma is the tab delimiter shared by the csv reader and output.
	comma rune = '\t'
	// fieldsPerRecord = -1 disables the csv reader's field-count check.
	fieldsPerRecord = -1
)

// clean reduces s to its digits, rejecting results shorter than six
// characters by returning "".
func clean(s string) string {
	digits := cleanRe.ReplaceAllLiteralString(s, "")
	if len(digits) < 6 {
		return ""
	}
	return digits
}

// uniqueChannel consumes (id, value) pairs from inputChan until it is
// closed, printing each distinct pair exactly once, then reports back
// on controlChan so main can account for this goroutine.
//
// BUG FIX: the original created the per-id map and recorded/printed a
// value in mutually exclusive branches, so the FIRST value seen for an
// id was neither printed nor marked seen (it printed later only if it
// reappeared). Because arrival order across producer goroutines is
// nondeterministic, which value got dropped varied run to run — which
// plausibly explains the inconsistent counts described above. Map
// initialisation and the dedup check are now independent steps.
func uniqueChannel(inputChan chan []string, controlChan chan string) {
	defer func() { controlChan <- "Input digester." }()
	uniq := make(map[string]map[string]bool)
	i := 0
	for record := range inputChan {
		i++
		id, v := record[0], record[1]
		if uniq[id] == nil {
			uniq[id] = make(map[string]bool)
		}
		if !uniq[id][v] {
			uniq[id][v] = true
			fmt.Println(id, string(comma), v)
		}
	}
	log.Println("digest ", i)
}
// processFile streams one TSV file: for every record, each value after
// the leading id field is digit-cleaned and, if it survives, sent to
// outputChan as an (id, value) pair. Completion is signalled on
// controlChan with the file name.
//
// FIX: the opened file is now closed when the function returns; the
// original leaked one file descriptor per input file.
func processFile(fileName string, outputChan chan []string, controlChan chan string) {
	defer func() { controlChan <- fileName }()
	f, err := os.Open(fileName)
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()
	r := csv.NewReader(f)
	r.FieldsPerRecord = fieldsPerRecord
	r.Comma = comma
	// Process the records, skipping malformed ones
	i := 0
	for record, err := r.Read(); err != io.EOF; record, err = r.Read() {
		if err != nil {
			continue
		}
		id := record[0]
		for _, v := range record[1:] {
			if cleanV := clean(v); cleanV != "" {
				i++
				outputChan <- []string{id, cleanV}
			}
		}
	}
	log.Println(fileName, i)
}
// main fans out one goroutine per input file, funnels their records
// through recordChan into the digester, and counts control messages to
// know when everything has finished.
func main() {
// NOTE(review): inputs is empty here, so processesLeft starts at 1,
// recordChan is closed immediately and only the digester runs.
inputs:=[]string{}
recordChan:=make(chan []string,100)
processesLeft:=len(inputs)+1
controlChan:=make(chan string,processesLeft)
// Ingest the inputs
for _,fName:=range inputs {
go processFile(fName,recordChan,controlChan)
}
// This is the loop to ensure it's all unique
go uniqueChannel(recordChan,controlChan)
// Make sure all the channels close up
// NOTE(review): this counting shutdown is fragile — it assumes that
// when processesLeft reaches 1 the only unfinished goroutine is the
// digester. A sync.WaitGroup over the producers that closes recordChan
// when they are all done (see the answer below) expresses the intent
// directly and removes the manual accounting. The run-to-run variance
// observed above most likely comes from uniqueChannel's handling of
// the first value per id combined with nondeterministic arrival order
// — TODO confirm.
for processesLeft>0 {
if processesLeft==1{
close(recordChan)
}
c:=<-controlChan
log.Println(c)
processesLeft--
}
close(controlChan)
}
It seems like it closes the channel before it's empty and quits. Without the closing mechanism I was getting deadlocks - I'm out of ideas.
You could ditch the control channel and use a sync.WaitGroup:
package main
import (
"encoding/csv"
"fmt"
"io"
"log"
"os"
"regexp"
"sync"
)
var (
	// cleanRe strips every non-digit character from a value.
	cleanRe *regexp.Regexp = regexp.MustCompile("[^0-9]+")
	// comma is the tab delimiter shared by the csv reader and output.
	comma rune = '\t'
	// fieldsPerRecord = -1 disables the csv reader's field-count check.
	fieldsPerRecord = -1
)

// clean reduces s to its digits, rejecting results shorter than six
// characters by returning "".
func clean(s string) string {
	digits := cleanRe.ReplaceAllLiteralString(s, "")
	if len(digits) < 6 {
		return ""
	}
	return digits
}

// uniqueChannel consumes (id, value) pairs from inputChan until it is
// closed, printing each distinct pair exactly once.
//
// BUG FIX (carried over from the question's code): the original only
// recorded/printed a value in the `else` branch, so the FIRST value
// seen for every id was neither printed nor marked seen. Map
// initialisation and the dedup check are now independent steps.
func uniqueChannel(inputChan chan []string) {
	uniq := make(map[string]map[string]bool)
	i := 0
	for record := range inputChan {
		i++
		id, v := record[0], record[1]
		if uniq[id] == nil {
			uniq[id] = make(map[string]bool)
		}
		if !uniq[id][v] {
			uniq[id][v] = true
			fmt.Println(id, string(comma), v)
		}
	}
	log.Println("digest ", i)
}
// processFile streams one TSV file into outputChan: each value after
// the leading id field is digit-cleaned and forwarded as an
// (id, value) pair when it survives cleaning.
//
// FIX: the opened file is now closed when the function returns; the
// original leaked one file descriptor per input file.
func processFile(fileName string, outputChan chan []string) {
	f, err := os.Open(fileName)
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()
	r := csv.NewReader(f)
	r.FieldsPerRecord = fieldsPerRecord
	r.Comma = comma
	// Process the records, skipping malformed ones
	for record, err := r.Read(); err != io.EOF; record, err = r.Read() {
		if err != nil {
			continue
		}
		id := record[0]
		for _, v := range record[1:] {
			if cleanV := clean(v); cleanV != "" {
				outputChan <- []string{id, cleanV}
			}
		}
	}
}
// main fans out one goroutine per input file, funnels their (id, value)
// pairs through recordChan, and deduplicates/prints them in this
// goroutine. A WaitGroup closes the channel exactly once every producer
// is done — replacing the question's fragile message counting.
//
// BUG FIX: the original goroutine closure captured the range variable
// fName, which before Go 1.22 is a single variable reused on every
// iteration — with more than one input file all goroutines could read
// the same (last) file name. The name is now passed as an argument.
func main() {
	inputs := []string{"ex.tsv"}
	recordChan := make(chan []string)
	var wg sync.WaitGroup
	// Ingest the inputs
	for _, fName := range inputs {
		wg.Add(1)
		go func(name string) {
			defer wg.Done()
			processFile(name, recordChan)
		}(fName)
	}
	// Close the channel once all producers have finished.
	go func() {
		wg.Wait()
		close(recordChan)
	}()
	// This is the loop to ensure it's all unique
	uniqueChannel(recordChan)
}