Guys, I am trying to pick up new lines as they come from a command's output, but I always end up doing it the synchronous way (I have to wait until the script is finished). I tried to use fsnotify, but it only works with regular files. Do you have any idea how this can be done?
package main
import (
"fmt"
"os/exec"
"bytes"
"os"
)
func main() {
cmd := exec.Command("scripts/long_script")
output := new(bytes.Buffer)
cmd.Stdout = output
cmd.Stderr = output
if err := cmd.Start(); err != nil { // after Start the program continues and the script keeps executing in the background
	fmt.Printf("Failed to start: %s\n", err)
os.Exit(1)
}
fmt.Printf(" Before WAIT %s \n", output.String()) // script is writing but nothing can be read from output
cmd.Wait()
fmt.Printf(" After Wait %s \n", output.String()) // if we wait to finish execution, we can read all output
}
You should use cmd.StdoutPipe():
package main

import (
	"bufio"
	"fmt"
	"log"
	"os/exec"
	"strconv"
	"time"
)

func main() {
for i := 10; i < 20; i++ {
go printName(`My name is Bob, I am ` + strconv.Itoa(i) + ` years old`)
// Adding delay so as to see incremental output
time.Sleep(60 * time.Millisecond)
}
// Adding delay so as to let program complete
// Please use channels or wait groups
time.Sleep(100 * time.Millisecond)
}
func printName(jString string) {
cmd := exec.Command("echo", "-n", jString)
cmdReader, err := cmd.StdoutPipe()
if err != nil {
log.Fatal(err)
}
scanner := bufio.NewScanner(cmdReader)
go func() {
for scanner.Scan() {
fmt.Println(scanner.Text())
}
}()
if err := cmd.Start(); err != nil {
log.Fatal(err)
}
if err := cmd.Wait(); err != nil {
log.Fatal(err)
}
}
sources that helped me:
nathanleclaire.com
blog.kowalczyk.info
Eventually I managed to do it with a []byte buffer:
stdout, err := cmd.StdoutPipe()
if err == nil {
	err = cmd.Start()
}
buff := make([]byte, 10)
var n int
for err == nil {
	n, err = stdout.Read(buff)
	if n > 0 {
		fmt.Printf("taken %d chars %s", n, string(buff[:n]))
	}
}
cmd.Wait()
if cmd.ProcessState.Success() { // ProcessState is set after Wait
fmt.Println("Script success")
} else {
fmt.Println("Script failed")
}
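If you want whole lines instead of fixed-size chunks, the same pipe can be wrapped in a bufio.Scanner. A minimal sketch, assuming the same scripts/long_script as above:
package main

import (
	"bufio"
	"fmt"
	"log"
	"os/exec"
)

func main() {
	cmd := exec.Command("scripts/long_script")
	stdout, err := cmd.StdoutPipe()
	if err != nil {
		log.Fatal(err)
	}
	if err := cmd.Start(); err != nil {
		log.Fatal(err)
	}
	// Each Scan returns as soon as a full line is available on the pipe.
	scanner := bufio.NewScanner(stdout)
	for scanner.Scan() {
		fmt.Println(scanner.Text())
	}
	if err := cmd.Wait(); err != nil {
		log.Fatal(err)
	}
}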
func main() {
//switch statement here that runs grabusernames()
}
func grabusernames() {
f, err := os.OpenFile("longlist.txt", os.O_RDONLY, os.ModePerm)
if err != nil {
log.Fatalf("open file error: %v", err)
return
}
defer f.Close()
rd := bufio.NewReader(f)
for {
line, err := rd.ReadString('\n')
line2 := strings.TrimSpace(line)
if err != nil {
if err == io.EOF {
break
}
log.Fatalf("read file line error: %v", err)
return
}
tellonym(line2)
}
}
func tellonym(line2 string) {
threads := 10
swg := sizedwaitgroup.New(threads)
for i := 0; i < 1000; i++ {
swg.Add()
go func(i int) {
defer swg.Done()
var client http.Client
resp, err := client.Get("https://tellonym.me/" + line2)
if err != nil {
log.Fatal(err)
}
defer resp.Body.Close()
//fmt.Println("Response code: ", resp.StatusCode)
if resp.StatusCode == 404 {
fmt.Println("Username" + line2 + "not taken")
} else if resp.StatusCode == 200 {
fmt.Println("username " + line2 + " taken")
} else {
fmt.Println("Something else, response code: ", resp.StatusCode)
}
}(i)
	}
	swg.Wait()
}
The issue with the code above is that it checks the same username 1,000 times.
I'd like it to check each username in longlist.txt once, but concurrently (it's a long list and I'd like it to be fast).
Current output:
Username causenot taken
Username causenot taken
Username causenot taken
Username causenot taken
Desired output:
Username causenot taken
Username billybob taken
Username something taken
Username stacker taken
You have to call tellonym(line2) as a goroutine from grabusernames. In your for loop you are checking the same username 1,000 times.
func main() {
//switch statement here that runs grabusernames()
}
func grabusernames() {
f, err := os.OpenFile("longlist.txt", os.O_RDONLY, os.ModePerm)
if err != nil {
log.Fatalf("open file error: %v", err)
return
}
defer f.Close()
rd := bufio.NewReader(f)
for {
line, err := rd.ReadString('\n')
line2 := strings.TrimSpace(line)
if err != nil {
if err == io.EOF {
break
}
log.Fatalf("read file line error: %v", err)
return
}
go tellonym(line2) // launch each check in its own goroutine
}
}
Also take care of these details:
If you're reading from an io.Reader, consider it as reading from a stream. It's a single input source, which you can't 'read in parallel' because of its nature: under the hood you're getting a byte, waiting for the next one, getting one more, and so on. Tokenizing it into words comes later, in the buffer.
Second, I hope you're not trying to use goroutines as a 'silver bullet' in a 'let's add goroutines and everything will just speed up' manner. If Go gives you such an easy way to use concurrency, it doesn't mean you should use it everywhere.
And finally, if you really need to split a huge file into words in parallel and you think the splitting part will be the bottleneck (I don't know your case, but I really doubt it), then you have to invent your own algorithm and use the 'os' package to Seek()/Read() parts of the file, each processed by its own goroutine, and somehow track which parts have already been processed.
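That splitting idea is not spelled out above, so here is a very rough sketch of it, assuming fixed-size chunks and ignoring the fact that a chunk boundary can cut a word in half:
package main

import (
	"fmt"
	"os"
	"sync"
)

func main() {
	f, err := os.Open("longlist.txt")
	if err != nil {
		panic(err)
	}
	defer f.Close()

	info, err := f.Stat()
	if err != nil {
		panic(err)
	}

	const chunkSize = 64 * 1024
	var wg sync.WaitGroup
	for off := int64(0); off < info.Size(); off += chunkSize {
		wg.Add(1)
		go func(off int64) {
			defer wg.Done()
			buf := make([]byte, chunkSize)
			// ReadAt is safe for concurrent use on the same *os.File.
			n, _ := f.ReadAt(buf, off)
			fmt.Printf("chunk at offset %d: %d bytes\n", off, n)
		}(off)
	}
	wg.Wait()
}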
Try this: the buffered channel ch acts as a semaphore that limits the number of in-flight requests to 10, and the sync.WaitGroup makes grabusernames wait until every check has finished.
func grabusernames() {
f, err := os.OpenFile("longlist.txt", os.O_RDONLY, os.ModePerm)
if err != nil {
log.Fatalf("open file error: %v", err)
return
}
defer f.Close()
rd := bufio.NewReader(f)
ch := make(chan struct{}, 10)
var sem sync.WaitGroup
for {
line, err := rd.ReadString('\n')
line2 := strings.TrimSpace(line)
if err != nil {
if err == io.EOF {
break
}
log.Fatalf("read file line error: %v", err)
return
}
ch <- struct{}{}
sem.Add(1)
go tellonym(line2, ch, &sem)
}
sem.Wait()
}
func tellonym(line2 string, ch chan struct{}, sem *sync.WaitGroup) {
defer func() {
sem.Done()
<-ch
}()
var client http.Client
resp, err := client.Get("https://tellonym.me/" + line2)
if err != nil {
log.Fatal(err)
}
defer resp.Body.Close()
//fmt.Println("Response code: ", resp.StatusCode)
if resp.StatusCode == 404 {
fmt.Println("Username" + line2 + "not taken")
} else if resp.StatusCode == 200 {
fmt.Println("username " + line2 + " taken")
} else {
fmt.Println("Something else, response code: ", resp.StatusCode)
}
}
I'm trying to do a benchmark in Go using rpc and exec.Command; here are parts of my code.
I have a master that sends RPCs to a worker to do some work.
func main() {
var wg sync.WaitGroup
var clients []*rpc.Client
client, err := rpc.DialHTTP("tcp", "addr"+":1234")
if err != nil {
log.Fatal("dialing:", err)
}
reply := &Reply{}
args := &Args{}
clients = append(clients, client)
fmt.Println(clients)
err = clients[0].Call("Worker.Init", args, reply)
if err != nil {
log.Fatal("init error:", err)
}
// call for server to init channel
// err = client.Call("Worker.Init", args, reply)
args.A = 1
wg.Add(200)
fmt.Println(time.Now().UnixNano())
for i := 0; i < 200; i++ {
go func() {
defer wg.Done()
err = client.Call("Worker.DoJob", args, reply)
if err != nil {
log.Fatal("dojob error:", err)
}
fmt.Println("Done")
}()
}
wg.Wait()
fmt.Println(time.Now().UnixNano())
}
and the worker's code:
func (w *Worker) DoJob(args *Args, reply *Reply) error {
// find a channel to do it
w.c <- 1
runtime.LockOSThread()
fmt.Println("exec")
// cmd := exec.Command("docker", "run", "--rm", "ubuntu:16.04", "/bin/bash", "-c", "date +%s%N")
cmd := exec.Command("echo", "hello")
err := cmd.Run()
fmt.Println("exec done")
if err != nil {
reply.Err = err
fmt.Println(err)
}
fmt.Println("done")
<-w.c
return nil
}
I use a channel of size 12 to simulate that the machine has only 12 threads. After I found that it would get stuck at cmd.Run(), I changed the command from running a Docker container to simply echoing hello, but it still gets stuck between fmt.Println("exec") and fmt.Println("exec done").
I don't know why this is happening. Am I sending out too many RPCs, so that a lot of them get dropped?
I use the library (http://github.com/fsnotify/fsnotify) to monitor the file system.
I'm trying to adjust the repository's example to match my requirements, but when I do so the program no longer works.
I commented out the done channel within the ExampleNewWatcher function:
done := make(chan bool)
<-done
As a result, when I run the example now, nothing is received from this channel anymore:
event, ok := <-watcher.Events
Complete code:
package main
import (
"github.com/fsnotify/fsnotify"
"log"
"os"
"strconv"
"time"
)
func ExampleNewWatcher() {
watcher, err := fsnotify.NewWatcher()
if err != nil {
log.Fatal(err)
}
defer watcher.Close()
done := make(chan bool) // if I comment out this line, the `panic` below never fires
go func() {
for {
select {
case event, ok := <-watcher.Events:
if !ok {
return
}
log.Println("event:", event)
if event.Op&fsnotify.Write == fsnotify.Write {
log.Println("modified file:", event.Name)
}
panic("just for test") // new output this line
case err, ok := <-watcher.Errors:
if !ok {
return
}
log.Println("error:", err)
}
}
}()
err = watcher.Add("/tmp/foo")
if err != nil {
log.Fatal(err)
}
<-done // this is the line I comment out
}
func check(err error) {
if err != nil {
panic(err)
}
}
func main() {
// create the test file
var err error
file, err := os.Create("/tmp/foo")
check(err)
_, err = file.Write([]byte("hello world"))
check(err)
stopchan := make(chan struct{})
// test change file
go func() {
for i := 0; i < 10; i++ {
file1, err := os.OpenFile("/tmp/foo", os.O_RDWR, 0644)
check(err)
d := strconv.Itoa(i) + "hello world"
_, err = file1.Write([]byte(d))
check(err)
err = file1.Close()
check(err)
time.Sleep(2 * time.Second) // wait for the content to be written to the file
}
}()
ExampleNewWatcher() // monitor file
stopchan <- struct{}{}
}
I'm writing a function that exec's a program and returns stdout and stderr. It also has the option to display the output to the console. I'm clearly not waiting on something, as if I run the function twice in a row, the outputs are different. Here's a sample program, replace the dir var with a dir with a lot of files to fill up the buffers:
func main() {
dir := "SOMEDIRECTORYWITHALOTOFFILES"
out, err := run("ls -l "+dir, true)
if err != nil {
log.Fatalf("run returned %s", err)
}
log.Printf("Out: %s", out)
out2, err := run("ls -l "+dir, false)
if err != nil {
log.Fatalf("run returned %s", err)
}
log.Printf("Out2: %s", out2)
if out != out2 {
log.Fatalf("Out mismatch")
}
}
func run(cmd string, displayOutput bool) (string, error) {
var command *exec.Cmd
command = exec.Command("/bin/sh", "-c", cmd)
var output bytes.Buffer
stdout, err := command.StdoutPipe()
if err != nil {
return "", fmt.Errorf("Unable to setup stdout for command: %v", err)
}
go func() {
if displayOutput == true {
w := io.MultiWriter(os.Stdout, &output)
io.Copy(w, stdout)
} else {
output.ReadFrom(stdout)
}
}()
stderr, err := command.StderrPipe()
if err != nil {
return "", fmt.Errorf("Unable to setup stderr for command: %v", err)
}
go func() {
if displayOutput == true {
w := io.MultiWriter(os.Stderr, &output)
io.Copy(w, stderr)
} else {
output.ReadFrom(stderr)
}
}()
err = command.Run()
if err != nil {
return "", err
}
return output.String(), nil
}
Here is a simplified and working revision of your example. Note that the test command was swapped out so that I could test within Windows and that your error checks have been omitted only for brevity.
The key change is that a sync.WaitGroup is preventing the run function from printing the output and returning until the goroutine has indicated that it's finished.
func main() {
dir := "c:\\windows\\system32"
command1 := exec.Command("cmd", "/C", "dir", "/s", dir)
command2 := exec.Command("cmd", "/C", "dir", "/s", dir)
out1, _ := run(command1)
out2, _ := run(command2)
log.Printf("Length [%d] vs [%d]\n", len(out1), len(out2))
}
func run(cmd *exec.Cmd) (string, error) {
var output bytes.Buffer
var waitGroup sync.WaitGroup
stdout, _ := cmd.StdoutPipe()
writer := io.MultiWriter(os.Stdout, &output)
waitGroup.Add(1)
go func() {
defer waitGroup.Done()
io.Copy(writer, stdout)
}()
cmd.Run()
waitGroup.Wait()
return output.String(), nil
}
I see some problems:
You should be waiting for the goroutines to finish (e.g., using sync.WaitGroup).
You're accessing output concurrently in two goroutines, which is not safe.
You could collect stdout and stderr in two separate buffers and return them separately, if that works for what you're trying to do; a sketch of that is below.
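Not your original function, just a minimal sketch of that last point, using a hypothetical runSeparate variant that keeps stdout and stderr in two buffers guarded by a sync.WaitGroup. Note that wg.Wait() runs before command.Wait(), since all reads from the pipes should complete before Wait closes them.
package main

import (
	"bytes"
	"fmt"
	"io"
	"os"
	"os/exec"
	"sync"
)

// runSeparate is a hypothetical variant of run() that returns stdout and stderr separately.
func runSeparate(cmd string, displayOutput bool) (string, string, error) {
	command := exec.Command("/bin/sh", "-c", cmd)

	var stdoutBuf, stderrBuf bytes.Buffer
	stdout, err := command.StdoutPipe()
	if err != nil {
		return "", "", err
	}
	stderr, err := command.StderrPipe()
	if err != nil {
		return "", "", err
	}

	var wg sync.WaitGroup
	wg.Add(2)
	go func() {
		defer wg.Done()
		if displayOutput {
			io.Copy(io.MultiWriter(os.Stdout, &stdoutBuf), stdout)
		} else {
			io.Copy(&stdoutBuf, stdout)
		}
	}()
	go func() {
		defer wg.Done()
		if displayOutput {
			io.Copy(io.MultiWriter(os.Stderr, &stderrBuf), stderr)
		} else {
			io.Copy(&stderrBuf, stderr)
		}
	}()

	if err := command.Start(); err != nil {
		return "", "", err
	}
	wg.Wait() // both pipes reach EOF once the command exits
	err = command.Wait()
	return stdoutBuf.String(), stderrBuf.String(), err
}

func main() {
	stdout, stderr, err := runSeparate("ls -l /tmp", true)
	if err != nil {
		fmt.Println("run error:", err)
	}
	fmt.Printf("stdout %d bytes, stderr %d bytes\n", len(stdout), len(stderr))
}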
I need to be able to run an external application and interact with it as though I was manually running it from the command-line. All the examples I find only deal with running the program and capturing the output.
Below is a very simple example that I hope illustrates what I am trying to accomplish.
package main
import (
"fmt"
"log"
"os/exec"
)
func main() {
cmd := exec.Command("rm", "-i", "somefile.txt")
out, err := cmd.CombinedOutput()
if err != nil {
log.Fatal(err)
}
if string(out) == "Remove file 'somefile.txt'?" {
// send the response 'y' back to the rm process
}
// program completes normally...
}
I've tried to tweak various examples that I've found to accomplish this with zero success. It seems that even though 'rm' is waiting for a response, Go closes the process.
Any examples, articles, or advice you can provide would be greatly appreciated. Many thanks in advance.
You have two possibilities. The first is to use ReadLine(), but that only works if the application's output comes in full lines, so you can wait for \n. That is not the case with rm, so you have to develop a custom split function for the Scanner. Both versions can be found below.
Please note that you cannot use CombinedOutput, as its result cannot be scanned incrementally. You have to use the pipes.
package main
import (
"bufio"
//"fmt"
"log"
"os/exec"
)
func main() {
cmd := exec.Command("rm", "-i", "somefile.txt")
// Stdout + stderr
out, err := cmd.StderrPipe() // rm writes the prompt to err
if err != nil {
log.Fatal(err)
}
r := bufio.NewReader(out)
// Stdin
in, err := cmd.StdinPipe()
if err != nil {
log.Fatal(err)
}
defer in.Close()
// Start the command!
err = cmd.Start()
if err != nil {
log.Fatal(err)
}
line, _, err := r.ReadLine()
for err == nil {
if string(line) == "Remove file 'somefile.txt'?" {
in.Write([]byte("y\n"))
}
line, _, err = r.ReadLine()
}
cmd.Wait()
// program completes normally...
}
This is a second version with the scanner, and it uses both \n and ? as line delimiters:
package main
import (
"bufio"
"bytes"
"fmt"
"log"
"os/exec"
)
// Ugly hack: this is bufio.ScanLines with '?' added as another delimiter :D
func new_scanner(data []byte, atEOF bool) (advance int, token []byte, err error) {
if atEOF && len(data) == 0 {
return 0, nil, nil
}
if i := bytes.IndexByte(data, '\n'); i >= 0 {
// We have a full newline-terminated line.
fmt.Printf("nn\n")
return i + 1, data[0:i], nil
}
if i := bytes.IndexByte(data, '?'); i >= 0 {
// We have a full ?-terminated line.
return i + 1, data[0:i], nil
}
// If we're at EOF, we have a final, non-terminated line. Return it.
if atEOF {
return len(data), data, nil
}
// Request more data.
return 0, nil, nil
}
func main() {
cmd := exec.Command("rm", "-i", "somefile.txt")
// Stdout + stderr
out, err := cmd.StderrPipe() // Again, rm writes prompts to stderr
if err != nil {
log.Fatal(err)
}
scanner := bufio.NewScanner(out)
scanner.Split(new_scanner)
// Stdin
in, err := cmd.StdinPipe()
if err != nil {
log.Fatal(err)
}
defer in.Close()
// Start the command!
err = cmd.Start()
if err != nil {
log.Fatal(err)
}
// Start scanning
for scanner.Scan() {
line := scanner.Text()
if line == "rm: remove regular empty file ‘somefile.txt’" {
in.Write([]byte("y\n"))
}
}
// Report scanner's errors
if err := scanner.Err(); err != nil {
log.Fatal(err)
}
cmd.Wait()
// program completes normally...
}