Strange behaviour of golang exec.Command program - go

I have such code:
// main pipes the string "foobar" into `wc -l` and prints the result.
// Because `wc -l` counts newline ('\n') characters and "foobar"
// contains none, the command reports 0 — the output seen here is
// wc's documented behaviour, not a pipe-handling bug.
func main() {
s := "foobar" // NOTE: no trailing '\n', so `wc -l` prints 0
cmd := exec.Command("wc", "-l")
stdin, err := cmd.StdinPipe()
if err != nil {
log.Panic(err)
}
stdout, err := cmd.StdoutPipe()
if err != nil {
log.Panic(err)
}
err = cmd.Start()
if err != nil {
log.Panic(err)
}
// Feed the input, close stdin so wc sees EOF, then drain its output.
io.Copy(stdin, bytes.NewBufferString(s))
stdin.Close()
io.Copy(os.Stdout, stdout)
err = cmd.Wait()
if err != nil {
log.Panic(err)
}
}
and its output is:
0
But when I will do simple modification:
// main runs the wc pipeline twice. Each input string ends with '\n',
// so `wc -l` prints 1 both times — the difference from the first
// example is the trailing newline, not the function call.
func main() {
runWcFromStdinWorks("aaa\n")
runWcFromStdinWorks("bbb\n")
}
// runWcFromStdinWorks pipes s into `wc -l` and copies the command's
// output to this process's stdout. Any setup or wait failure panics.
func runWcFromStdinWorks(s string) {
	cmd := exec.Command("wc", "-l")
	inPipe, err := cmd.StdinPipe()
	if err != nil {
		log.Panic(err)
	}
	outPipe, err := cmd.StdoutPipe()
	if err != nil {
		log.Panic(err)
	}
	if err := cmd.Start(); err != nil {
		log.Panic(err)
	}
	// Write the input, signal EOF by closing stdin, then drain wc's output.
	io.Copy(inPipe, bytes.NewBufferString(s))
	inPipe.Close()
	io.Copy(os.Stdout, outPipe)
	if err := cmd.Wait(); err != nil {
		log.Panic(err)
	}
}
It works, but why? It's just calling a method — why is the first version not working?

The string s in the first example does not have a new line, which causes wc -l to return 0. You can see this behavior by doing:
$ echo -n hello | wc -l
0

Related

Writing to ffpmeg stdin freezes program

I'm trying to convert a file in memory using ffmpeg to another format by using stdin and stdout, but every time I try to write to the stdin of my ffmpeg command, it just freezes there.
package main
import (
"bufio"
"fmt"
"io"
"os"
"os/exec"
)
// test pipes an in-memory MP4 through ffmpeg (stream-copy remux to
// matroska) and returns the output bytes.
//
// BUG (the subject of this question): it writes ALL of stdin before
// reading any stdout. OS pipes have a small fixed buffer; once ffmpeg
// has produced enough output to fill the stdout pipe it blocks on its
// own write, stops reading stdin, and the Write below blocks forever —
// a classic pipe deadlock for inputs larger than the pipe buffers.
// (The parameter also shadows the standard `bytes` package name.)
func test(bytes []byte) ([]byte, error) {
cmd := exec.Command(
"ffmpeg",
"-i", "pipe:0", // read from stdin
"-vcodec", "copy",
"-acodec", "copy",
"-f", "matroska",
"pipe:1",
)
in, err := cmd.StdinPipe()
if err != nil {
panic(err)
}
out, err := cmd.StdoutPipe()
if err != nil {
panic(err)
}
fmt.Println("starting")
err = cmd.Start()
if err != nil {
panic(err)
}
fmt.Println("writing")
// Deadlock point: this Write blocks once the stdin pipe fills,
// because nothing is draining ffmpeg's stdout yet.
w := bufio.NewWriter(in)
_, err = w.Write(bytes)
if err != nil {
panic(err)
}
err = w.Flush()
if err != nil {
panic(err)
}
err = in.Close()
if err != nil {
panic(err)
}
fmt.Println("reading")
// Never reached for large inputs — see the deadlock note above.
outBytes, err := io.ReadAll(out)
if err != nil {
panic(err)
}
fmt.Println("waiting")
err = cmd.Wait()
if err != nil {
panic(err)
}
return outBytes, nil
}
// main reads speech.mp4, remuxes it through ffmpeg via test, and
// writes the converted stream to test.m4v.
func main() {
	input, err := os.ReadFile("speech.mp4")
	if err != nil {
		panic(err)
	}
	converted, err := test(input)
	if err != nil {
		panic(err)
	}
	if err := os.WriteFile("test.m4v", converted, 0644); err != nil {
		panic(err)
	}
}
It prints
starting
writing
and gets stuck there. I tried similar code with grep, and everything worked fine, so this seems to be some ffmpeg-specific problem.
I tried running
cat speech.mp4 | ffmpeg -i pipe:0 -vcodec copy -acodec copy -f matroska pipe:1 | cat > test.mkv
and that works fine, so it's not an ffmpeg problem, but some problem with how I'm piping/reading/writing my data.
My speech.mp4 file is around 2MB.
So the secret lay in reading stdout while dumping the bytes into stdin, since writing to the pipe blocks. Thanks @JimB for helping me figure this out.
You just have to read as you write:
cmd := exec.Command(
"ffmpeg",
"-i", "pipe:0", // read from stdin
"-vcodec", "copy",
"-acodec", "copy",
"-f", "matroska",
"pipe:1",
)
out, err := cmd.StdoutPipe()
if err != nil {
panic(err)
}
in, err := cmd.StdinPipe()
writer := bufio.NewWriter(in)
if err != nil {
panic(err)
}
fmt.Println("starting")
err = cmd.Start()
if err != nil {
panic(err)
}
go func() {
defer writer.Flush()
defer in.Close()
fmt.Println("writing")
_, err = writer.Write(bytes)
if err != nil {
panic(err)
}
}()
var outBytes []byte
defer out.Close()
fmt.Println("reading")
outBytes, err = io.ReadAll(out)
if err != nil {
panic(err)
}
fmt.Println("waiting")
err = cmd.Wait()
if err != nil {
panic(err)
}
return outBytes, nil

How to send jpeg.Encode image?

I am creating a picture using jpeg.Encode and want to send it. How can I avoid creating an intermediate file?
Create and save file.
// ...
// Create the destination file for the rendered JPEG.
outFile, err := os.Create("./images/" + name + ".jpg")
if err != nil {
	// log.Fatal already calls os.Exit(1); the original's following
	// os.Exit(-1) was unreachable dead code and has been removed.
	log.Fatal(err)
}
defer outFile.Close()
// Buffer the writes so jpeg.Encode doesn't hit the disk in tiny chunks.
buff := bufio.NewWriter(outFile)
if err = jpeg.Encode(buff, background, &jpeg.Options{Quality: 90}); err != nil {
	log.Fatal(err)
}
// Flush, or the tail of the image stays in the buffer and the file is truncated.
if err = buff.Flush(); err != nil {
	log.Fatal(err)
}
Send file.
// Open the previously saved image and upload it as a multipart form field.
file, err := os.Open("./images/" + name + ".jpg")
if err != nil {
	// Return the error like the rest of this code path instead of
	// log.Fatal-ing (the original mixed both styles).
	return nil, err
}
defer file.Close() // was missing: the file handle leaked
body := &bytes.Buffer{}
writer := multipart.NewWriter(body)
part, err := writer.CreateFormFile("photo", file.Name())
if err != nil {
	return nil, err
}
// Propagate copy failures; the original discarded io.Copy's error.
if _, err = io.Copy(part, file); err != nil {
	return nil, err
}
// Close finalises the multipart body (writes the trailing boundary).
if err = writer.Close(); err != nil {
	return nil, err
}
resp, err := http.Post(uploadURL, writer.FormDataContentType(), body)
if err != nil {
	return nil, err
}
defer resp.Body.Close()
How can I send a photo without saving it on the server?
Simply pass the part target to jpeg.Encode():
// ...
// Encode the JPEG straight into the multipart section — no temporary
// file on disk is involved.
part, err := writer.CreateFormFile("photo", file.Name())
if err != nil {
return nil, err
}
// jpeg.Encode accepts any io.Writer, and the multipart part is one.
err = jpeg.Encode(part, background, &jpeg.Options{Quality: 90})
if err != nil {
return nil, err
}
// Close finalises the multipart body (writes the trailing boundary);
// it must happen before the POST so the body is complete.
if err = writer.Close(); err != nil {
return nil, err
}
resp, err := http.Post(uploadURL, writer.FormDataContentType(), body)
if err != nil {
return nil, err
}
defer resp.Body.Close()

golang scp file using crypto/ssh

I'm trying to download a remote file over ssh
The following approach works fine on shell
ssh hostname "tar cz /opt/local/folder" > folder.tar.gz
However, the same approach in Go gives a difference in the output artifact size. For example, the same folder with pure shell produces a 179B gz artifact, while the Go script produces 178B.
I assume that something has been missed from io.Reader or session got closed earlier. Kindly ask you guys to help.
Here is the example of my script:
// executeCmd runs cmd on the remote host over SSH and streams its
// stdout into a timestamped .tar.gz file under path.
//
// NOTE(review): two defects explain the size mismatch described above:
//  1. the ssh.Dial error is discarded, and
//  2. bufio.Scanner is line-oriented — it splits on and REMOVES the
//     0x0A bytes, silently corrupting binary (gzip) output.
// See the corrected version below, which uses io.Copy instead.
func executeCmd(cmd, hostname string, config *ssh.ClientConfig, path string) error {
conn, _ := ssh.Dial("tcp", hostname+":22", config)
session, err := conn.NewSession()
if err != nil {
panic("Failed to create session: " + err.Error())
}
r, _ := session.StdoutPipe()
// BUG: a Scanner strips the newline bytes it splits on — wrong tool
// for binary data.
scanner := bufio.NewScanner(r)
go func() {
defer session.Close()
name := fmt.Sprintf("%s/backup_folder_%v.tar.gz", path, time.Now().Unix())
file, err := os.OpenFile(name, os.O_APPEND|os.O_WRONLY|os.O_CREATE, 0644)
if err != nil {
panic(err)
}
defer file.Close()
for scanner.Scan() {
fmt.Println(scanner.Bytes())
if err := scanner.Err(); err != nil {
fmt.Println(err)
}
if _, err = file.Write(scanner.Bytes()); err != nil {
log.Fatal(err)
}
}
}()
if err := session.Run(cmd); err != nil {
fmt.Println(err.Error())
panic("Failed to run: " + err.Error())
}
return nil
}
Thanks!
bufio.Scanner is for newline-delimited text. According to the documentation, the scanner will remove the newline characters, stripping any newline bytes (value 10, 0x0A) out of your binary file.
You don't need a goroutine to do the copy, because you can use session.Start to start the process asynchronously.
You probably don't need to use bufio either. You should be using io.Copy to copy the file, which has an internal buffer already on top of any buffering already done in the ssh client itself. If an additional buffer is needed for performance, wrap the session output in a bufio.Reader
Finally, you return an error value, so use it rather than panic'ing on regular error conditions.
conn, err := ssh.Dial("tcp", hostname+":22", config)
if err != nil {
	return err
}
defer conn.Close() // was missing: the TCP connection leaked
session, err := conn.NewSession()
if err != nil {
	return err
}
defer session.Close()
r, err := session.StdoutPipe()
if err != nil {
	return err
}
name := fmt.Sprintf("%s/backup_folder_%v.tar.gz", path, time.Now().Unix())
file, err := os.OpenFile(name, os.O_APPEND|os.O_WRONLY|os.O_CREATE, 0644)
if err != nil {
	return err
}
defer file.Close()
// Start (not Run) so we can drain stdout while the command executes.
if err := session.Start(cmd); err != nil {
	return err
}
// io.Copy streams the binary output verbatim. The byte count is not
// needed — and the original's `n, err :=` would not even compile
// inside a function, since `n` was declared and never used.
if _, err := io.Copy(file, r); err != nil {
	return err
}
if err := session.Wait(); err != nil {
	return err
}
return nil
You can try doing something like this:
r, _ := session.StdoutPipe()
reader := bufio.NewReader(r)
go func() {
	defer session.Close()
	// open file etc
	// 10 is the number of bytes you'd like to copy in one write operation
	p := make([]byte, 10)
	for {
		n, err := reader.Read(p)
		// Write what was read BEFORE handling the error: the io.Reader
		// contract allows Read to return n > 0 together with io.EOF,
		// and the original dropped those final bytes by breaking first.
		if n > 0 {
			if _, werr := file.Write(p[:n]); werr != nil {
				log.Fatal(werr)
			}
		}
		if err == io.EOF {
			break
		}
		if err != nil {
			log.Fatal("err", err)
		}
	}
}()
Make sure your goroutines are synchronized properly so output is completely written to the file.

How to implement "tar cvfz xxx.tar.gz " in golang?

I would like to decompress tar-gz file by golang.
err := DecompressTarGz("xxx.tar.gz", "/Users/foobarbuzz/")
Use compress/gzip in combination with archive/tar or use os/exec to call tar and gzip directly if you don't like to implement all of that in Go.
I was looking for an answer to this one too. Implemented something that should work.
func Decompress(targetdir string, reader io.ReadCloser) error {
gzReader, err := gzip.NewReader(reader)
if err != nil {
return err
}
defer gzReader.Close()
tarReader := tar.NewReader(gzReader)
for {
header, err := tarReader.Next()
if err == io.EOF {
break
} else if err != nil {
return err
}
target := path.Join(targetdir, header.Name)
switch header.Typeflag {
case tar.TypeDir:
err = os.MkdirAll(target, os.FileMode(header.Mode))
if err != nil {
return err
}
setAttrs(target, header)
break
case tar.TypeReg:
w, err := os.Create(target)
if err != nil {
return err
}
_, err = io.Copy(w, tarReader)
if err != nil {
return err
}
w.Close()
setAttrs(target, header)
break
default:
log.Printf("unsupported type: %v", header.Typeflag)
break
}
}
return nil
}
func setAttrs(target string, header *tar.Header) {
os.Chmod(target, os.FileMode(header.Mode))
os.Chtimes(target, header.AccessTime, header.ModTime)
}

Read stderr after process finished

I call imagemagick's convert command with some data I have in memory (from html form upload/web server). This works fine, but I'd like to get the error output of convert in case of an error. How can I do that?
This is my code:
package main
import (
"bytes"
"io"
"io/ioutil"
"log"
"os/exec"
"path/filepath"
)
// runImagemagick streams data into `convert -` and lets convert write
// the result to destfilename.
//
// NOTE(review): only stdin is wired up — convert's stdout and stderr go
// nowhere, which is why the diagnostic text shown above never reaches
// this program; on failure the caller sees only "exit status 1".
func runImagemagick(data []byte, destfilename string) error {
data_buf := bytes.NewBuffer(data)
cmd := exec.Command("convert", "-", destfilename)
stdin, err := cmd.StdinPipe()
if err != nil {
return err
}
err = cmd.Start()
if err != nil {
return err
}
// Feed the whole image, then close stdin so convert sees EOF.
_, err = io.Copy(stdin, data_buf)
if err != nil {
return err
}
stdin.Close()
err = cmd.Wait()
if err != nil {
return err
}
return nil
}
// main converts source.gif to /tmp/abc/dest.png through ImageMagick.
func main() {
	input, err := ioutil.ReadFile("source.gif")
	if err != nil {
		log.Fatal(err)
	}
	dest := filepath.Join("/tmp", "abc", "dest.png")
	if err := runImagemagick(input, dest); err != nil {
		log.Fatal(err)
	}
}
Now the artificial problem is that the directory /tmp/abc/ does not exist. Normally convert would give me this result:
$ convert - /tmp/abc/foo.png < source.gif
convert: unable to open image `/tmp/abc/foo.png': No such file or directory # error/blob.c/OpenBlob/2617.
convert: WriteBlob Failed `/tmp/abc/foo.png' # error/png.c/MagickPNGErrorHandler/1755.
but I don't "see" this error message within my small program. How can I get the error message and show it to my user?
(And another sub-question is: can you give me an advice if this code looks OK? Are there any obvious flaws in it?)
Pipe stdout and stderr too. For example,
package main
import (
"bytes"
"io"
"io/ioutil"
"log"
"os/exec"
"path/filepath"
)
// runImagemagick streams data into `convert -`, captures the child's
// stdout and stderr via pipes, and logs whatever the child printed
// before returning Wait's result.
//
// The order here matters: write+close stdin first, then drain stdout,
// then stderr, then Wait. NOTE(review): stdout is drained fully before
// stderr; a child that emits more than a pipe buffer of stderr while
// stdout is still open could stall — acceptable for convert's small
// diagnostics, but worth confirming for other commands.
func runImagemagick(data []byte, destfilename string) error {
cmd := exec.Command("convert", "-", destfilename)
stdin, err := cmd.StdinPipe()
if err != nil {
return err
}
stdout, err := cmd.StdoutPipe()
if err != nil {
return err
}
stderr, err := cmd.StderrPipe()
if err != nil {
return err
}
err = cmd.Start()
if err != nil {
return err
}
// Feed the image and close stdin so convert sees EOF.
_, err = io.Copy(stdin, bytes.NewBuffer(data))
if err != nil {
return err
}
stdin.Close()
// Drain both output pipes BEFORE Wait; Wait closes the pipes.
outData, err := ioutil.ReadAll(stdout)
if err != nil {
return err
}
if len(outData) > 0 {
log.Print(string(outData))
}
errData, err := ioutil.ReadAll(stderr)
if err != nil {
return err
}
if len(errData) > 0 {
log.Print(string(errData))
}
err = cmd.Wait()
if err != nil {
return err
}
return nil
}
// main is the driver: load the source GIF and hand it to ImageMagick.
func main() {
	gif, err := ioutil.ReadFile("source.gif")
	if err != nil {
		log.Fatal(err)
	}
	target := filepath.Join("/tmp", "abc", "dest.png")
	if err = runImagemagick(gif, target); err != nil {
		log.Fatal(err)
	}
}
Output:
2013/03/03 15:02:20 convert.im6: unable to open image `/tmp/abc/dest-0.png': No such file or directory # error/blob.c/OpenBlob/2638.
convert.im6: WriteBlob Failed `/tmp/abc/dest-0.png' # error/png.c/MagickPNGErrorHandler/1728.
2013/03/03 15:02:20 exit status 1
exit status 1
There's no need to use pipes because bytes.Buffer implements the io.Writer interface and so it can be used just fine to collect the program's output:
// runImagemagick converts data (fed to convert's stdin, the "-"
// argument) into destfilename, collecting the child's stdout and
// stderr in memory so any diagnostics can be attached to the error.
func runImagemagick(data []byte, destfilename string) error {
	cmd := exec.Command("convert", "-", destfilename)
	// BUG FIX: the original never wired `data` to the child, so convert
	// read an empty stream from "-". bytes.Reader implements io.Reader,
	// which is all cmd.Stdin needs.
	cmd.Stdin = bytes.NewReader(data)
	var stdout, stderr bytes.Buffer
	cmd.Stdout = &stdout
	cmd.Stderr = &stderr
	err := cmd.Run()
	if err != nil {
		if ee, ok := err.(*exec.ExitError); ok {
			// Wrap exit failures with the captured output for the caller.
			return &imagemagickError{ee, stdout.Bytes(), stderr.Bytes()}
		}
		return err
	}
	if stderr.Len() > 0 {
		// fmt.Errorf replaces the original errors.New(fmt.Sprintf(...)).
		return fmt.Errorf("imagemagick wrote to stderr: %s", stderr.Bytes())
	}
	if stdout.Len() > 0 {
		// BUG FIX: log.Print(stdout.Bytes()) printed a slice of numbers;
		// log the text instead.
		log.Print(stdout.String())
	}
	return nil
}

Resources