How can I serve files while using GRPC - go

Is there any way to serve files in Go with gRPC, like gin-gonic's variant:
router.Static("/static", "/var/www")

You can't do it exactly like that.
But you can use the proto bytes type and put the file bytes in that field.
Also (as pointed out in the comments), with large files you should use streaming instead of a unary call, since most gRPC implementations have a default limit of 4 MB per message.
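(If you ever do need a larger unary message, the client-side receive limit can be raised, though streaming remains the better fit for files. A minimal sketch, assuming the standard google.golang.org/grpc package; the 32 MiB value is just an example:)
conn, err := grpc.Dial("localhost:9090",
    grpc.WithInsecure(),
    // Raise the default 4 MiB receive limit (here to 32 MiB) for all calls on this connection.
    grpc.WithDefaultCallOptions(grpc.MaxCallRecvMsgSize(32*1024*1024)),
)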
Proto example:
syntax = "proto3";

message Response {
  bytes fileChunk = 1;
}

message Request {
  string fileName = 1;
}

service TestService {
  rpc Download(Request) returns (stream Response);
}
Server implementation example:
func (srv *Server) Download(req *pbgo.Request, responseStream pbgo.TestService_DownloadServer) error {
    bufferSize := 64 * 1024 // 64 KiB, tweak this as desired
    file, err := os.Open(req.GetFileName())
    if err != nil {
        fmt.Println(err)
        return err
    }
    defer file.Close()
    buff := make([]byte, bufferSize)
    for {
        bytesRead, err := file.Read(buff)
        if err != nil {
            if err != io.EOF {
                fmt.Println(err)
            }
            break
        }
        resp := &pbgo.Response{
            FileChunk: buff[:bytesRead],
        }
        err = responseStream.Send(resp)
        if err != nil {
            log.Println("error while sending chunk:", err)
            return err
        }
    }
    return nil
}
Client would call it like this:
conn, err := grpc.Dial("localhost:9090", grpc.WithInsecure())
if err != nil {
    log.Fatal("client could not connect to grpc service:", err)
}
c := pbgo.NewTestServiceClient(conn)

fileStreamResponse, err := c.Download(context.TODO(), &pbgo.Request{
    FileName: "test.txt",
})
if err != nil {
    log.Println("error downloading:", err)
    return
}
for {
    chunkResponse, err := fileStreamResponse.Recv()
    if err == io.EOF {
        log.Println("received all chunks")
        break
    }
    if err != nil {
        log.Println("err receiving chunk:", err)
        break
    }
    log.Printf("got new chunk with data: %s \n", chunkResponse.FileChunk)
}
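If the goal is to save the download rather than log it, the receive loop can write each chunk to disk instead. A minimal variation (the output file name is just an example, and pbgo is the same generated package as above):
out, err := os.Create("downloaded_test.txt")
if err != nil {
    log.Fatal("could not create output file:", err)
}
defer out.Close()

for {
    chunkResponse, err := fileStreamResponse.Recv()
    if err == io.EOF {
        break
    }
    if err != nil {
        log.Println("err receiving chunk:", err)
        break
    }
    // Append each received chunk to the local file.
    if _, err := out.Write(chunkResponse.GetFileChunk()); err != nil {
        log.Println("err writing chunk:", err)
        break
    }
}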
If you need to be able to serve arbitrary files, you also need to decide which files you allow serving (otherwise someone could request /etc/passwd or another sensitive path); see the sketch below for one way to restrict requests to a base directory.
I'm not sure what exactly the use case is here.
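One way to handle that (not part of the original answer, just a sketch using the standard library) is to resolve every requested name against a fixed base directory and reject anything that escapes it:
// safeJoin is a hypothetical helper: it maps a requested file name onto a fixed
// base directory and neutralizes path traversal such as "../../etc/passwd".
func safeJoin(baseDir, requested string) (string, error) {
    cleaned := filepath.Join(baseDir, filepath.Clean("/"+requested))
    if !strings.HasPrefix(cleaned, filepath.Clean(baseDir)+string(os.PathSeparator)) {
        return "", fmt.Errorf("illegal file path: %q", requested)
    }
    return cleaned, nil
}
The Download handler would then call something like safeJoin("/var/www", req.GetFileName()) and pass the result to os.Open.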

Related

Transfering file using tcp golang

I'm trying to make a music app that sends files over TCP using Go and a microservice architecture. Right now I'm creating a player service that should:
Get the user token and extract the claims from it
Check whether the user exists, using the claims and the user_service microservice
Get the song from Redis
Check whether the song exists, using music_service
Read the file in chunks and send it to the client over TCP
The Redis data looks like this:
{
    "user_id": [{
        "song_id": "<song_id>"
    }]
}
But I ran into a small problem. My music files are stored in FLAC format, and when I receive one on the client, my player doesn't play it. I don't really know what the problem could be. So here's my code:
SERVER
service_setup.go
// this function is called in the main function
func setService() {
    ln, err := net.Listen("tcp", config.TCPAddress)
    if err != nil {
        panic("couldn't start tcp server")
    }
    defer ln.Close()
    for {
        conn, err := ln.Accept()
        if err != nil {
            logger.ErrorLog(fmt.Sprintf("Error: couldn't accept connection. Details: %v", err))
            return
        }
        service.DownloadSong(conn)
    }
}
downloader_service.go
func DownloadSong(conn net.Conn) {
    token, err := bufio.NewReader(conn).ReadString('\n')
    if err != nil {
        logger.ErrorLog(fmt.Sprintf("Error: couldn't get token. Details: %v", err))
        conn.Close()
        return
    }
    claims, err := jwt_funcs.DecodeJwt(token)
    if err != nil {
        conn.Close()
        return
    }
    songs, err := redis_repo.Get(claims.Id)
    if err != nil {
        conn.Close()
        return
    }
    for _, song := range songs {
        download(song, conn)
    }
}
func download(song models.SongsModel, conn net.Conn) {
    filePath, err := filepath.Abs(fmt.Sprintf("./songs/%s.flac", song.SongId))
    if err != nil {
        logger.ErrorLog(fmt.Sprintf("Error: couldn't create filepath. Details: %v", err))
        conn.Close()
        return
    }
    file, err := os.Open(filePath)
    if err != nil {
        logger.ErrorLog(fmt.Sprintf("Error: couldn't open file. Details: %v", err))
        conn.Close()
        return
    }
    defer file.Close()
    read(file, conn)
}
func read(file *os.File, conn net.Conn) {
    reader := bufio.NewReader(file)
    buf := make([]byte, 15)
    defer conn.Close()
    for {
        _, err := reader.Read(buf)
        if err != nil && err == io.EOF {
            logger.InfoLog(fmt.Sprintf("Details: %v", err))
            fmt.Println()
            return
        }
        conn.Write(buf)
    }
}
CLIENT
main.go
func main() {
    conn, _ := net.Dial("tcp", "127.0.0.1:6060")
    var glMessage []byte
    text := "eyJhbGciOiJFUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjYzYzlhNmE1OWI3ZmQyNTQ2ZjA4ZWEyYSIsInVzZXJuYW1lIjoiMTIiLCJleHAiOjE2NzQyMTE5ODl9.aarSDhrFF1df3i2pIRyjNxTfSHKObqLU3kHJiPreredIhLNCzs7z7jMgRHQIcLaIvCOECN7bX0OaSvKdW7VKsQ\n"
    fmt.Fprint(conn, text)
    reader := bufio.NewReader(conn)
    b := make([]byte, 15)
    c := 0
    for i, _ := reader.Read(b); int(i) != 0; i, _ = reader.Read(b) {
        c += i
        glMessage = append(glMessage, b...)
    }
    os.WriteFile("./test.flac", glMessage, 0644)
}
If you know what the problem could be, please tell me. I'd really appreciate it!
It looks like you're sending the music file over the network in 15-byte chunks, and both sides ignore how many bytes were actually read: the server calls conn.Write(buf) with the whole buffer even when the last Read returned fewer bytes, and the client appends the full 15-byte slice regardless of the count returned by Read, so stray bytes end up in the FLAC file and corrupt it.
Increasing the chunk size also helps, for example to 8192 bytes: replace buf := make([]byte, 15) with buf := make([]byte, 8192).
In addition, it's better to write the received data directly to a file rather than accumulating it in memory. You can do this by creating the file with os.Create and writing only the bytes that were actually read on each iteration:
file, err := os.Create("./test.flac")
if err != nil {
    fmt.Println("Error: couldn't create file")
    return
}
defer file.Close()

for {
    i, err := reader.Read(buf)
    if err != nil && err == io.EOF {
        break
    }
    file.Write(buf[:i])
}
I believe that this can solve the issue.
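For what it's worth, the whole client read loop (and the bufio.Reader) can also be replaced by a single io.Copy from the connection, which handles short reads and byte counts for you. A minimal sketch, assuming the same conn and file variables as above:
// Stream everything from the TCP connection straight into the file.
// io.Copy only writes the bytes that were actually read, so no stray
// buffer padding ends up in the output.
if _, err := io.Copy(file, conn); err != nil {
    fmt.Println("Error: couldn't write stream to file:", err)
    return
}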

gorilla websocket NextWriter and WriteMessage() difference

I have this function:
func write() {
    defer func() {
        serverConn.Close()
    }()
    for message := range msgChan {
        w, err := serverConn.NextWriter(websocket.TextMessage)
        if err != nil {
            return
        }
        bmessage, err := json.Marshal(message)
        if err != nil {
            return
        }
        _, err = w.Write(bmessage)
        if err != nil {
            fmt.Println(err)
        }
        if err := w.Close(); err != nil {
            return
        }
    }
}
And I got:
panic: concurrent write to websocket connection
I'm wondering how this is possible? Only this function writes to the websocket, and it runs as a single instance. A second question: what is the point of using NextWriter instead of just conn.WriteMessage()? Is it possible that, with a large number of messages, NextWriter calls accumulate and end up trying to write at the same time?

kafka retry many times when i download large file

I am a newbie with Kafka, and I am trying to build a service that sends mail with attached files.
Execution flow:
Kafka receives a message indicating that a mail should be sent
A get-file function downloads the file from a URL, scales the image, and saves the file
When sending the mail, I get the files from the folder and attach them to the form
Issues:
When I send mail with large files many times, Kafka retries many times and I receive many mails
Kafka error: "kafka server: The provided member is not known in the current generation"
I adjusted MaxProcessingTime, but when I test a single mail with a large file it still works fine
Kafka info: 1 broker, 3 consumers
func (s *customerMailService) SendPODMail() error {
    filePaths, err := DownloadFiles(podURLs, orderInfo.OrderCode)
    if err != nil {
        countRetry := 0
        for countRetry <= NUM_OF_RETRY {
            filePaths, err = DownloadFiles(podURLs, orderInfo.OrderCode)
            if err == nil {
                break
            }
            countRetry++
        }
    }
    err = s.sendMailService.Send(ctx, orderInfo.CustomerEmail, tmsPod, content, filePaths)
}
The download-file function:
func DownloadFiles(files []string, orderCode string) ([]string, error) {
    var filePaths []string
    err := os.Mkdir(tempDir, 0750)
    if err != nil && !os.IsExist(err) {
        return nil, err
    }
    tempDirPath := tempDir + "/" + orderCode
    err = os.Mkdir(tempDirPath, 0750)
    if err != nil && !os.IsExist(err) {
        return nil, err
    }
    for _, fileUrl := range files {
        fileUrlParsed, err := url.ParseRequestURI(fileUrl)
        if err != nil {
            logrus.WithError(err).Infof("Pod url is invalid %s", orderCode)
            return nil, err
        }
        extFile := filepath.Ext(fileUrlParsed.Path)
        dir, err := os.MkdirTemp(tempDirPath, "tempDir")
        if err != nil {
            return nil, err
        }
        f, err := os.CreateTemp(dir, "tmpfile-*"+extFile)
        if err != nil {
            return nil, err
        }
        defer f.Close()
        response, err := http.Get(fileUrl)
        if err != nil {
            return nil, err
        }
        defer response.Body.Close()
        contentTypes := response.Header["Content-Type"]
        isTypeAllow := false
        for _, contentType := range contentTypes {
            if contentType == "image/png" || contentType == "image/jpeg" {
                isTypeAllow = true
            }
        }
        if !isTypeAllow {
            logrus.WithError(err).Infof("Pod image type is invalid %s", orderCode)
            return nil, errors.New("Pod image type is invalid")
        }
        decodedImg, err := imaging.Decode(response.Body)
        if err != nil {
            return nil, err
        }
        resizedImg := imaging.Resize(decodedImg, 1024, 0, imaging.Lanczos)
        imaging.Save(resizedImg, f.Name())
        filePaths = append(filePaths, f.Name())
    }
    return filePaths, nil
}
The send-mail function:
func (s *tikiMailService) SendFile(ctx context.Context, receiver string, templateCode string, data interface{}, filePaths []string) error {
    path := "/v1/emails"
    fullPath := fmt.Sprintf("%s%s", s.host, path)
    formValue := &bytes.Buffer{}
    writer := multipart.NewWriter(formValue)
    _ = writer.WriteField("template", templateCode)
    _ = writer.WriteField("to", receiver)
    if data != nil {
        b, err := json.Marshal(data)
        if err != nil {
            return errors.Wrapf(err, "Cannot marshal mail data to json with object %+v", data)
        }
        _ = writer.WriteField("params", string(b))
    }
    for _, filePath := range filePaths {
        part, err := writer.CreateFormFile(filePath, filepath.Base(filePath))
        if err != nil {
            return err
        }
        pipeReader, pipeWriter := io.Pipe()
        go func() {
            defer pipeWriter.Close()
            file, err := os.Open(filePath)
            if err != nil {
                return
            }
            defer file.Close()
            io.Copy(pipeWriter, file)
        }()
        io.Copy(part, pipeReader)
    }
    err := writer.Close()
    if err != nil {
        return err
    }
    request, err := http.NewRequest("POST", fullPath, formValue)
    if err != nil {
        return err
    }
    request.Header.Set("Content-Type", writer.FormDataContentType())
    resp, err := s.doer.Do(request)
    if err != nil {
        return errors.Wrap(err, "Cannot send request to send email")
    }
    defer resp.Body.Close()
    b, err := ioutil.ReadAll(resp.Body)
    if err != nil {
        return err
    }
    if resp.StatusCode != http.StatusOK {
        return errors.New(fmt.Sprintf("Send email with code %s error: status code %d, response %s",
            templateCode, resp.StatusCode, string(b)))
    } else {
        logrus.Infof("Send email with attachment ,code %s success with response %s , box-code", templateCode, string(b), filePaths)
    }
    return nil
}
Thanks!
My team found the problem: when I redeploy the k8s pods, a conflict over the partition leader causes a rebalance, and the pod then tries to process the messages remaining in its buffer again.
Solution: I don't fetch many messages into the buffer; I just get one message at a time and process it, via this config:
ChannelBufferSize = 0
Example of a conflicting leader partition:
consumer A and consumer B start up at the same time
consumer A registers itself as leader and owns the topic with all partitions
consumer B registers itself as leader, then begins to rebalance and owns all partitions
consumer A rebalances and obtains all partitions, but cannot consume because its memberId is old and it needs a new one
consumer B rebalances again and owns the topic with all partitions, but they are already owned by consumer A
My two cents: in the case of very big attachments, the consumer takes quite a lot of time to read the file and send it as an attachment.
This increases the time between two poll() calls. If that time exceeds max.poll.interval.ms, the consumer is considered failed and the partition offset is not committed. As a result, the message is processed again and, eventually, if by chance the execution time stays below the poll interval, the offset is committed. The effect is that the email is sent multiple times.
Try increasing max.poll.interval.ms on the consumer side.
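With the Go sarama client (which the MaxProcessingTime setting in the question suggests), the two suggestions above map roughly onto the following configuration. This is only a sketch; the 5-minute value is an assumption and should be sized to how long one message (download + resize + mail) actually takes:
import (
    "time"

    "github.com/Shopify/sarama"
)

func newConsumerConfig() *sarama.Config {
    cfg := sarama.NewConfig()
    // Hand messages to the application one at a time instead of pre-fetching a
    // batch, so a redeploy/rebalance does not leave buffered messages to replay.
    cfg.ChannelBufferSize = 0
    // Allow enough processing time per message before the consumer is
    // considered stalled and the group rebalances.
    cfg.Consumer.MaxProcessingTime = 5 * time.Minute
    return cfg
}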

Convert protobuf serialized messages to JSON without precompiling Go code

I want to convert protobuf serialized messages into a human-readable JSON format. The major problem I face is that I need to do this without compiling the proto descriptors into Go code beforehand. I have access to the .proto files at runtime, but not at compile time.
I had the impression that the new Protobuf API v2 (https://github.com/protocolbuffers/protobuf-go) supports dynamic deserialization (see package types/dynamicpb), but apparently I couldn't figure out how to use it:
func readDynamically(in []byte) {
    // How do I load the required descriptor (for NewMessage()) from my `addressbook.proto` file?
    descriptor := ??
    msg := dynamicpb.NewMessage(descriptor)
    err := protojson.Unmarshal(in, msg)
    if err != nil {
        panic(err)
    }
}
The above code is annotated with my problem: how can I get the descriptor required by dynamicpb.NewMessage() from a .proto file?
It should work like this with the dynamicpb package:
func readDynamically(in []byte) {
    registry, err := createProtoRegistry(".", "addressbook.proto")
    if err != nil {
        panic(err)
    }
    desc, err := registry.FindFileByPath("addressbook.proto")
    if err != nil {
        panic(err)
    }
    fd := desc.Messages()
    addressBook := fd.ByName("AddressBook")

    msg := dynamicpb.NewMessage(addressBook)
    if err := proto.Unmarshal(in, msg); err != nil {
        panic(err)
    }

    jsonBytes, err := protojson.Marshal(msg)
    if err != nil {
        panic(err)
    }
    fmt.Println(string(jsonBytes))
}
func createProtoRegistry(srcDir string, filename string) (*protoregistry.Files, error) {
    // Create descriptors using the protoc binary.
    // Imported dependencies are included so that the descriptors are self-contained.
    tmpFile := filename + "-tmp.pb"
    cmd := exec.Command("./protoc/protoc",
        "--include_imports",
        "--descriptor_set_out="+tmpFile,
        "-I"+srcDir,
        path.Join(srcDir, filename))
    cmd.Stdout = os.Stdout
    cmd.Stderr = os.Stderr
    err := cmd.Run()
    if err != nil {
        return nil, err
    }
    defer os.Remove(tmpFile)

    marshalledDescriptorSet, err := ioutil.ReadFile(tmpFile)
    if err != nil {
        return nil, err
    }
    descriptorSet := descriptorpb.FileDescriptorSet{}
    err = proto.Unmarshal(marshalledDescriptorSet, &descriptorSet)
    if err != nil {
        return nil, err
    }
    files, err := protodesc.NewFiles(&descriptorSet)
    if err != nil {
        return nil, err
    }
    return files, nil
}
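For completeness, the two functions above rely on protoc being available at ./protoc/protoc (as written in the exec.Command call) and use roughly these imports:
import (
    "fmt"
    "io/ioutil"
    "os"
    "os/exec"
    "path"

    "google.golang.org/protobuf/encoding/protojson"
    "google.golang.org/protobuf/proto"
    "google.golang.org/protobuf/reflect/protodesc"
    "google.golang.org/protobuf/reflect/protoregistry"
    "google.golang.org/protobuf/types/descriptorpb"
    "google.golang.org/protobuf/types/dynamicpb"
)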
This question is kind of interesting. I have done some work on protobuf plugins. As far as I can tell, an additional CLI step is needed because we don't want to "reinvent the wheel".
Step one: we need protoc to translate the ".proto" file into some format from which we can easily get a "protoreflect.MessageDescriptor".
This plugin simply captures the raw bytes that protoc sends to other plugins as input.
package main

import (
    "fmt"
    "io/ioutil"
    "os"
)

func main() {
    if len(os.Args) == 2 && os.Args[1] == "--version" {
        // fmt.Fprintf(os.Stderr, "%v %v\n", filepath.Base(os.Args[0]), version.String())
        os.Exit(0)
    }
    in, err := ioutil.ReadAll(os.Stdin)
    if err != nil {
        fmt.Printf("error: %v", err)
        return
    }
    ioutil.WriteFile("./out.pb", in, 0755)
}
Build it and rename the binary to protoc-gen-raw, then run protoc --raw_out=./pb ./server.proto and you will get out.pb. You can forget about your ".proto" file from now on and put this out.pb wherever you intended to put the ".proto"; with this .pb file we get official library support.
Step 2: Deserialize a protobuf serialized message into JSON.
package main

import (
    "fmt"
    "io/ioutil"

    "google.golang.org/protobuf/compiler/protogen"
    "google.golang.org/protobuf/encoding/protojson"
    "google.golang.org/protobuf/proto"
    "google.golang.org/protobuf/types/dynamicpb"
    "google.golang.org/protobuf/types/pluginpb"
)

func main() {
    in, err := ioutil.ReadFile("./out.pb")
    if err != nil {
        fmt.Printf("failed to read proto file: %v", err)
        return
    }
    req := &pluginpb.CodeGeneratorRequest{}
    if err := proto.Unmarshal(in, req); err != nil {
        fmt.Printf("failed to unmarshal proto: %v", err)
        return
    }
    gen, err := protogen.Options{}.New(req)
    if err != nil {
        fmt.Printf("failed to create new plugin: %v", err)
        return
    }

    // serialize the protobuf message "ServerConfig"
    data := &ServerConfig{
        GameType: 1,
        ServerId: 105,
        Host:     "host.host.host",
        Port:     10024,
    }
    raw, err := data.Marshal()
    if err != nil {
        fmt.Printf("failed to marshal protobuf: %v", err)
        return
    }

    for _, f := range gen.Files {
        for _, m := range f.Messages {
            // "ServerConfig" is the message name of the serialized message
            if m.GoIdent.GoName == "ServerConfig" {
                // m.Desc is the MessageDescriptor
                msg := dynamicpb.NewMessage(m.Desc)
                // unmarshal []byte into the dynamic proto message
                err := proto.Unmarshal(raw, msg)
                if err != nil {
                    fmt.Printf("failed to Unmarshal protobuf data: %v", err)
                    return
                }
                // marshal the message into json
                jsondata, err := protojson.Marshal(msg)
                if err != nil {
                    fmt.Printf("failed to Marshal to json: %v", err)
                    return
                }
                fmt.Printf("out: %v", string(jsondata))
            }
        }
    }
}
// the output is:
// out: {"gameType":1, "serverId":105, "host":"host.host.host", "port":10024}

Trouble getting content type of file in Go

I have a function in which I take in a base64 string and get the content of it (PDF or JPEG).
I read in the base64 content, convert it to bytes, and decode it into the file that it is.
I then create a file where I will output the decoded file (JPEG or PDF).
Then I write the bytes to it.
Then I call my GetFileContentType on it and it returns an empty string.
If I run the functions separately, as in I first run the function that creates the decoded file and let it finish, and then call the second function to get the content type, it works and returns JPEG or PDF.
What am I doing wrong here?
And is there a better way to do this?
func ConvertToJPEGBase64(
    src string,
    dst string,
) error {
    b, err := ioutil.ReadFile(src)
    if err != nil {
        return err
    }
    str := string(b)
    byteArray, err := base64.StdEncoding.DecodeString(str)
    if err != nil {
        return err
    }
    f, err := os.Create(dst)
    if err != nil {
        return err
    }
    if _, err := f.Write(byteArray); err != nil {
        return err
    }
    f.Sync()
    filetype, err := client.GetFileContentType(f)
    if err != nil {
        return err
    }
    if strings.Contains(filetype, "jpeg") {
        // do something
    } else {
        // do something else
    }
    return nil
}
// GetFileContentType tells us the type of file
func GetFileContentType(out *os.File) (string, error) {
    // Only the first 512 bytes are used to sniff the content type.
    buffer := make([]byte, 512)
    _, err := out.Read(buffer)
    if err != nil {
        return "", err
    }
    contentType := http.DetectContentType(buffer)
    return contentType, nil
}
The problem is that GetFileContentType reads from the end of the file, because the preceding write left the file offset there. Fix this by seeking back to the beginning of the file before calling GetFileContentType:
if _, err := f.Seek(0, io.SeekStart); err != nil {
    return err
}
A better fix is to use the file data that's already in memory. This simplifies the code to the point where there's no need for the GetFileContentType function.
func ConvertToJPEGBase64(
    src string,
    dst string,
) error {
    b, err := ioutil.ReadFile(src)
    if err != nil {
        return err
    }
    str := string(b)
    byteArray, err := base64.StdEncoding.DecodeString(str)
    if err != nil {
        return err
    }
    f, err := os.Create(dst)
    if err != nil {
        return err
    }
    defer f.Close() // <-- Close the file on return.
    if _, err := f.Write(byteArray); err != nil {
        return err
    }
    fileType := http.DetectContentType(byteArray) // <-- use data in memory
    if strings.Contains(fileType, "jpeg") {
        // do something
    } else {
        // do something else
    }
    return nil
}
More code can be eliminated by using ioutil.WriteFile:
func ConvertToJPEGBase64(src, dst string) error {
    b, err := ioutil.ReadFile(src)
    if err != nil {
        return err
    }
    byteArray, err := base64.StdEncoding.DecodeString(string(b))
    if err != nil {
        return err
    }
    if err := ioutil.WriteFile(dst, byteArray, 0666); err != nil {
        return err
    }
    fileType := http.DetectContentType(byteArray)
    if strings.Contains(fileType, "jpeg") {
        // do something
    } else {
        // do something else
    }
    return nil
}

Resources