Can't get parser.ParseDir to parse Import statements - go

I'm having trouble getting parser.ParseDir to return any import information: the Imports field on the returned packages is always empty. parser.ParseFile does seem to work, however. I would much rather use ParseDir than have to walk the directory and call ParseFile on each file myself. Here's what I've tried:
package crawlers

import (
	"fmt"
	"go/parser"
	"go/token"
	"os"
	"path/filepath"
)

type GoImports struct {
	//imports []*string
}

//returns fullpaths
func (g *GoImports) Crawl(entry string) ([]*string, error) {
	fp, err := filepath.Abs(entry)
	if err != nil {
		return nil, err
	}
	info, err := os.Stat(fp)
	if err != nil {
		return nil, err
	}
	if !info.IsDir() {
		return nil, fmt.Errorf("Entry point should be a path to a directory of a go package.")
	}
	fset := token.NewFileSet()
	//foo, err := parser.ParseDir(fset, fp, nil, parser.ImportsOnly)
	//foo, err := parser.ParseDir(fset, "/Users/dowen00/local/go/src/got/crawlers/testdata/barpackage", nil, parser.ImportsOnly)
	foo, err := parser.ParseDir(fset, "/Users/dowen00/local/go/src/opscli", func(os.FileInfo) bool { return true }, parser.ImportsOnly)
	//_, err = parser.ParseDir(fset, "/Users/dowen00/local/go/src/got/crawlers/testdata", nil, parser.ImportsOnly)
	//foo, err := parser.ParseFile(fset, "/Users/dowen00/local/go/src/got/crawlers/testdata/barpackage/bar.go", nil, parser.ImportsOnly)
	if err != nil {
		return nil, err
	}
	//fmt.Printf("%c\n", foo)
	//fmt.Printf("%c\n", fset)
	for k, v := range foo {
		//for kv, vv := range v.Imports {
		//	fmt.Printf("%s: %c\n", kv, vv)
		//}
		fmt.Printf("%s: %c\n", k, v.Imports)
	}
	return nil, nil
}
What am I doing wrong?

I was able to work around the problem. As far as I can tell, the parser never fills in ast.Package.Imports (that field holds resolved package objects, which only something like ast.NewPackage populates); the per-file import specs live on each *ast.File instead. If that's the case, the Go docs at https://golang.org/pkg/go/parser/#ParseDir should probably be revised to avoid wasting people's time.
Here's the workaround, iterating over the Files of each package:

for _, v := range foo {
	for kk, vv := range v.Files {
		for _, i := range vv.Imports {
			fmt.Printf("%s: %s\n", kk, i.Path.Value)
		}
	}
}
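To package the workaround up, here is a minimal sketch (the helper name collectImports is mine, not from the original post) that walks every package returned by ParseDir and collects the unique import paths, using strconv.Unquote to strip the quotes from i.Path.Value:

package main

import (
	"fmt"
	"go/parser"
	"go/token"
	"strconv"
)

// collectImports parses every .go file in dir (imports only) and
// returns the set of unique import paths found across all packages.
func collectImports(dir string) (map[string]bool, error) {
	fset := token.NewFileSet()
	pkgs, err := parser.ParseDir(fset, dir, nil, parser.ImportsOnly)
	if err != nil {
		return nil, err
	}
	imports := make(map[string]bool)
	for _, pkg := range pkgs {
		for _, file := range pkg.Files {
			for _, imp := range file.Imports {
				// imp.Path.Value is a quoted string literal, e.g. `"fmt"`.
				path, err := strconv.Unquote(imp.Path.Value)
				if err != nil {
					return nil, err
				}
				imports[path] = true
			}
		}
	}
	return imports, nil
}

func main() {
	imports, err := collectImports(".")
	if err != nil {
		panic(err)
	}
	for path := range imports {
		fmt.Println(path)
	}
}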

Related

Golang nested map filter

package main

import (
	"encoding/json"
	"fmt"
	"io/ioutil"
	"net/http"
	"strings"
)

func main() {
	fmt.Println(fetchData())
}

func fetchData() (map[string]interface{}, error) {
	body := strings.NewReader("dil_kodu=tr")
	req, err := http.NewRequest("POST", "https://www.haremaltin.com/dashboard/ajax/doviz", body)
	if err != nil {
		return nil, err
	}
	req.Header.Set("X-Requested-With", "XMLHttpRequest")
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	jsonData, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}
	var data map[string]interface{}
	err = json.Unmarshal(jsonData, &data)
	if err != nil {
		return nil, err
	}
	return data, nil
}
You can see the full code above. The response, part of which is shown below, is a nested map, and I want to reach the "satis" value under data → ATA5_ESKI, which is 34319. Can anybody help? Thank you for your time.
A part of the response:
map[data:map[AEDTRY:map[alis:4.6271 code:AEDTRY dir:map[alis_dir: satis_dir:] dusuk:4.7116 kapanis:4.6224 satis:4.7271 tarih:17-06-2022 19:41:45 yuksek:4.7276] AEDUSD:map[alis:0.2680 code:AEDUSD dir:map[alis_dir: satis_dir:] dusuk:0.27 kapanis:0.268 satis:0.2700 tarih:17-06-2022 19:30:02 yuksek:0.27]... ALTIN:map[alis:1024.790 code:ALTIN dir:map[alis_dir:down satis_dir:down] dusuk:1029.05 kapanis:1032.13 satis:1030.650 tarih:17-06-2022 19:41:58 yuksek:1040] ATA5_ESKI:map[alis:33869 code:ATA5_ESKI dir:map[alis_dir:down satis_dir:down] dusuk:34266 kapanis:34112 satis:34319 tarih:17-06-2022 19:41:58 yuksek:34630] XPTUSD:map[alis:933 code:XPTUSD dir:map[alis_dir: satis_dir:] dusuk:936 kapanis:953 satis:936 tarih:17-06-2022 19:41:58 yuksek:957]] meta:map[fiyat_guncelleme:2000 fiyat_yayini:web_socket time:1.655484118278e+12 time_formatted:]]
for _, v := range data { // we need the value part of the map
	m, ok := v.(map[string]interface{}) // type-assert the value so we can iterate it
	if !ok {
		fmt.Printf("Error %T", v)
	}
	for k, l := range m {
		if k == "ATA5_ESKI" { // the value we want is inside this map
			a, ok := l.(map[string]interface{}) // type-assert again
			if !ok {
				fmt.Printf("Error %T", l)
			}
			for b, c := range a {
				if b == "satis" { // the value we want
					fmt.Println("Price is", c)
				}
			}
		}
	}
}
We can get the value by adding this iteration before "return data, nil" at the end, but I think there must be an easier method.
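There is: since the path to the value is known, you can index each level directly and type-assert it, instead of ranging over everything. A minimal sketch to drop into the program above (the helper name extractSatis is mine; the key names come from the response shown earlier):

// extractSatis digs into data -> "ATA5_ESKI" -> "satis" with checked
// type assertions; ok reports whether every step succeeded.
func extractSatis(data map[string]interface{}) (interface{}, bool) {
	inner, ok := data["data"].(map[string]interface{})
	if !ok {
		return nil, false
	}
	ata, ok := inner["ATA5_ESKI"].(map[string]interface{})
	if !ok {
		return nil, false
	}
	satis, ok := ata["satis"]
	return satis, ok
}

// Usage, in place of the nested loops:
//	if price, ok := extractSatis(data); ok {
//		fmt.Println("Price is", price)
//	}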

Why is the file empty after writing to it with bufio.Writer?

file, err := os.OpenFile("filename.db", os.O_CREATE|os.O_APPEND, 0666)
if err != nil {
	log.Fatal(err)
}
defer file.Close()

res := 0
writer := bufio.NewWriter(file)
for _, data := range manager {
	bin, err := json.Marshal(data)
	if err != nil {
		log.Println(err)
		return
	}
	res++
	if debug {
		log.Println(res)
	}
	fmt.Printf("%s\n", bin)
	_, err = writer.Write(bin)
	if err != nil {
		log.Println(err)
	}
	_, _ = writer.WriteRune('\n')
}
The file filename.db is created (if it didn't exist), but it stays empty. Why could this happen? I tried this both on my home PC and on a Linux server, and in both cases the file ends up empty.
As suggested in the comments, calling writer.Flush results in the foo and bar values actually being written to filename.db:
package main

import (
	"bufio"
	"encoding/json"
	"fmt"
	"log"
	"os"
)

type Valuable struct {
	Value string `json:"value"`
}

var debug = true
var manager []Valuable

func main() {
	manager = append(manager, Valuable{"foo"}, Valuable{"bar"})
	file, err := os.OpenFile("filename.db", os.O_CREATE|os.O_APPEND, 0666)
	if err != nil {
		log.Fatal(err)
	}
	defer file.Close()
	res := 0
	writer := bufio.NewWriter(file)
	defer writer.Flush()
	for _, data := range manager {
		bin, err := json.Marshal(data)
		if err != nil {
			log.Println(err)
			return
		}
		res++
		if debug {
			log.Println(res)
		}
		fmt.Printf("%s\n", bin)
		_, err = writer.Write(bin)
		if err != nil {
			log.Println(err)
		}
		_, _ = writer.WriteRune('\n')
	}
}
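One more thing worth checking: os.O_CREATE|os.O_APPEND does not include a write mode (os.O_RDONLY is the zero value), so on most platforms the descriptor is opened read-only and the buffered writes fail when flushed. A sketch of the open call with the write flag added, plus an explicit Flush so its error is not silently discarded by a bare defer:

// Open for writing explicitly; without os.O_WRONLY the file is
// opened read-only on most platforms and writes fail at flush time.
file, err := os.OpenFile("filename.db", os.O_CREATE|os.O_APPEND|os.O_WRONLY, 0666)
if err != nil {
	log.Fatal(err)
}
defer file.Close()

writer := bufio.NewWriter(file)
// ... write to writer as above ...

// Flush explicitly so any write error is surfaced instead of dropped.
if err := writer.Flush(); err != nil {
	log.Fatal(err)
}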

Using Golang how do I get a list of Dataset tables with metadata

Using the BigQuery Go SDK, how does one get a list of tables in a dataset together with their metadata?
package main

import (
	"context"
	"encoding/json"
	"fmt"

	"cloud.google.com/go/bigquery"
	"google.golang.org/api/iterator"
)

func main() {
	tables, metas, err := tableMetadatas(context.Background(), "my-project", "my-dataset")
	if err != nil {
		fmt.Printf("Error: %v\n", err)
	} else {
		fmt.Println("Tables:")
		bt, _ := json.MarshalIndent(tables, "", " ")
		fmt.Println(string(bt))
		fmt.Println("Table Metadatas:")
		bm, _ := json.MarshalIndent(metas, "", " ")
		fmt.Println(string(bm))
	}
}

// tableMetadatas gets a list of tables and their respective metadata in a dataset.
func tableMetadatas(ctx context.Context, project string, dataset string) ([]*bigquery.Table, []*bigquery.TableMetadata, error) {
	c, err := bigquery.NewClient(ctx, project)
	if err != nil {
		return nil, nil, err
	}
	metas := make([]*bigquery.TableMetadata, 0)
	tables := make([]*bigquery.Table, 0)
	d := c.Dataset(dataset)
	it := d.Tables(ctx)
	for {
		t, err := it.Next()
		if err == iterator.Done {
			break
		}
		if err != nil {
			return nil, nil, err
		}
		m, err := t.Metadata(ctx)
		if err != nil {
			return nil, nil, err
		}
		tables = append(tables, t)
		metas = append(metas, m)
	}
	return tables, metas, nil
}
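Note that each t.Metadata(ctx) is a separate API call, so for a dataset with many tables the sequential loop above can be slow. A hedged sketch of the same function fetching metadata concurrently with golang.org/x/sync/errgroup (SetLimit needs a reasonably recent version of that module, and the limit of 10 is an arbitrary choice, not from the original answer):

func tableMetadatasConcurrent(ctx context.Context, project, dataset string) ([]*bigquery.Table, []*bigquery.TableMetadata, error) {
	c, err := bigquery.NewClient(ctx, project)
	if err != nil {
		return nil, nil, err
	}
	// First collect the table handles (cheap, one paginated list call).
	var tables []*bigquery.Table
	it := c.Dataset(dataset).Tables(ctx)
	for {
		t, err := it.Next()
		if err == iterator.Done {
			break
		}
		if err != nil {
			return nil, nil, err
		}
		tables = append(tables, t)
	}
	// Then fetch each table's metadata concurrently.
	metas := make([]*bigquery.TableMetadata, len(tables))
	g, gctx := errgroup.WithContext(ctx)
	g.SetLimit(10) // arbitrary cap on in-flight metadata requests
	for i, t := range tables {
		i, t := i, t // capture loop variables (needed before Go 1.22)
		g.Go(func() error {
			m, err := t.Metadata(gctx)
			if err != nil {
				return err
			}
			metas[i] = m
			return nil
		})
	}
	if err := g.Wait(); err != nil {
		return nil, nil, err
	}
	return tables, metas, nil
}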

Read and merge two YAML files in Go

Assuming we have two yaml files
master.yaml
someProperty: "someValue"
anotherProperty: "anotherValue"
override.yaml
someProperty: "overriddenValue"
Is it possible to unmarshal, merge, and then write those changes to a file without having to define a struct for every property in the YAML file?
The master file has over 500 properties that are not at all important to the service at this point of execution, so ideally I'd be able to just unmarshal into a map, do the merge, and write the result out as YAML again; but I'm relatively new to Go, so I wanted some opinions.
I've got some code that reads the YAML into an interface, but I'm unsure of the best approach to merging the two.
var masterYaml interface{}
yamlBytes, _ := ioutil.ReadFile("master.yaml")
yaml.Unmarshal(yamlBytes, &masterYaml)
var overrideYaml interface{}
yamlBytes, _ = ioutil.ReadFile("override.yaml")
yaml.Unmarshal(yamlBytes, &overrideYaml)
I've looked into libraries like mergo, but I'm not sure if that's the right approach.
I'm hoping that after the merge I would be able to write out a file with the properties:
someProperty: "overriddenValue"
anotherProperty: "anotherValue"
Assuming that you just want to merge at the top level, you can unmarshal into maps of type map[string]interface{}, as follows:
package main

import (
	"io/ioutil"

	"gopkg.in/yaml.v2"
)

func main() {
	var master map[string]interface{}
	bs, err := ioutil.ReadFile("master.yaml")
	if err != nil {
		panic(err)
	}
	if err := yaml.Unmarshal(bs, &master); err != nil {
		panic(err)
	}

	var override map[string]interface{}
	bs, err = ioutil.ReadFile("override.yaml")
	if err != nil {
		panic(err)
	}
	if err := yaml.Unmarshal(bs, &override); err != nil {
		panic(err)
	}

	// merge: override wins on key collisions
	for k, v := range override {
		master[k] = v
	}

	bs, err = yaml.Marshal(master)
	if err != nil {
		panic(err)
	}
	if err := ioutil.WriteFile("merged.yaml", bs, 0644); err != nil {
		panic(err)
	}
}
For a broader solution (merging n input files), you can use the function below; I based it on #robox's answer:
// ReadValues merges the given YAML files in order (later files win)
// and returns the merged document as a string. It assumes errors,
// io/ioutil, and gopkg.in/yaml.v2 are imported, plus a logger that
// provides Info (e.g. logrus).
func ReadValues(filenames ...string) (string, error) {
	if len(filenames) <= 0 {
		return "", errors.New("you must provide at least one filename for reading values")
	}
	var resultValues map[string]interface{}
	for _, filename := range filenames {
		var override map[string]interface{}
		bs, err := ioutil.ReadFile(filename)
		if err != nil {
			log.Info(err)
			continue
		}
		if err := yaml.Unmarshal(bs, &override); err != nil {
			log.Info(err)
			continue
		}
		// resultValues is nil only until the first file is read successfully
		if resultValues == nil {
			resultValues = override
		} else {
			for k, v := range override {
				resultValues[k] = v
			}
		}
	}
	bs, err := yaml.Marshal(resultValues)
	if err != nil {
		log.Info(err)
		return "", err
	}
	return string(bs), nil
}
So for this example you would call it with the files in this order:
result, _ := ReadValues("master.yaml", "override.yaml")
In case you have an extra file newFile.yaml, you can pass that as well:
result, _ := ReadValues("master.yaml", "override.yaml", "newFile.yaml")
Deep-merging two YAML files:
package main

import (
	"fmt"
	"io/ioutil"

	"sigs.k8s.io/yaml"
)

func main() {
	// declare two maps to hold the yaml content
	base := map[string]interface{}{}
	currentMap := map[string]interface{}{}

	// read one yaml file
	data, err := ioutil.ReadFile("conf.yaml")
	if err != nil {
		panic(err)
	}
	if err := yaml.Unmarshal(data, &base); err != nil {
		panic(err)
	}

	// read another yaml file
	data1, err := ioutil.ReadFile("conf1.yaml")
	if err != nil {
		panic(err)
	}
	if err := yaml.Unmarshal(data1, &currentMap); err != nil {
		panic(err)
	}

	// merge both yaml documents recursively
	base = mergeMaps(base, currentMap)

	// print the merged map
	fmt.Println(base)
}

// mergeMaps returns a copy of a with b merged in: when both sides hold
// a map under the same key, the maps are merged recursively; otherwise
// b's value wins.
func mergeMaps(a, b map[string]interface{}) map[string]interface{} {
	out := make(map[string]interface{}, len(a))
	for k, v := range a {
		out[k] = v
	}
	for k, v := range b {
		if v, ok := v.(map[string]interface{}); ok {
			if bv, ok := out[k]; ok {
				if bv, ok := bv.(map[string]interface{}); ok {
					out[k] = mergeMaps(bv, v)
					continue
				}
			}
		}
		out[k] = v
	}
	return out
}
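To make the recursion concrete, here is what mergeMaps produces on some sample data of my own (not from the original answer). Given conf.yaml containing

server:
  host: localhost
  port: 8080

and conf1.yaml containing

server:
  port: 9090

the merged result keeps host: localhost and takes port: 9090, because the nested server maps are merged key by key rather than the second one replacing the first wholesale.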

Reading CSV file in Go

Here is a code snippet that reads CSV file:
func parseLocation(file string) (map[string]Point, error) {
	f, err := os.Open(file)
	defer f.Close()
	if err != nil {
		return nil, err
	}
	lines, err := csv.NewReader(f).ReadAll()
	if err != nil {
		return nil, err
	}
	locations := make(map[string]Point)
	for _, line := range lines {
		name := line[0]
		lat, laterr := strconv.ParseFloat(line[1], 64)
		if laterr != nil {
			return nil, laterr
		}
		lon, lonerr := strconv.ParseFloat(line[2], 64)
		if lonerr != nil {
			return nil, lonerr
		}
		locations[name] = Point{lat, lon}
	}
	return locations, nil
}
Is there a way to improve the readability of this code? There is a lot of if and nil noise.
Go now has a csv package for this: encoding/csv. You can find the docs here: https://golang.org/pkg/encoding/csv/
There are a couple of good examples in the docs. Here is a helper function I created to read a CSV file and return its records:
package main

import (
	"encoding/csv"
	"fmt"
	"log"
	"os"
)

func readCsvFile(filePath string) [][]string {
	f, err := os.Open(filePath)
	if err != nil {
		log.Fatal("Unable to read input file "+filePath, err)
	}
	defer f.Close()

	csvReader := csv.NewReader(f)
	records, err := csvReader.ReadAll()
	if err != nil {
		log.Fatal("Unable to parse file as CSV for "+filePath, err)
	}
	return records
}

func main() {
	records := readCsvFile("../tasks.csv")
	fmt.Println(records)
}
Go is a very verbose language; however, you could use something like this:
// predeclare err
func parseLocation(file string) (locations map[string]*Point, err error) {
	f, err := os.Open(file)
	if err != nil {
		return nil, err
	}
	defer f.Close() // this needs to be after the err check

	lines, err := csv.NewReader(f).ReadAll()
	if err != nil {
		return nil, err
	}

	// already defined in the declaration, no need for :=
	locations = make(map[string]*Point, len(lines))
	var lat, lon float64 // predeclare lat, lon
	for _, line := range lines {
		// shorter and cleaner: since we already have lat and err declared, we can do this
		if lat, err = strconv.ParseFloat(line[1], 64); err != nil {
			return nil, err
		}
		if lon, err = strconv.ParseFloat(line[2], 64); err != nil {
			return nil, err
		}
		locations[line[0]] = &Point{lat, lon}
	}
	return locations, nil
}
Edit: a more efficient and proper version was posted by #Dustin in the comments; I'm adding it here for completeness' sake:
func parseLocation(file string) (map[string]*Point, error) {
	f, err := os.Open(file)
	if err != nil {
		return nil, err
	}
	defer f.Close()
	csvr := csv.NewReader(f)
	locations := map[string]*Point{}
	for {
		row, err := csvr.Read()
		if err != nil {
			if err == io.EOF {
				err = nil
			}
			return locations, err
		}
		p := &Point{}
		if p.lat, err = strconv.ParseFloat(row[1], 64); err != nil {
			return nil, err
		}
		if p.lon, err = strconv.ParseFloat(row[2], 64); err != nil {
			return nil, err
		}
		locations[row[0]] = p
	}
}
I basically copied my answer from here: https://www.dotnetperls.com/csv-go. For me, it was a better answer than what I found on Stack Overflow.
import (
	"encoding/csv"
	"fmt"
	"io"
	"os"
)

func ReadCsvFile(filePath string) {
	// Load a csv file.
	f, _ := os.Open(filePath)
	// Create a new reader.
	r := csv.NewReader(f)
	for {
		record, err := r.Read()
		// Stop at EOF.
		if err == io.EOF {
			break
		}
		if err != nil {
			panic(err)
		}
		// Display the record, its length, and each individual field.
		fmt.Println(record)
		fmt.Println(len(record))
		for value := range record {
			fmt.Printf(" %v\n", record[value])
		}
	}
}
I also dislike the verbosity of the default Reader, so I made a new type that is similar to bufio.Scanner:
package main

import (
	"encoding/csv"
	"io"
)

type Scanner struct {
	Reader *csv.Reader
	Head   map[string]int
	Row    []string
}

func NewScanner(o io.Reader) Scanner {
	csv_o := csv.NewReader(o)
	a, e := csv_o.Read()
	if e != nil {
		return Scanner{}
	}
	m := map[string]int{}
	for n, s := range a {
		m[s] = n
	}
	return Scanner{Reader: csv_o, Head: m}
}

func (o *Scanner) Scan() bool {
	a, e := o.Reader.Read()
	o.Row = a
	return e == nil
}

func (o Scanner) Text(s string) string {
	return o.Row[o.Head[s]]
}
Example:

package main

import "strings"

func main() {
	s := `Month,Day
January,Sunday
February,Monday`
	o := NewScanner(strings.NewReader(s))
	for o.Scan() {
		println(o.Text("Month"), o.Text("Day"))
	}
}
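One caveat with this Scanner: Scan returns false on any error, so io.EOF and a real parse error look the same to the caller. A small sketch (my addition, not part of the original answer) that records the last non-EOF error so the two can be told apart after the loop; it assumes the same imports as above:

type Scanner struct {
	Reader *csv.Reader
	Head   map[string]int
	Row    []string
	Err    error // last non-EOF error from Read, if any
}

func (o *Scanner) Scan() bool {
	a, e := o.Reader.Read()
	o.Row = a
	if e != nil && e != io.EOF {
		o.Err = e // keep the parse error; io.EOF just ends the loop
	}
	return e == nil
}

// After the loop, check o.Err to distinguish EOF from a bad record.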
https://golang.org/pkg/encoding/csv
You can also read the contents of a directory to find all the CSV files, and then read those CSV files one by one with goroutines.
csv file:
101,300.00,11000901,1155686400
102,250.99,11000902,1432339200
main.go file:
const sourcePath string = "./source"

func main() {
	dir, _ := os.Open(sourcePath)
	files, _ := dir.Readdir(-1)
	for _, file := range files {
		fmt.Println("SINGLE FILE: ")
		fmt.Println(file.Name())
		filePath := sourcePath + "/" + file.Name()
		f, _ := os.Open(filePath)
		defer f.Close()
		// os.Remove(filePath)
		go func(file io.Reader) {
			records, _ := csv.NewReader(file).ReadAll()
			for _, row := range records {
				fmt.Println(row)
			}
		}(f)
		time.Sleep(10 * time.Millisecond) // give the goroutines some time to execute
	}
}
And the output will be:
$ go run main.go
SINGLE FILE:
batch01.csv
[101 300.00 11000901 1155686400]
[102 250.99 11000902 1432339200]
Below is an example with an Invoice struct:
func main() {
	dir, _ := os.Open(sourcePath)
	files, _ := dir.Readdir(-1)
	for _, file := range files {
		fmt.Println("SINGLE FILE: ")
		fmt.Println(file.Name())
		filePath := sourcePath + "/" + file.Name()
		f, _ := os.Open(filePath)
		defer f.Close()
		go func(file io.Reader) {
			records, _ := csv.NewReader(file).ReadAll()
			for _, row := range records {
				invoice := new(Invoice)
				invoice.InvoiceNumber = row[0]
				invoice.Amount, _ = strconv.ParseFloat(row[1], 64)
				invoice.OrderID, _ = strconv.Atoi(row[2])
				unixTime, _ := strconv.ParseInt(row[3], 10, 64)
				invoice.Date = time.Unix(unixTime, 0)
				fmt.Printf("Received invoice `%v` for $ %.2f \n", invoice.InvoiceNumber, invoice.Amount)
			}
		}(f)
		time.Sleep(10 * time.Millisecond)
	}
}

type Invoice struct {
	InvoiceNumber string
	Amount        float64
	OrderID       int
	Date          time.Time
}
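The time.Sleep at the end of each iteration is only a crude way to keep main alive until the goroutines finish, and the defer f.Close() inside the loop holds every file open until main returns. A sketch of the same directory walk using sync.WaitGroup instead, closing each file inside its own goroutine; this is my reworking, not the original answer:

package main

import (
	"encoding/csv"
	"fmt"
	"log"
	"os"
	"sync"
)

const sourcePath string = "./source"

func main() {
	dir, err := os.Open(sourcePath)
	if err != nil {
		log.Fatal(err)
	}
	files, err := dir.Readdir(-1)
	if err != nil {
		log.Fatal(err)
	}

	var wg sync.WaitGroup
	for _, file := range files {
		f, err := os.Open(sourcePath + "/" + file.Name())
		if err != nil {
			log.Println(err)
			continue
		}
		wg.Add(1)
		go func(f *os.File) {
			defer wg.Done()
			defer f.Close() // close as soon as this file is processed
			records, err := csv.NewReader(f).ReadAll()
			if err != nil {
				log.Println(err)
				return
			}
			for _, row := range records {
				fmt.Println(row)
			}
		}(f)
	}
	wg.Wait() // block until every reader goroutine has finished
}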
