BigQuery Entity Tag - go

When I loaded my data from Cloud Storage into BigQuery, the click_url field was created as a RECORD type even though it's a string (maybe because of the noindex option, I'm not sure). When I try to insert data into BigQuery using an Inserter, I get this error message:
Cannot convert std::string to a record field:optional .Msg_0_CLOUD_QUERY_TABLE.Msg_1_CLOUD_QUERY_TABLE_click_url click_url = 1
Table and schema in BigQuery (screenshots omitted):
Here's the code:
type Product struct {
    Name      string    `datastore:"name" bigquery:"name"`
    ClickUrl  string    `datastore:"click_url,noindex" bigquery:"click_url"`
    DateAdded time.Time `datastore:"date_added" bigquery:"date_added"`
}
func insertRows(data interface{}) error {
    projectID := "my-project-id"
    datasetID := "mydataset"
    tableID := "mytable"

    ctx := context.Background()
    client, err := bigquery.NewClient(ctx, projectID)
    if err != nil {
        return fmt.Errorf("bigquery.NewClient: %v", err)
    }
    defer client.Close()

    inserter := client.Dataset(datasetID).Table(tableID).Inserter()
    if err := inserter.Put(ctx, data); err != nil {
        return err
    }
    return nil
}
func main() {
    // Example data from Datastore (DateAdded must be a time.Time, not a string)
    product := Product{Name: "product_name", ClickUrl: "click_url_value", DateAdded: time.Now()}
    if err := insertRows(product); err != nil {
        fmt.Println(err)
    }
}
What should I put in the bigquery tag for click_url to make this work?

The click_url field in BigQuery is a RECORD (STRUCT) type, which holds key-value pairs, so it needs to map to a nested struct rather than a plain string. Maybe try this:
type Product struct {
    Name     string `datastore:"name" bigquery:"name"`
    ClickUrl struct {
        String   string
        Text     string
        Provided string
    } `datastore:"click_url,noindex" bigquery:"click_url"`
    DateAdded time.Time `datastore:"date_added" bigquery:"date_added"`
}
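For completeness, a minimal usage sketch under the same assumption. The sub-field names String, Text, and Provided are placeholders here and must match whatever the actual RECORD schema contains; separate bigquery tags on the sub-fields may also be needed:

// Hypothetical usage: populate the nested struct before calling insertRows.
// The sub-field names are assumptions and must mirror the table's RECORD schema.
product := Product{
    Name: "product_name",
    ClickUrl: struct {
        String   string
        Text     string
        Provided string
    }{String: "https://example.com/click"},
    DateAdded: time.Now(),
}
if err := insertRows(product); err != nil {
    fmt.Println(err)
}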

Related

httptest.NewRequest issues with query parameters

I'm using the Gin framework with a Postgres DB and GORM as an ORM.
One of the routes accepts query parameters. When I request it in my browser, I get the expected results: an array of JSON objects. Here is the route:
/artworks/?limit=10&last_id=1
However, when I try to test the handler used by that route, I get the following error:
routes_test.go:184: [ERROR] Unable to unmarshal data to artworks: json: cannot unmarshal object into Go value of type []models.Artwork
The query that the ORM is trying to run in the test function is the following:
SELECT * FROM "artwork_migrate_artwork" WHERE id = ''
So when I run the request in the browser, it properly pulls the query parameters and then the ORM runs the proper sql query. But when using httptest.NewRequest it seems like the query parameters are not used.
Here is my test function:
func TestGetArtworks(t *testing.T) {
    dsn := fmt.Sprintf("host=%s user=%s password=%s dbname=%s port=%s sslmode=%s TimeZone=%s",
        env_var.Host, env_var.User, env_var.Password, env_var.DBname, env_var.Port, env_var.SSLMODE, env_var.TimeZone)
    db, err := gorm.Open(postgres.Open(dsn), &gorm.Config{})
    if err != nil {
        panic("failed to connect to db")
    }

    route := "/artworks/"
    handler := handlers.GetArtwork(db)
    router := setupGetRouter(handler, route)

    writer := httptest.NewRecorder()
    req := httptest.NewRequest(http.MethodGet, "/artworks/?limit=10&last_id=1", nil)
    fmt.Println(req)
    router.ServeHTTP(writer, req)
    assert.Equal(t, 200, writer.Code)

    data, err := ioutil.ReadAll(writer.Body)
    if err != nil {
        t.Errorf("\u001b[31m[Error] Unable to read writer.Body: %s", err)
    }

    // no body can be unmarshalled
    fmt.Println("Here is the body:", writer.Body.String())

    var artworks []models.Artwork
    if err := json.Unmarshal(data, &artworks); err != nil {
        t.Errorf("\u001b[31m[ERROR] Unable to unmarshal data to artworks: %s", err)
    }
    assert.Equal(t, 10, len(artworks))
}
Here is my route handler:
func GetArtworks(db *gorm.DB) gin.HandlerFunc {
    return func(c *gin.Context) {
        limit, err := strconv.Atoi(c.Query("limit"))
        if err != nil {
            panic(err)
        }
        last_id := c.Query("last_id")

        var artworks []models.Artwork
        db.Where("id > ?", last_id).Limit(limit).Find(&artworks)

        c.JSON(http.StatusOK, artworks)
    }
}
router.GET("/artworks", han.GetArtworks(db))
Here is the model struct:
type Artwork struct {
    ID              int       `json:"id"`
    Title           string    `json:"title"`
    Nationality     string    `json:"nationality"`
    Artist_Bio      string    `json:"artist_bio"`
    Desc            string    `json:"desc"`
    Culture         string    `json:"culture"`
    Gender          string    `json:"gender"`
    Nation          string    `json:"nation"`
    Medium          string    `json:"medium"`
    Date_of_Release string    `json:"date_of_release"`
    Image           string    `json:"image"`
    Image_Small     string    `json:"image_small"`
    Last_Modified   time.Time `json:"last_modified"`
    Artist_ID       int       `json:"artist_id"`
    Source_ID       int       `json:"source_id"`
}
@Brits is correct: it was due to a misspelled handler.
handler := handlers.GetArtwork(db)
should have been
handler := handlers.GetArtworks(db)
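As a sanity check, here is a minimal sketch (independent of the database and models above, using a hypothetical echo handler) showing that httptest.NewRequest does pass query parameters through to a Gin handler:

package main

import (
    "net/http"
    "net/http/httptest"
    "testing"

    "github.com/gin-gonic/gin"
)

// TestQueryParams verifies that query parameters set on httptest.NewRequest
// reach the handler via c.Query.
func TestQueryParams(t *testing.T) {
    router := gin.New()
    router.GET("/artworks/", func(c *gin.Context) {
        // Echo the parameters back so the test can assert on them.
        c.JSON(http.StatusOK, gin.H{
            "limit":   c.Query("limit"),
            "last_id": c.Query("last_id"),
        })
    })

    w := httptest.NewRecorder()
    req := httptest.NewRequest(http.MethodGet, "/artworks/?limit=10&last_id=1", nil)
    router.ServeHTTP(w, req)

    if w.Code != http.StatusOK {
        t.Fatalf("expected 200, got %d", w.Code)
    }
    if body := w.Body.String(); body != `{"last_id":"1","limit":"10"}` {
        t.Errorf("unexpected body: %s", body)
    }
}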

How to list all the items in a table with pagination

I'm trying to list all the items in a DynamoDB table with pagination, and below is my attempt:
const tableName = "RecordingTable"

type Recording struct {
    ID        string `dynamodbav:"id"`
    CreatedAt string `dynamodbav:"createdAt"`
    UpdatedAt string `dynamodbav:"updatedAt"`
    Duration  int    `dynamodbav:"duration"`
}

type RecordingRepository struct {
    ctx context.Context
    svc *dynamodb.Client
}

func NewRecordingRepository(ctx context.Context) (*RecordingRepository, error) {
    cfg, err := config.LoadDefaultConfig(ctx)
    if err != nil {
        return nil, err
    }
    return &RecordingRepository{ctx, dynamodb.NewFromConfig(cfg)}, nil
}

func (r *RecordingRepository) List(page int, size int) ([]Recording, error) {
    size32 := int32(size)
    queryInput := &dynamodb.QueryInput{
        TableName: aws.String(tableName),
        Limit:     &size32,
    }

    recordings := []Recording{}
    queryPaginator := dynamodb.NewQueryPaginator(r.svc, queryInput)
    for i := 0; queryPaginator.HasMorePages(); i++ {
        result, err := queryPaginator.NextPage(r.ctx)
        if err != nil {
            return nil, err
        }
        if i == page {
            if result.Count > 0 {
                for _, v := range result.Items {
                    recording := Recording{}
                    if err := attributevalue.UnmarshalMap(v, &recording); err != nil {
                        return nil, err
                    }
                    recordings = append(recordings, recording)
                }
            }
            break
        }
    }
    return recordings, nil
}
When I run the code above, I get the following error message:
api error ValidationException: Either the KeyConditions or KeyConditionExpression parameter must be specified in the request.
But why should I specify a KeyConditionExpression when I want to get all the items? Is there another way to do this, or a workaround?
Query does need your keys: it is meant to find specific items in your DynamoDB table. To get all the items in your table, you need to use the Scan operation.
This should be easy to fix in your code: use ScanInput instead of QueryInput, and NewScanPaginator instead of NewQueryPaginator.
Just replaced QueryInput with ScanInput and NewQueryPaginator with NewScanPaginator, and it works.
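A sketch of the List method rewritten around Scan, keeping the rest of the repository unchanged and preserving the page/size semantics of the original code:

func (r *RecordingRepository) List(page int, size int) ([]Recording, error) {
    size32 := int32(size)
    // Scan reads every item in the table, page by page; no key condition is required.
    scanInput := &dynamodb.ScanInput{
        TableName: aws.String(tableName),
        Limit:     &size32,
    }

    recordings := []Recording{}
    scanPaginator := dynamodb.NewScanPaginator(r.svc, scanInput)
    for i := 0; scanPaginator.HasMorePages(); i++ {
        result, err := scanPaginator.NextPage(r.ctx)
        if err != nil {
            return nil, err
        }
        // Only unmarshal the requested page, then stop.
        if i == page {
            for _, item := range result.Items {
                recording := Recording{}
                if err := attributevalue.UnmarshalMap(item, &recording); err != nil {
                    return nil, err
                }
                recordings = append(recordings, recording)
            }
            break
        }
    }
    return recordings, nil
}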

Golang Unmarshal a JSON response, then marshal with Struct field names

So I am hitting an API that returns a JSON response and I am unmarshalling it into a struct like so:
package main

type ProcessedRecords struct {
    SLMIndividualID     string `json:"individual_id"`
    HouseholdPosition   int    `json:"Household Position"`
    IndividualFirstName string `json:"individual_first_name"`
}

func main() {
    req, _ := http.NewRequest(method, url, payload)
    res, err := client.Do(req)
    if err != nil {
        fmt.Println(err)
    }
    defer res.Body.Close()

    body, err := ioutil.ReadAll(res.Body)
    if err != nil {
        fmt.Println(err)
    }
    fmt.Println(body)

    var responseObject Response
    json.Unmarshal(body, &responseObject)
    fmt.Println(responseObject)
}
which works great. However, I need to marshal this struct again, but I want to use the struct field names as keys instead of the json tag names. I am using the following code:
recordsInput := []*firehose.Record{}
for i := 0; i < len(records); i++ {
    if len(recordsInput) == 500 {
        // * code to submit records, this part works fine *
    }
    b, err := json.Marshal(records[i])
    if err != nil {
        log.Printf("Error: %v", err)
    }
    record := &firehose.Record{Data: b}
    recordsInput = append(recordsInput, record)
}
This does submit records successfully but it's in the format:
{"individual_id":"33c05b49-149b-480f-b1c2-3a3b30e0cb6f","Household Position":1...}
and I'd like it in the format:
{"SLMIndividualId":"33c05b49-149b-480f-b1c2-3a3b30e0cb6f","HouseholdPosition":1...}
How can I achieve this?
Those tags say how the struct should be marshalled, so if they are present, that is how the output will be. You'll need to convert it to a matching struct that does not have the json: tags:
type ProcessedRecords struct {
    SLMIndividualID     string `json:"individual_id"`
    HouseholdPosition   int    `json:"Household Position"`
    IndividualFirstName string `json:"individual_first_name"`
}

type ProcessedRecordsOut struct {
    SLMIndividualID     string
    HouseholdPosition   int
    IndividualFirstName string
}

func process() {
    var in ProcessedRecords
    json.Unmarshal(data, &in)

    // Convert to the same type w/o tags
    out := ProcessedRecordsOut(in)
    payload, _ := json.Marshal(out)
    // ...
}
See a working example here: https://play.golang.org/p/p0Fc8DJotYE
You can omit fields one-way by defining a custom type and implementing the correct interface, e.g.
package main

import (
    "encoding/json"
    "fmt"
)

type Animal struct {
    Name  ReadOnlyString
    Order string
}

type ReadOnlyString string

func (ReadOnlyString) UnmarshalJSON([]byte) error { return nil }

func main() {
    x := Animal{"Bob", "First"}
    js, err := json.Marshal(&x)
    if err != nil {
        fmt.Println("error:", err)
    }
    fmt.Printf("%s\n", js)

    var jsonBlob = []byte(`{"Name": "Platypus", "Order": "Monotremata"}`)
    if err := json.Unmarshal(jsonBlob, &x); err != nil {
        fmt.Println("error:", err)
    }
    fmt.Printf("%#v\n\n", x)
}
https://go.dev/play/p/-mwBL0kIqM
Found this answer here: https://github.com/golang/go/issues/19423#issuecomment-284607677

Is there something like sql.NullJson akin to sql.NullString in golang?

I am querying Postgres from Go, and one of the fields contains JSON that can sometimes be NULL, like this:
row := db.QueryRow(
    "select my_string, my_json from my_table where my_string = $1",
    my_id)

var my_string sql.NullString
var myjson MyJsonStruct
err := row.Scan(&my_string, &myjson)
But I am getting
sql: Scan error on column index 2, name "my_json": unsupported Scan, storing driver.Value type <nil> into type *main.MyJsonStruct
I checked https://godoc.org/database/sql but didn't find sql.NullJson. What is the Go way of dealing with this situation?
No, there is no sql.NullJson. I think the best way to deal with a JSON column in the DB is to implement the Valuer and Scanner interfaces, so something like this:
// Scan implements the database/sql.Scanner interface
func (m *MyJsonStruct) Scan(src interface{}) error {
    if src == nil {
        return nil
    }
    data, ok := src.([]byte)
    if !ok {
        return errors.New("type assertion to []byte failed")
    }

    var myJsonStruct MyJsonStruct
    if err := json.Unmarshal(data, &myJsonStruct); err != nil {
        return fmt.Errorf("unmarshal myJsonStruct: %w", err)
    }
    *m = myJsonStruct
    return nil
}

// Value implements the database/sql/driver.Valuer interface
func (m MyJsonStruct) Value() (driver.Value, error) {
    data, err := json.Marshal(m)
    if err != nil {
        return nil, fmt.Errorf("marshal myJsonStruct: %w", err)
    }
    return data, nil
}
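If you want something closer in spirit to sql.NullString, with an explicit Valid flag, here is a hedged sketch of a hypothetical NullJSON wrapper (not part of database/sql):

// NullJSON is a hypothetical sql.NullString-style wrapper for a JSON column
// that may be NULL. Valid is false when the column is NULL.
type NullJSON struct {
    Data  MyJsonStruct
    Valid bool
}

// Scan implements the database/sql.Scanner interface.
func (n *NullJSON) Scan(src interface{}) error {
    if src == nil {
        n.Valid = false
        return nil
    }
    b, ok := src.([]byte)
    if !ok {
        return errors.New("type assertion to []byte failed")
    }
    if err := json.Unmarshal(b, &n.Data); err != nil {
        return fmt.Errorf("unmarshal NullJSON: %w", err)
    }
    n.Valid = true
    return nil
}

// Value implements the database/sql/driver.Valuer interface.
func (n NullJSON) Value() (driver.Value, error) {
    if !n.Valid {
        return nil, nil
    }
    data, err := json.Marshal(n.Data)
    if err != nil {
        return nil, fmt.Errorf("marshal NullJSON: %w", err)
    }
    return data, nil
}

You would then declare var myjson NullJSON, pass &myjson to row.Scan, and check myjson.Valid before using myjson.Data.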

Read flattened entity from cloud datastore in golang

func (db *dataStore) AddAcceptance(ctx context.Context, req *acceptance.PolicyAcceptance) (uint64, error) {
    accpKey := datastore.IncompleteKey("Acceptance", nil)
    key, err := db.Put(context.Background(), accpKey, req)
    if err != nil {
        log.Fatalf("Failed to save Acceptance: %v", err)
    }
    accpKey = key
    val := uint64(accpKey.ID)
    return val, err
}
type PolicyAcceptance struct {
    Id           string                     `protobuf:"bytes,1,opt,name=id" json:"id,omitempty"`
    PolicyNumber int64                      `protobuf:"varint,2,opt,name=policyNumber" json:"policyNumber,omitempty"`
    Version      string                     `protobuf:"bytes,3,opt,name=version" json:"version,omitempty"`
    SignerData   *SignerData                `protobuf:"bytes,4,opt,name=signerData" json:"signerData,omitempty" datastore:",flatten"`
    GroupID      int64                      `protobuf:"varint,5,opt,name=groupID" json:"groupID,omitempty"`
    LocationID   int64                      `protobuf:"varint,6,opt,name=locationID" json:"locationID,omitempty"`
    BusinessId   int64                      `protobuf:"varint,7,opt,name=businessId" json:"businessId,omitempty"`
    AcceptedDate *google_protobuf.Timestamp `protobuf:"bytes,8,opt,name=acceptedDate" json:"acceptedDate,omitempty" datastore:",flatten"`
    IssuerName   string                     `protobuf:"bytes,9,opt,name=issuerName" json:"issuerName,omitempty"`
    Place        string                     `protobuf:"bytes,10,opt,name=place" json:"place,omitempty"`
}
type SignerData struct {
    Name  string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
    Email string `protobuf:"bytes,2,opt,name=email" json:"email,omitempty"`
    Type  string `protobuf:"bytes,3,opt,name=type" json:"type,omitempty"`
    Id    int64  `protobuf:"varint,4,opt,name=id" json:"id,omitempty"`
}
datastore:",flatten" saves data as flattened in data store. The property names becomes flattened with . like SignerData.Id as property name but when it's read from data store, how can I map it back to struct? It fails throwing an error like:
SignerData.Id could not be found as a key in struct. Error: No such
struct field.
func (db *dataStore) GetAcceptanceBySignerData(ctx context.Context, req *acceptance.SignerData) (*acceptance.ListOfPolicyAcceptance, error) {
    query := datastore.NewQuery("Acceptance").Filter("SignerData.Id =", req.Id)
    var accpArr acceptance.ListOfPolicyAcceptance
    var err error

    it := db.Run(ctx, query)
    for {
        var accept acceptance.PolicyAcceptance
        _, err := it.Next(&accept)
        if err == iterator.Done {
            break
        }
        if err != nil {
            log.Fatalf("Error fetching : %v", err)
        }
        accpArr.AcceptanceList = append(accpArr.AcceptanceList, &accept)
    }
    return &accpArr, err
}
