I have data in this format:
type PartsInfo struct {
Parts map[string]struct {
City string `yaml:"city"`
Services map[string]struct {
Disabled bool `yaml:"disabled"`
} `yaml:"services"`
} `yaml:"parts"`
}
I want to convert it into this format:
map[service]map[city][]parts, where only services that are not disabled should be added. I have been trying different combinations but can't get it just the way I want.
One thing I am not sure of is the destination format. Should I be using the map[service]map[city][]parts format, or would a struct be better? I do not see how, but I have been told before that the best way to pass data around in Go is with structs, not maps. Is that correct?
Is this what you want?
Go Playground: https://play.golang.org/p/N8mkD5pt1pD
package main
import "fmt"
type PartitionData struct {
Partitions map[string]Partition `yaml:"parts"`
}
type Partition struct {
City string `yaml:"city"`
Services map[string]map[string]struct {
Disabled bool `yaml:"disabled"`
} `yaml:"services"`
}
var testData = PartitionData{
Partitions: map[string]Partition{
"partition1": {City: "c1", Services: map[string]map[string]struct{
Disabled bool `yaml:"disabled"`
}{
"service1":{
"1":{
Disabled: true,
},
"2":{
Disabled: true,
},
},
"service2":{
"1":{
Disabled: true,
},
"2":{
Disabled: true,
},
},
}},
"partition2": {City: "c1", Services: map[string]map[string]struct{
Disabled bool `yaml:"disabled"`
}{
"service1":{
"1":{
Disabled: true,
},
"2":{
Disabled: true,
},
},
"service2":{
"1":{
Disabled: true,
},
"2":{
Disabled: true,
},
},
}},
},
}
func main() {
res:= make(map[string]map[string][]Partition)
for _,part := range testData.Partitions{
for serviceName :=range part.Services{
if _,found := res[serviceName];!found {
res[serviceName] = make(map[string][]Partition)
}
if _,found := res[serviceName][part.City];!found {
res[serviceName][part.City] = make([]Partition,0)
}
res[serviceName][part.City] = append(res[serviceName][part.City], part)
}
}
fmt.Println(res)
}
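Note that the loop above adds every service it encounters and appends whole Partition values; the question asks for map[service]map[city][]parts with disabled services skipped. A minimal sketch of that variant, assuming the PartitionData and Partition types defined above (buildIndex is an illustrative name, and I treat a service as enabled if at least one of its instances is not disabled):
// buildIndex returns map[service]map[city][]partName, skipping services whose
// instances are all disabled. Sketch only; relies on the types defined above.
func buildIndex(data PartitionData) map[string]map[string][]string {
    res := make(map[string]map[string][]string)
    for partName, part := range data.Partitions {
        for serviceName, instances := range part.Services {
            enabled := false
            for _, inst := range instances {
                if !inst.Disabled {
                    enabled = true
                    break
                }
            }
            if !enabled {
                continue
            }
            if res[serviceName] == nil {
                res[serviceName] = make(map[string][]string)
            }
            res[serviceName][part.City] = append(res[serviceName][part.City], partName)
        }
    }
    return res
}
As for maps versus structs: structs work best when the field names are fixed and known at compile time, while an index keyed by arbitrary service and city names is exactly what a map is for, so the map[service]map[city][]parts shape seems reasonable here.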
I am trying to access a key element in Go, with the following schema, via a Terraform config file:
"vehicles": {
Type: schema.TypeSet,
Optional: true,
MaxItems: 5,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"car": {
Type: schema.TypeList,
Optional: true,
MaxItems: 2,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"make": {
Type: schema.TypeString,
Optional: true,
},
"model": {
Type: schema.TypeString,
Optional: true,
},
},
},
},
},
},
}
In the config file:
resource "type_test" "type_name" {
vehicles {
car {
make = "Toyota"
model = "Camry"
}
car {
make = "Nissan"
model = "Rogue"
}
}
}
I want to iterate over the list and access the vehicles map in Go.
Terraform crashes with the code below:
vehicles_map, ok := d.GetOk("vehicles")
if ok {
vehicleSet := vehicles_map.(*schema.Set).List()
for _, vehicle := range vehicleSet {
mdi, ok := vehicle.(map[string]interface{})
if ok {
log.Printf("%v", mdi["vehicles"].(map[string]interface{})["car"])
}
}
}
Crash Log:
2019-12-25T21 [DEBUG] plugin.terraform-provider: panic: interface conversion: interface {} is nil, not map[string]interface {}
for line "log.Printf("%v", mdi["vehicles"].(map[string]interface{})["car"])"
I want to print and access each vehicles element in the config file; any help would be appreciated.
d.getOK("vehicles") already performs the indexing with "vehicles" key, which results in a *schema.Set. Calling its Set.List() method, you get a slice (of type []interface{}). Iterating over its elements will give you values that represent a car, modeled with type map[string]interface{}. So inside the loop you just have to type assert to this type, and not index again with "vehicles" nor with "car".
Something like this:
for _, vehicle := range vehicleSet {
car, ok := vehicle.(map[string]interface{})
if ok {
log.Printf("model: %v, make: %v\n", car["model"], car["make"])
}
}
Note: edited after a comment from @JimB
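If each element of the set turns out to be the vehicle block itself rather than a car (which is what the schema in the question suggests, since "car" is a TypeList nested inside "vehicles"), the nested list needs its own assertion before the car fields can be read. A hedged sketch under that assumption, using the same d, schema and log identifiers as above:
// Sketch only: assumes each element of the "vehicles" set is a
// map[string]interface{} whose "car" key holds a []interface{} of car maps.
if raw, ok := d.GetOk("vehicles"); ok {
    for _, v := range raw.(*schema.Set).List() {
        vehicle := v.(map[string]interface{})
        cars, ok := vehicle["car"].([]interface{})
        if !ok {
            continue
        }
        for _, c := range cars {
            car := c.(map[string]interface{})
            log.Printf("model: %v, make: %v", car["model"], car["make"])
        }
    }
}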
I am trying to build a new Terraform provider in Go. The resource that I need is a bit complex. It includes structures, arrays within structures, arrays and structures within arrays. When I run Terraform, it gives me errors, for example:
panic: Error reading level config: '' expected type 'string', got unconvertible type 'map[string]interface {}'. I can't figure out what I am doing wrong.
When I make the structures simple enough, they do work, but I need this resource and I'm sure there's a way to do it, and I'm just missing something perhaps trivial.
-- Here's the Terraform structure:
resource "struct" "my-struct-1" {
name = "MyFile"
complexstruct = [{
onebool = true
onearray = [{
name = "name-1"
value = "value-1"
}, {
name = "name-2"
value = "value-2"
}]
internalstruct = [{
attr1 = false
attr2 = "attribute"
}]
}]
array = [
{
attrib1 = "attrib1.1"
attrib2 = false
attrib3 = "attrib1.3"
},
{
attrib1 = "attrib2.1"
attrib2 = true
attrib3 = "attrib2.3"
}
]
}
-- Here is the Schema definition in Go, as simplified as I could make it:
Schema: map[string]*schema.Schema{
"name": {
Type: schema.TypeString,
Required: true,
},
"complexstruct": {
Type: schema.TypeList,
MaxItems: 1,
Optional: true,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"onebool": {
Type: schema.TypeBool,
Optional: true,
},
"onearray": {
Type: schema.TypeList,
Optional: true,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"name": {
Type: schema.TypeString,
Optional: true,
},
"value": {
Type: schema.TypeString,
Optional: true,
},
},
},
},
"internalstruct": {
Type: schema.TypeList,
MaxItems: 1,
Optional: true,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"attr1": {
Type: schema.TypeBool,
Optional: true,
},
"attr2": {
Type: schema.TypeString,
Optional: true,
},
},
},
},
},
},
},
"array": {
Type: schema.TypeList,
Optional: true,
Elem: map[string]*schema.Schema{
"attrib1": {
Type: schema.TypeString,
Optional: true,
},
"attrib2": {
Type: schema.TypeBool,
Optional: true,
},
"attrib3": {
Type: schema.TypeString,
Optional: true,
},
},
},
},
----- And lastly, here's the code that I am trying to use (however, I think the problem starts before the code itself):
fname := d.Get("name").(string)
d.SetId(fname)
if _, ok := d.GetOk("complexstruct"); ok {
fc := d.Get("complexstruct").([]map[string]interface{})
myBool := fc[0]["onebool"].(bool)
myArray := fc[0]["onearray"].([]map[string]interface{})
type fcS struct {
Name string `json:"name"`
Value string `json:"value"`
}
fcs := make([]fcS, len(myArray))
for ifc := range myArray {
fcs[ifc].Name = myArray[ifc]["name"].(string)
fcs[ifc].Value = myArray[ifc]["value"].(string)
}
myInternalStruct := fc[0]["internalstruct"].([]map[string]interface{})
type misS struct {
Attr1 bool `json:"attr1"`
Attr2 string `json:"attr2"`
}
var mis misS
mis.Attr1 = myInternalStruct[0]["attr1"].(bool)
mis.Attr2 = myInternalStruct[0]["attr2"].(string)
type myWholeStruct struct {
MyBool bool `json:"onebool"`
MyArray []fcS `json:"onearray"`
MyInter misS `json:"internalstruct"`
}
outp := myWholeStruct{
myBool,
fcs,
mis,
}
o, _ := json.Marshal(outp)
writeStringToFile(string(o), fname, false)
}
Well, I expect the create function to create a file whose name is taken from the name attribute and whose contents are a JSON representation of the values of the other Terraform attributes. Instead, I am getting the errors shown above.
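As an aside, helper/schema generally hands nested TypeList data back as []interface{} whose elements are map[string]interface{}, not []map[string]interface{}, so the assertions in a create function are usually done in two steps. A hedged sketch of reading the "onearray" entries against the schema above (readOneArray and fcS are illustrative names, not a drop-in fix):
// Sketch only: read the first "complexstruct" block and its "onearray" entries.
// Assumes the schema above and the usual helper/schema import.
type fcS struct {
    Name  string `json:"name"`
    Value string `json:"value"`
}

func readOneArray(d *schema.ResourceData) []fcS {
    raw, ok := d.GetOk("complexstruct")
    if !ok {
        return nil
    }
    list := raw.([]interface{}) // a TypeList comes back as []interface{}
    if len(list) == 0 || list[0] == nil {
        return nil
    }
    fc := list[0].(map[string]interface{}) // each element is a map[string]interface{}
    var out []fcS
    for _, item := range fc["onearray"].([]interface{}) {
        entry := item.(map[string]interface{})
        out = append(out, fcS{
            Name:  entry["name"].(string),
            Value: entry["value"].(string),
        })
    }
    return out
}
One other detail that may matter: every other object list in this post wraps its element schema in &schema.Resource{...}, while the "array" block above passes a bare map[string]*schema.Schema as Elem; that difference is worth checking against the error message.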
Bear with me; I will explain this as best I can. Please let me know if more information is needed; I am trying to keep this as brief as possible.
I am using Apollo Server and the 'apollo-datasource-rest' plugin to access a REST API. When attempting to get the property values from a nested array of objects, I get a null response for each field/property. In addition, the array being queried only shows a single entry when multiple are available.
The field in question is the 'cores' field within the Rocket type, i.e., launch.rocket.firstStage.cores
I have attempted various ways of mapping through 'cores' (thinking this was what it wanted) with no success.
To keep things short and simple I'm only including the code for the specific issue. All other parts of the query are operating as expected.
You can view the API response I am hitting here: https://api.spacexdata.com/v3/launches/77
schema.js
const { gql } = require('apollo-server');
const typeDefs = gql`
type Query {
singleLaunch(flightNumber: Int!): Launch
}
type Launch {
flightNumber: Int!
rocket: Rocket
}
type Rocket {
firstStage: Cores
}
type Cores {
cores: [CoreFields]
}
type CoreFields {
flight: Int
gridfins: Boolean
legs: Boolean
reused: Boolean
landingType: String
landingVehicle: String
landingSuccess: Boolean
}
`;
module.exports = typeDefs;
Data Source - launch.js
const { RESTDataSource } = require('apollo-datasource-rest');
class LaunchAPI extends RESTDataSource {
constructor() {
super();
this.baseURL = 'https://api.spacexdata.com/v3/';
}
async getLaunchById({ launchId }) {
const res = await this.get('launches', {
flight_number: launchId,
});
return this.launchReducer(res[0]);
}
launchReducer(launch) {
return {
flightNumber: launch.flight_number || 0,
rocket: {
firstStage: {
cores: [
{
flight: launch.rocket.first_stage.cores.flight,
gridfins: launch.rocket.first_stage.cores.gridfins,
legs: launch.rocket.first_stage.cores.legs,
landingType: launch.rocket.first_stage.cores.landing_type,
landingVehicle: launch.rocket.first_stage.cores.landing_vehicle,
landingSuccess: launch.rocket.first_stage.cores.landing_success,
},
],
},
};
}
}
module.exports = LaunchAPI;
resolvers.js
module.exports = {
Query: {
singleLaunch: (_, { flightNumber }, { dataSources }) =>
dataSources.launchAPI.getLaunchById({ launchId: flightNumber }),
},
};
Query
query GetLaunchById($flightNumber: Int!) {
singleLaunch(flightNumber: $flightNumber) {
flightNumber
rocket {
firstStage {
cores {
flight
gridfins
legs
reused
landingType
landingVehicle
landingSuccess
}
}
}
}
}
Expected Result
{
"data": {
"singleLaunch": {
"flightNumber": 77,
"rocket": {
"firstStage": {
"cores": [
{
"flight": 1,
"gridfins": true,
"legs": true,
"reused": true,
"landingType": "ASDS",
"landingVehicle": "OCISLY",
"landSuccess": true,
},
{
"flight": 1,
"gridfins": true,
"legs": true,
"reused": false,
"landingType": "RTLS",
"landingVehicle": "LZ-1",
"landSuccess": true
},
{
"flight": 1,
"gridfins": true,
"legs": true,
"reused": false,
"landingType": "RTLS",
"landingVehicle": "LZ-2",
"landSuccess": true
}
]
}
}
}
}
}
Actual Result (Through GraphQL Playground)
{
"data": {
"singleLaunch": {
"flightNumber": 77,
"rocket": {
"firstStage": {
"cores": [
{
"flight": null,
"gridfins": null,
"legs": null,
"reused": null,
"landingType": null,
"landingVehicle": null,
"landingSuccess": null
}
]
}
}
}
}
}
Any suggestions as to what I am doing wrong here would be greatly appreciated. Again, let me know if more information is needed.
Thank you!
Missing base url
There should be
await this.get( this.baseURL + 'launches'
IMHO a map should be used within launchReducer to return an array, something like:
launchReducer(launch) {
return {
flightNumber: launch.flight_number || 0,
rocket: {
firstStage: {
cores: launch.rocket.first_stage.cores.map(core => ({
flight: core.flight,
gridfins: core.gridfins,
legs: core.legs,
reused: core.reused,
landingType: core.landing_type,
landingVehicle: core.landing_vehicle,
landingSuccess: core.land_success,
})),
},
},
};
}
.map(core => ({ ... })) is for returning an object literal; it is the same as (a shorter version of) .map(core => { return { ... } }).
I am writing a custom provider with a map nested two levels deep. I am able to expand the schema when calling the create function. However, I am having issues when I try to set this value from the read function after the resource has been created. I tried to follow the steps in the Terraform documentation, "Complex read" section, but I am getting the error Invalid address to set: []string{"docker_info", "0", "port_mapping"}.
Schema looks like this:
"docker_info": {
Type: schema.TypeList,
Required: true,
MaxItems: 1,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"image": &schema.Schema{
Type: schema.TypeString,
Required: true,
},
"force_pull_image": &schema.Schema{
Type: schema.TypeBool,
Optional: true,
Default: "false",
},
"network": &schema.Schema{
Type: schema.TypeString,
Optional: true,
Default: "BRIDGE",
ValidateFunc: validateDockerNetwork,
},
// We use typeSet because this parameter can be unordered list and must be unique.
"port_mapping": &schema.Schema{
Type: schema.TypeSet,
Optional: true,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"host_port": &schema.Schema{
Type: schema.TypeInt,
Required: true,
},
"container_port": &schema.Schema{
Type: schema.TypeInt,
Required: true,
},
"container_port_type": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ValidateFunc: validateSingularityPortMappingType,
},
"host_port_type": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ValidateFunc: validateSingularityPortMappingType,
},
"protocol": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ValidateFunc: validateSingularityPortProtocol,
Default: "tcp",
},
},
},
},
},
},
},
In my read function, I have:
d.Set("docker_info", flattenDockerInfo(ContainerInfo.DockerInfo))
func flattenDockerInfo(in singularity.DockerInfo) []interface{} {
var out = make([]interface{}, 0, 0)
m := make(map[string]interface{})
m["network"] = in.Network
m["image"] = in.Image
m["force_pull_image"] = in.ForcePullImage
if len(in.PortMappings) > 0 {
m["port_mapping"] = flattenDockerPortMappings(in.PortMappings)
}
out = append(out, m)
return out
}
func flattenDockerPortMappings(in []singularity.DockerPortMapping) []map[string]interface{} {
var out = make([]map[string]interface{}, len(in), len(in))
for i, v := range in {
m := make(map[string]interface{})
m["container_port"] = v.ContainerPort
m["container_port_type"] = v.ContainerPortType
m["host_port"] = v.HostPort
m["host_port_type"] = v.HostPortType
m["protocol"] = v.Protocol
out[i] = m
}
return out
}
The singularity DockerInfo struct:
type DockerInfo struct {
Parameters map[string]string `json:"parameters,omitempty"`
ForcePullImage bool `json:"forcePullImage,omitempty"`
SingularityDockerParameters []SingularityDockerParameter `json:"dockerParameters,omitEmpty"`
Privileged bool `json:"privileged,omitEmpty"`
Network string `json:"network,omitEmpty"` // Value can be BRIDGE, HOST, or NONE
Image string `json:"image"`
PortMappings []DockerPortMapping `json:"portMappings,omitempty"`
}
type DockerPortMapping struct {
ContainerPort int64 `json:"containerPort"`
ContainerPortType string `json:"containerPortType,omitempty"`
HostPort int64 `json:"hostPort"`
HostPortType string `json:"hostPortType,omitempty"`
Protocol string `json:"protocol,omitempty"`
}
I expect to see something like
"docker_info.0.port_mapping.3218452487.container_port": "8888",
"docker_info.0.port_mapping.3218452487.container_port_type": "LITERAL",
"docker_info.0.port_mapping.3218452487.host_port": "0",
"docker_info.0.port_mapping.3218452487.host_port_type": "FROM_OFFER",
"docker_info.0.port_mapping.3218452487.protocol": "tcp",
I found out that by adding the code below to create a TypeSet,
m["port_mapping"] = schema.NewSet(portMappingHash, []interface{}{flattenDockerPortMappings(in.PortMappings)})
func portMappingHash(v interface{}) int {
var buf bytes.Buffer
x := v.([]map[string]interface{})
for i := range x {
buf.WriteString(fmt.Sprintf("%s-%d", "test", i))
}
return hashcode.String(buf.String())
}
I now get docker_info.0.port_mapping.2384314926: '' expected a map, got 'slice'
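For what it is worth, the schema.NewSet call above wraps the whole slice returned by flattenDockerPortMappings into a single set element, so each element ends up being a slice rather than a map, which matches the 'expected a map, got slice' message. d.Set generally accepts nested blocks as []interface{} whose elements are map[string]interface{}, so one option is to have the flatteners return that shape directly. A hedged sketch, reusing the singularity types above:
// Sketch only: flatten into []interface{} of map[string]interface{}, the shape
// helper/schema accepts for nested blocks; reuses the singularity types above.
func flattenDockerPortMappings(in []singularity.DockerPortMapping) []interface{} {
    out := make([]interface{}, 0, len(in))
    for _, v := range in {
        out = append(out, map[string]interface{}{
            "container_port":      v.ContainerPort,
            "container_port_type": v.ContainerPortType,
            "host_port":           v.HostPort,
            "host_port_type":      v.HostPortType,
            "protocol":            v.Protocol,
        })
    }
    return out
}

func flattenDockerInfo(in singularity.DockerInfo) []interface{} {
    m := map[string]interface{}{
        "network":          in.Network,
        "image":            in.Image,
        "force_pull_image": in.ForcePullImage,
    }
    if len(in.PortMappings) > 0 {
        m["port_mapping"] = flattenDockerPortMappings(in.PortMappings)
    }
    return []interface{}{m}
}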
I may be missing something, but I cannot find any information in the Apollo docs about how to set a many-to-many relation when creating a new entry.
When the relation is one-to-many it is as simple as setting the ID of the one-side of the relationship in the many-side object.
But let's pretend I am working with Books and Authors: how would I write a GraphQL query that creates a Book for one (or many?) Authors?
This should probably happen at the API layer on the GraphQL server (i.e. schema). For many-to-many relationships, you should have a "join" type to denote the BookAuthor many-to-many relationship, and then add an entry to that join type.
Essentially then you'll have a type called Book, another called Author, and finally one more called BookAuthor. And you can add a few mutations to be able to manage that relationship. Perhaps...
addToBookAuthorConnection
updateBookAuthorConnection
removeFromBookAuthorConnection
This is a conventional setup using a Relay-spec compliant API. You can read more about how to structure your API for many-to-many relationships here.
Then, you only need to call the addToBookAuthorConnection mutation from Apollo instead to be able to add to that many-to-many connection on your frontend.
Hope this helps!
If you are using an Apollo GraphQL server with one-to-many relations, then the connectors.js, resolvers.js, and schema.js files can be written in the following formats:
schema.js
const typeDefinitions = `
type Author {
authorId: Int
firstName: String
lastName: String
posts: [Post]
}
type Post {
postId: Int
title: String
text: String
views: Int
author: Author
}
input postInput{
title: String
text: String
views: Int
}
type Query {
author(firstName: String, lastName: String): [Author]
posts(postId: Int, title: String, text: String, views: Int): [Post]
}
type Mutation {
createAuthor(firstName: String, lastName: String, posts:[postInput]): Author
updateAuthor(authorId: Int, firstName: String, lastName: String, posts:[postInput]): String
}
schema {
query: Query
mutation:Mutation
}
`;
export default [typeDefinitions];
resolvers.js
import { Author } from './connectors';
import { Post } from './connectors';
const resolvers = {
Query: {
author(_, args) {
return Author.findAll({ where: args });
},
posts(_, args) {
return Post.findAll({ where: args });
}
},
Mutation: {
createAuthor(_, args) {
console.log(args)
return Author.create(args, {
include: [{
model: Post,
}]
});
},
updateAuthor(_, args) {
var updateProfile = { title: "name here" };
console.log(args.authorId)
var filter = {
where: {
authorId: args.authorId
},
include: [
{ model: Post }
]
};
Author.findOne(filter).then(function (product) {
Author.update(args, { where: { authorId: args.authorId } }).then(function (result) {
product.posts[0].updateAttributes(args.posts[0]).then(function (result) {
//return result;
})
});
})
return "updated";
},
},
Author: {
posts(author) {
return author.getPosts();
},
},
Post: {
author(post) {
return post.getAuthor();
},
},
};
export default resolvers;
connectors.js
import rp from 'request-promise';
var Sequelize = require('sequelize');
var db = new Sequelize('test', 'postgres', 'postgres', {
host: '192.168.1.168',
dialect: 'postgres',
pool: {
max: 5,
min: 0,
idle: 10000
}
});
const AuthorModel = db.define('author', {
authorId: { type: Sequelize.INTEGER, primaryKey: true, autoIncrement: true, field: "author_id" },
firstName: { type: Sequelize.STRING, field: "first_name" },
lastName: { type: Sequelize.STRING, field: "last_name" },
},{
freezeTableName: false,
timestamps: false,
underscored: false,
tableName: "author"
});
const PostModel = db.define('post', {
postId: { type: Sequelize.INTEGER, primaryKey: true, autoIncrement: true, field: "post_id" },
text: { type: Sequelize.STRING },
title: { type: Sequelize.STRING },
views: { type: Sequelize.INTEGER },
},{
freezeTableName: false,
timestamps: false,
underscored: false,
tableName: "post"
});
AuthorModel.hasMany(PostModel, {
foreignKey: 'author_id'
});
PostModel.belongsTo(AuthorModel, {
foreignKey: 'author_id'
});
const Author = db.models.author;
const Post = db.models.post;
export { Author, Post };