Generate grpc-gateway, swagger and grpc-go files with bazel - go

I have a monorepo with several services (service-0 to service-4). There is a proto directory where all the proto files are stored, each in its respective subfolder.
The directory is structured as follows:
.
├── BUILD.bazel
├── gateway
│   ├── .idea
│   ├── BUILD.bazel
│   ├── gateway.iml
│   ├── go.mod
│   ├── go.sum
│   └── main.go
├── gen
│   └── pb-go
├── service-0
│   └── .idea
├── service-1
│   └── .idea
├── service-2
│   └── .idea
├── service-3
│   └── .idea
├── service-4
│   └── .idea
├── Makefile
├── proto
│   ├── service-0
│   │   ├── BUILD.bazel
│   │   └── service-0.proto
│   ├── service-1
│   │   ├── BUILD.bazel
│   │   └── service-1.proto
│   ├── service-2
│   │   ├── BUILD.bazel
│   │   └── service-2.proto
│   ├── service-3
│   │   ├── BUILD.bazel
│   │   └── service-3.proto
│   └── service-4
│       ├── BUILD.bazel
│       └── service-4.proto
├── README.md
├── scripts
│   └── generate-go.sh
├── test
├── ui
│   ├── BUILD.bazel
│   ├── node_modules
│   ├── package.json
│   ├── package-lock.json
│   ├── public
│   ├── README.md
│   ├── src
│   ├── stdout.log
│   └── tsconfig.json
└── WORKSPACE
My WORKSPACE file looks like this:
workspace(
    name = "tool",
    managed_directories = {"@npm": ["ui:node_modules"]},
)
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
## go rules
http_archive(
    name = "io_bazel_rules_go",
    sha256 = "08369b54a7cbe9348eea474e36c9bbb19d47101e8860cec75cbf1ccd4f749281",
    urls = [
        "https://mirror.bazel.build/github.com/bazelbuild/rules_go/releases/download/v0.24.0/rules_go-v0.24.0.tar.gz",
        "https://github.com/bazelbuild/rules_go/releases/download/v0.24.0/rules_go-v0.24.0.tar.gz",
    ],
)
## gazelle
http_archive(
    name = "bazel_gazelle",
    sha256 = "d4113967ab451dd4d2d767c3ca5f927fec4b30f3b2c6f8135a2033b9c05a5687",
    urls = [
        "https://mirror.bazel.build/github.com/bazelbuild/bazel-gazelle/releases/download/v0.22.0/bazel-gazelle-v0.22.0.tar.gz",
        "https://github.com/bazelbuild/bazel-gazelle/releases/download/v0.22.0/bazel-gazelle-v0.22.0.tar.gz",
    ],
)
load("@io_bazel_rules_go//go:deps.bzl", "go_register_toolchains", "go_rules_dependencies")
load("@bazel_gazelle//:deps.bzl", "gazelle_dependencies")
go_rules_dependencies()
go_register_toolchains()
gazelle_dependencies()
####### Protobuf rules
http_archive(
    name = "rules_proto_grpc",
    urls = ["https://github.com/rules-proto-grpc/rules_proto_grpc/archive/1.0.2.tar.gz"],
    sha256 = "5f0f2fc0199810c65a2de148a52ba0aff14d631d4e8202f41aff6a9d590a471b",
    strip_prefix = "rules_proto_grpc-1.0.2",
)
load("@rules_proto_grpc//:repositories.bzl", "rules_proto_grpc_toolchains", "rules_proto_grpc_repos")
rules_proto_grpc_toolchains()
rules_proto_grpc_repos()
########
####### go grpc rules
load("@rules_proto_grpc//go:repositories.bzl", rules_proto_grpc_go_repos = "go_repos")
rules_proto_grpc_go_repos()
#####################
################# GRPC-GATEWAY
load("@rules_proto_grpc//:repositories.bzl", "bazel_gazelle", "io_bazel_rules_go")
io_bazel_rules_go()
load("@io_bazel_rules_go//go:deps.bzl", "go_register_toolchains", "go_rules_dependencies")
go_rules_dependencies()
go_register_toolchains()
bazel_gazelle()
load("@bazel_gazelle//:deps.bzl", "gazelle_dependencies")
gazelle_dependencies()
load("@rules_proto_grpc//github.com/grpc-ecosystem/grpc-gateway:repositories.bzl", rules_proto_grpc_gateway_repos = "gateway_repos")
rules_proto_grpc_gateway_repos()
load("@grpc_ecosystem_grpc_gateway//:repositories.bzl", "go_repositories")
go_repositories()
###############################
The service-0.proto file has the following content:
syntax = "proto3";
import "google/protobuf/empty.proto";
import "google/api/annotations.proto";
import "protoc-gen-swagger/options/annotations.proto";
option (grpc.gateway.protoc_gen_swagger.options.openapiv2_swagger) = {
info: {
title: "Tool";
version: "1.0";
contact: {
name: " Tool project";
url: "https://gitlab.example.de/tool";
email: "example#test.de";
};
license: {
name: "Apache License 2.0";
url: "https://gitlab.example.de/tool/-/blob/master/LICENSE";
};
};
// Overwriting host entry breaks tests, so this is not done here.
external_docs: {
url: "https://github.com/grpc-ecosystem/grpc-gateway";
description: "More about gRPC-Gateway";
}
schemes: HTTPS;
consumes: "application/json";
produces: "application/json";
security_definitions: {
security: {
key: "Bearer";
value: {
type: TYPE_API_KEY;
in: IN_HEADER;
name: "Authorization";
}
}
}
responses: {
key: "403";
value: {
description: "Returned when the user does not have permission to access the resource.";
}
}
responses: {
key: "404";
value: {
description: "Returned when the resource does not exist.";
schema: {
json_schema: {
type: STRING;
}
}
}
}
};
service JWTService {
option (grpc.gateway.protoc_gen_swagger.options.openapiv2_tag) = {
description: "JWT Service CRUD."
};
rpc CreateJWTToken(CreateJWTTokenRequest) returns (CreateJWTTokenResponse) {}
rpc UpdateJWTToken(google.protobuf.Empty) returns (google.protobuf.Empty) {
option (google.api.http) = {
post: "/v1/jwt/update"
body: "*"
};
}
rpc DeleteJWTToken(google.protobuf.Empty) returns (google.protobuf.Empty) {}
}
message CreateJWTTokenRequest {
string username = 1;
string password = 2;
}
message CreateJWTTokenResponse {
string jwt = 1;
}
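For context, this is roughly how the generated go-grpc code gets consumed when developing the service. This is only a sketch: the package alias, the server struct and the listen address are assumptions; the JWTServiceServer interface and RegisterJWTServiceServer function follow the names protoc-gen-go's gRPC plugin normally emits for this proto.
package main

import (
	"context"
	"log"
	"net"

	"github.com/golang/protobuf/ptypes/empty"
	"google.golang.org/grpc"

	// Assumed import path, matching the go_proto_library importpath below.
	pb "gitlab.example.de/tool/proto/service-0"
)

// jwtServer implements the generated pb.JWTServiceServer interface.
type jwtServer struct{}

func (s *jwtServer) CreateJWTToken(ctx context.Context, req *pb.CreateJWTTokenRequest) (*pb.CreateJWTTokenResponse, error) {
	// Placeholder logic: issue a token for req.Username / req.Password.
	return &pb.CreateJWTTokenResponse{Jwt: "dummy-token"}, nil
}

func (s *jwtServer) UpdateJWTToken(ctx context.Context, _ *empty.Empty) (*empty.Empty, error) {
	return &empty.Empty{}, nil
}

func (s *jwtServer) DeleteJWTToken(ctx context.Context, _ *empty.Empty) (*empty.Empty, error) {
	return &empty.Empty{}, nil
}

func main() {
	lis, err := net.Listen("tcp", ":9090") // port chosen arbitrarily
	if err != nil {
		log.Fatalf("failed to listen: %v", err)
	}
	s := grpc.NewServer()
	pb.RegisterJWTServiceServer(s, &jwtServer{})
	log.Fatal(s.Serve(lis))
}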
I'm using bazel to create the BUILD files. So running bazel run //:gazelle creates the following BUILD.bazel file in the proto/service-0 directory:
load("#rules_proto//proto:defs.bzl", "proto_library")
load("#io_bazel_rules_go//go:def.bzl", "go_library")
load("#io_bazel_rules_go//proto:def.bzl", "go_proto_library")
proto_library(
name = "0_proto",
srcs = ["service-0.proto"],
visibility = ["//visibility:public"],
deps = [
"#com_google_protobuf//:empty_proto",
"#go_googleapis//google/api:annotations_proto",
"//protoc-gen-swagger/options:options_proto",
],
)
go_proto_library(
name = "0_go_proto",
compilers = ["#io_bazel_rules_go//proto:go_grpc"],
importpath = "gitlab.example.de/tool/proto/service-0",
proto = ":0_proto",
visibility = ["//visibility:public"],
deps = [
"#go_googleapis//google/api:annotations_go_proto",
"//protoc-gen-swagger/options:options_proto",
],
)
go_library(
name = "service-0",
embed = [":0_go_proto"],
importpath = "gitlab.example.de/tool/proto/service-0",
visibility = ["//visibility:public"],
)
Now running bazel build //proto/service-0:service-0 gives the following error:
ERROR: /home/Documents/tool/proto/service-0/BUILD.bazel:16:1: no such package 'protoc-gen-swagger/options': BUILD file not found in any of the following directories. Add a BUILD file to a directory to mark it as a package.
So I added the following line to the root BUILD.bazel file, as described here:
# gazelle:resolve proto protoc-gen-swagger/options/annotations.proto @grpc_ecosystem_grpc_gateway//protoc-gen-swagger/options:options_proto
to let gazelle resolve the import to @grpc_ecosystem_grpc_gateway//protoc-gen-swagger/options:options_proto instead of //protoc-gen-swagger/options:options_proto.
This works without any problems.
Now, how do I generate the go-grpc files with bazel that I need to develop the service?
Running bazel build //proto/service-0:0_go_proto gives the following error:
ERROR: /home/Documents/tool/proto/service-0/BUILD.bazel:16:1: in deps attribute of go_proto_library rule //proto/service-0:0_go_proto: '@grpc_ecosystem_grpc_gateway//protoc-gen-swagger/options:options_proto' does not have mandatory providers: 'GoLibrary'
For development it should be possible to generate the go-grpc files with bazel so that the generated files end up in the proto/service-x/ directories.
In addition, I'm using grpc-gateway, so I added the following lines to the BUILD.bazel file in the proto/service-0 directory, as described here:
load("#rules_proto_grpc//github.com/grpc-ecosystem/grpc-gateway:defs.bzl", "gateway_grpc_compile", "gateway_grpc_library", "gateway_swagger_compile")
gateway_grpc_compile(
name = "0_gateway_grpc",
verbose = 1,
visibility = ["//visibility:public"],
deps = [":0_proto"],
)
gateway_swagger_compile(
name = "0_gateway_swagger_grpc",
visibility = ["//visibility:public"],
deps = [":0_proto"],
)
gateway_grpc_library(
name = "0_gateway_library",
importpath = "gitlab.example.de/tool/proto/0",
visibility = ["//visibility:public"],
deps = [":0_proto"],
)
Running bazel build :0_gateway_swagger_grpc gives the following output:
INFO: Invocation ID: 3b88f546-f09a-49d7-b238-0d41d98b9aa6
INFO: Analyzed target //proto/service-0:0_gateway_swagger_grpc (0 packages loaded, 0 targets configured).
INFO: Found 1 target...
Target //proto/service-0:0_gateway_swagger_grpc up-to-date:
dist/bin/proto/service-0/0_gateway_swagger_grpc/proto/service-0/service-0.swagger.json
dist/bin/proto/service-0/0_gateway_swagger_grpc/protoc-gen-swagger/options/annotations.swagger.json
dist/bin/proto/service-0/0_gateway_swagger_grpc/protoc-gen-swagger/options/openapiv2.swagger.json
INFO: Elapsed time: 0.092s, Critical Path: 0.00s
INFO: 0 processes.
INFO: Build completed successfully, 1 total action
This works without any problems. But when I try to generate the gateway files with bazel build :0_gateway_grpc I get the following errors:
INFO: Analyzed target //proto/service-0:0_gateway_grpc (3 packages loaded, 17 targets configured).
INFO: Found 1 target...
INFO: From Compiling protoc outputs for grpc_gateway_plugin plugin:
WARNING: Package "github.com/golang/protobuf/protoc-gen-go/generator" is deprecated.
A future release of golang/protobuf will delete this package,
which has long been excluded from the compatibility promise.
INFO: From Compiling protoc outputs for grpc_gateway_plugin plugin:
WARNING: Package "github.com/golang/protobuf/protoc-gen-go/generator" is deprecated.
A future release of golang/protobuf will delete this package,
which has long been excluded from the compatibility promise.
ERROR: /home/.cache/bazel/_bazel_/81e1d15aef6baed1975edd8b4c490e5b/external/grpc_ecosystem_grpc_gateway/protoc-gen-swagger/options/BUILD.bazel:20:1: output 'external/grpc_ecosystem_grpc_gateway/protoc-gen-swagger/options/options_proto/gateway_grpc_compile_aspect_verb1/protoc-gen-swagger/options/annotations.pb.gw.go' was not created
ERROR: /home/.cache/bazel/_bazel_/81e1d15aef6baed1975edd8b4c490e5b/external/grpc_ecosystem_grpc_gateway/protoc-gen-swagger/options/BUILD.bazel:20:1: output 'external/grpc_ecosystem_grpc_gateway/protoc-gen-swagger/options/options_proto/gateway_grpc_compile_aspect_verb1/protoc-gen-swagger/options/openapiv2.pb.gw.go' was not created
ERROR: /home/.cache/bazel/_bazel_/81e1d15aef6baed1975edd8b4c490e5b/external/grpc_ecosystem_grpc_gateway/protoc-gen-swagger/options/BUILD.bazel:20:1: not all outputs were created or valid
Target //proto/service-0:0_gateway_grpc failed to build
Use --verbose_failures to see the command lines of failed build steps.
INFO: Elapsed time: 0.243s, Critical Path: 0.11s
INFO: 2 processes: 2 linux-sandbox.
FAILED: Build did NOT complete successfully
So how can I fix this? And once it is fixed, how can I make bazel create/copy the generated files into the proto/service-x directories for development?
I'm using bazelisk and bazel version 3.0.0.

Not sure if you've already found a solution, but I'm trying to do something similar, and your notes were helpful to me.
Regarding this error:
ERROR: /home/Documents/tool/proto/service-0/BUILD.bazel:16:1: in deps attribute of go_proto_library rule //proto/service-0:0_go_proto: '@grpc_ecosystem_grpc_gateway//protoc-gen-swagger/options:options_proto' does not have mandatory providers: 'GoLibrary'
I found that you can fix this with a second gazelle:resolve directive which includes the import-lang argument. Specifically:
# gazelle:resolve proto go protoc-gen-openapiv2/options/annotations.proto @grpc_ecosystem_grpc_gateway//protoc-gen-openapiv2/options:options_go_proto
I didn't encounter the other error you described.
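For anyone reading along: once the :0_gateway_grpc target builds, the generated *.pb.gw.go code is what gateway/main.go consumes. Below is a minimal sketch of that wiring, assuming the grpc-gateway v1 runtime and the RegisterJWTServiceHandlerFromEndpoint name the gateway plugin usually generates for JWTService; the import alias, ports and addresses are made up.
package main

import (
	"context"
	"log"
	"net/http"

	"github.com/grpc-ecosystem/grpc-gateway/runtime"
	"google.golang.org/grpc"

	// Assumed import path; in this repo the generated code is declared with
	// importpath "gitlab.example.de/tool/proto/service-0".
	gw "gitlab.example.de/tool/proto/service-0"
)

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	mux := runtime.NewServeMux()
	opts := []grpc.DialOption{grpc.WithInsecure()}

	// RegisterJWTServiceHandlerFromEndpoint is generated by the
	// grpc-gateway plugin from service-0.proto (JWTService).
	if err := gw.RegisterJWTServiceHandlerFromEndpoint(ctx, mux, "localhost:9090", opts); err != nil {
		log.Fatalf("failed to register gateway: %v", err)
	}

	// Only UpdateJWTToken has a google.api.http annotation, so the gateway
	// exposes POST /v1/jwt/update and proxies it to the gRPC server.
	log.Fatal(http.ListenAndServe(":8080", mux))
}
As for the second part of the question (getting the generated files back into proto/service-x for development): Bazel only materializes outputs under bazel-bin, so a common workaround is a small script (e.g. the existing scripts/generate-go.sh) that runs bazel build and copies the relevant outputs into the source tree.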

Related

Where to put global custom ansible filters

I have a project with collections and roles. The collections and roles have custom filters and modules. I know where to create modules / filters in a role or collection.
Is it possible to have global filters? Where can I put these?
The structure of my project is
├── ansible_collections
│   ├── ...
├── group_vars
│   └── ...
├── host_vars
│   └── ...
├── plays
│   └── ..
├── roles
│   ├── external
│   └── internal
You can set the filter_plugins path in ansible.cfg, for example:
# set plugin path directories here, separate with colons
#action_plugins = /usr/share/ansible/plugins/action
#become_plugins = /usr/share/ansible/plugins/become
#cache_plugins = /usr/share/ansible/plugins/cache
#callback_plugins = /usr/share/ansible/plugins/callback
#connection_plugins = /usr/share/ansible/plugins/connection
#lookup_plugins = /usr/share/ansible/plugins/lookup
#inventory_plugins = /usr/share/ansible/plugins/inventory
#vars_plugins = /usr/share/ansible/plugins/vars
filter_plugins = filter_plugins
#test_plugins = /usr/share/ansible/plugins/test
#terminal_plugins = /usr/share/ansible/plugins/terminal
#strategy_plugins = /usr/share/ansible/plugins/strategy

How do I properly specify the location of my proto files in protocol buffers?

Here are the three files I am working with:
// city.proto
syntax = "proto3";

package city;

message City {
  string cityName = 1;
  string zipCode = 2;
  string countryName = 3;
}

// street.proto
syntax = "proto3";

import "Exercise/city.proto";

package street;

message Street {
  string cityName = 1;
  city.City city = 2;
}

// building.proto
syntax = "proto3";

import "Exercise/street.proto";

package building;

message Building {
  string buildingName = 1;
  string buildingNumber = 2;
  street.Street street = 3;
}
This is my current directory structure:
- PROTOCOLBUFFERS (folder on desktop)
  - Exercise
    - city.proto
    - street.proto
    - building.proto
This is the command I'm using to generate code from the proto files
protoc -I="."/Exercise --java_out=Exercise Exercise/*.proto
I am running this command with my terminal inside the PROTOCOLBUFFERS folder.
What am I doing wrong in the execution of this command? I am on Windows.
This is the error message I get; searching online for it hasn't been useful.
building.proto:3:1: Import "Exercise/street.proto" was not found or had errors.
building.proto:10:5: "street.Street" is not defined.
I'm unfamiliar with running protoc on Windows, but...
Try each of these in turn:
- Replace the Linux path separator / with the Windows separator \.
- Use absolute paths from the drive root: protoc --proto_path=c:\...\Exercise --java_out=Exercise c:\...\Exercise\*.proto, i.e. replace c:\... with the correct path.
- If that doesn't work, replace the single wildcard (*.proto) with fully qualified paths to each proto: c:\...\Exercise\city.proto c:\...\Exercise\building.proto c:\...\Exercise\street.proto
protoc is "picky". If you need to use a proto_path, you should repeat the appropriate path in subsequent references to proto files.
I am surprised to see that the documentation suggests that "import" is not supported in Java!? I use languages other than Java and would be surprised if this were true, but it is what it says:
https://developers.google.com/protocol-buffers/docs/proto3#importing_definitions
You should change the import paths in your proto files: the files are already in the same folder. So, for example, change:
// street.proto
syntax = "proto3";
import "city.proto";
instead of
// street.proto
syntax = "proto3";
import "Exercise/city.proto";
After this fix, the command generates the files as:
.
├── Exercise
│   ├── building
│   │   └── BuildingOuterClass.java
│   ├── building.proto
│   ├── city
│   │   └── CityOuterClass.java
│   ├── city.proto
│   ├── street
│   │   └── StreetOuterClass.java
│   └── street.proto
└── README.md
Hope this helps.

Grunt Sass: change one folder name on destination path

How can I generate the CSS files in a folder other than that of the SCSS source, while keeping part of its directory tree?
Let me explain, I have different folders with several .scss sources:
resources/client-1/scss/app.scss
resources/client-2/scss/app.scss
I would like the generated CSS to be placed in the css folder rather than scss. So:
resources/client-1/css/app.css
resources/client-2/css/app.css
But I would especially like a dynamic configuration, because the number of client-x folders will increase over time.
Any idea?
Thanks.
EDIT:
My configuration using grunt-sass is currently as follows:
module.exports = {
  options: {
    outputStyle: 'nested',
    precision: 3,
    sourceMap: true
  },
  main: {
    files: {
      'resources/clients-1/css/style.css': ['resources/clients-1/scss/style.scss'],
      'resources/clients-2/css/style.css': ['resources/clients-2/scss/style.scss']
    }
  }
};
You can build the files object dynamically (instead of using the Files Object Format, which is what you're currently using), and utilize the rename property to create the new destination path for each resultant .css files.
Replace your files property with the following instead:
files: [{
  expand: true,
  cwd: 'resources',
  src: ['**/*.scss'],
  dest: 'resources/', // <-- Value must match `cwd` value and
                      //     include trailing forward slash.
  ext: '.css',
  rename: function(dest, src) {
    var parts = src.split('/');
    parts.splice(1, 1, 'css');
    return dest + parts.join('/');
  }
}]
Explanation:
In the body of the rename function we firstly split the src path into an Array via the line which reads:
var parts = src.split('/');
Given your example source path of 'resources/clients-1/scss/style.scss' (with expand and cwd, the src passed to rename is relative to the cwd value, i.e. 'clients-1/scss/style.scss'), this will result in an Array assigned to the parts variable like this:
['clients-1', 'scss', 'style.scss']
We then use splice() in the line reading:
parts.splice(1, 1, 'css');
This replaces the value at index 1 (i.e. 'scss') with the given value of 'css'. The parts Array now results in this:
['clients-1', 'css', 'style.scss']
Finally, via the line which reads:
return dest + parts.join('/');
We return from the function a new String for the dest path. Given the previous example the dest path will be this:
'resources/clients-1/css/style.css'
Because the ext property has a value of .css, grunt replaces the original .scss extension name with .css.
Note:
If your resources folder has sub-folder(s) containing .scss files that you don't want to generate a .css file for, you'll need to negate them using a globbing pattern. For example, let's say your resources directory looks like this:
.
├── resources
│   ├── clients-1
│   │   └── scss
│   │       └── style.scss
│   ├── clients-2
│   │   └── scss
│   │       └── style.scss
│   ├── foo
│   │   └── scss
│   │       └── style.scss
│   └── quux
│       └── scss
│           └── style.scss
└── ...
And let's say you don't want .css files generated for the .scss files which exist in the folders foo and quux (i.e. you only want to process folders clients-1 and clients-2).
Given such a situation you would need to change your src property in the code example provided above to:
src: ['{,!(foo|quux)/**/}*.scss'],

Represent file directory-type structure in graphql

Say I have a file directory structure:
├── folder_a
│ └── folder_b
│ └── file_x
├── file_y
Is it possible to represent this in a GraphQL query?
query FileTree {
  ?
}

Puppet Code coverage testing using rspec

My module structure is this:
install_logging
├── files
│   └── install_logging.sh
├── Gemfile
├── Gemfile.lock
├── manifests
│   ├── \
│   ├── empty.rb
│   └── init.pp
├── Modulefile
├── Rakefile
├── README
├── spec
│   ├── chkcls_sec.rb
│   ├── classes
│   │   ├── init1_spec.rb
│   │   ├── init_spec.rb
│   │   └── spec_helper.rb
│   ├── coverage_spec.rb
│   ├── defines
│   ├── fixtures
│   │   ├── manifests
│   │   │   └── site.pp
│   │   └── modules
│   │       └── install_logging
│   │           ├── files -> ../../../../files
│   │           ├── manifests -> ../../../../manifests
│   │           └── templates -> ../../../../templates
│   ├── functions
│   ├── hosts
│   └── spec_helper.rb
├── templates
│   └── agent.sh.erb
└── tests
    └── init.pp
manifests/init.pp file code.
class install_logging {
  file { '/tmp/install_logging.sh':
    ensure => 'present',
    mode   => '0644',
    source => 'puppet:///modules/install_logging/install_logging.sh'
  }-> exec { 'Install Logging Agent':
    provider  => shell,
    command   => 'bash /tmp/install_logging.sh',
    logoutput => on_failure,
  }
}

$ua_module_name = 'VivekMishra01/Google_Cloud_Logging1'
$ua_module_version = "${ua_module_name}/1.1.0"

file { '/tmp/agent.sh':
  ensure  => file,
  mode    => '0755',
  content => template('gcloudsdk/agent.sh.erb'),
  require => Exec['Remove Components'],
}-> exec { 'Agent':
  provider  => shell,
  command   => 'sh /tmp/agent.sh',
  logoutput => on_failure,
}
spec/classes/init_spec.rb file code
require 'spec_helper'

describe 'contains install_logging' do
  it { File.exist?('File.join(File.dirname(__FILE__),init.pp)') }
end

at_exit { RSpec::Puppet::Coverage.report! }
This is what I am trying to do.
root@ubuntu-14-04:/home/vivekkumarmishra17/Mymodule/install_logging# rspec spec/classes/init_spec.rb
.
Finished in 0.00164 seconds (files took 0.59198 seconds to load)
1 example, 0 failures
Total resources: 0
Touched resources: 0
Resource coverage: NaN%
Untouched resources:
The problem is: why is it not able to find any resources, even though 1 example runs successfully?
Any help will be highly appreciated. Thanks.
Please use the Puppet Development Kit to generate your modules and classes. The PDK will generate working unit tests for those.
For writing unit tests using rspec-puppet, please refer to the docsite, in this case specifically to testing classes and resources.
