Axon 4 Kafka: how to invoke the event handler (Spring)

I'm studying Axon and want to build a simple Axon app with Kafka and MongoDB.
I'm using:
- two instances (a command app and a query app)
- MongoDB and Kafka, without Axon Server
- Kotlin
- a multi-module build
- Axon Framework 4.5
I've confirmed that the command app stores event data in MongoDB. However, the event handler is never invoked.
Here is the project structure:
practice-root
│
├── command
│ ├── build
│ ├── src
│ │ ├── main
│ │ │ ├ kotlin
│ │ │ │ └ com.cqrs.axon
│ │ │ │ ├ Application.kt
│ │ │ │ ├ SimpleDTO
│ │ │ │ ├ SimpleController
│ │ │ │ ...
│ │ │ └ resources
│ │ ├── test
│ │ │
│ └── build.gradle
│
├── query
│ ├ ...
│ ...
│
├── common
│ ├ ...
│ ...
│
├── README
├── build.gradle
└── settings.gradle
Here is my code.
command module
Application.kt
@SpringBootApplication
class Application

fun main(args: Array<String>) {
    runApplication<Application>(*args)
}
SimpleService.kt
@Service
class SimpleService(
    private val eventGateway: EventGateway
) {
    @CommandHandler
    fun createSimple(simpleDTO: SimpleDTO) {
        this.eventGateway.publish(
            SimpleEvent(
                id = UUID.randomUUID().toString(),
                data = simpleDTO.data
            )
        )
    }
}
SimpleDTO.kt
data class SimpleDTO(
    val data: String
)
SimpleController.kt
@RestController
class SimpleController(
    private val simpleService: SimpleService
) {
    @PostMapping("/simple")
    fun createSimple(@RequestBody simpleDTO: SimpleDTO) {
        simpleService.createSimple(simpleDTO)
    }
}
AxonConfig.kt
@Configuration
class AxonConfig {

    @Bean
    fun eventStore(storageEngine: EventStorageEngine, configuration: AxonConfiguration): EmbeddedEventStore {
        return EmbeddedEventStore.builder()
            .storageEngine(storageEngine)
            .messageMonitor(configuration.messageMonitor(EventStore::class.java, "eventStore"))
            .build()
    }

    @Bean
    fun storageEngine(client: MongoClient): EventStorageEngine {
        return MongoEventStorageEngine.builder()
            .mongoTemplate(DefaultMongoTemplate.builder().mongoDatabase(client).build())
            .build()
    }

    /**
     * Creates a Kafka producer factory, using the Kafka properties configured in resources/application.yml
     */
    @Bean
    fun producerFactory(kafkaProperties: KafkaProperties): ProducerFactory<String, ByteArray> {
        return DefaultProducerFactory.builder<String, ByteArray>()
            .configuration(kafkaProperties.buildProducerProperties())
            .producerCacheSize(10_000)
            .confirmationMode(ConfirmationMode.WAIT_FOR_ACK)
            .build()
    }
}
application.yml
---
server:
  port: 8080

spring:
  application:
    name: commandSpringApplication
  data:
    mongodb:
      uri: mongodb://user:u123@localhost:27016/test?authSource=admin
  autoconfigure:
    exclude:
      - org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration

axon:
  axonserver:
    enabled: false
  kafka:
    client-id: myproducer
    default-topic: axon-events
    producer:
      bootstrap-servers: localhost:9092
      event-processor-mode: tracking
      transaction-id-prefix: sample-eventstx
      properties:
        security.protocol: PLAINTEXT

logging:
  level:
    com:
      cqrs:
        command: debug
    org:
      axonframework: debug
query module
Application.kt
@SpringBootApplication
class Application

fun main(args: Array<String>) {
    runApplication<Application>(*args)
}
QuerySimpleProjection.kt
@Component
@ProcessingGroup("SampleProcessor")
class QuerySimpleProjection(
    private val simpleRepository: QuerySimpleRepository
) {
    @EventHandler
    fun on(event: SimpleEvent, @Timestamp instant: Instant) {
        val simpleMV = SimpleMV(
            id = event.id,
            data = event.data
        )
        simpleRepository.save(simpleMV)
    }
}
QuerySimpleRepository.kt
@Repository
interface QuerySimpleRepository : JpaRepository<SimpleMV, String>
SimpleMV.kt
@Entity
@Table(name = "mv_simple_mongo")
data class SimpleMV(
    @Id
    val id: String,
    val data: String
)
AxonConfig.kt
@Configuration
class AxonConfig {

    @Autowired
    fun configureStreamableKafkaSource(
        configurer: EventProcessingConfigurer,
        streamableKafkaMessageSource: StreamableKafkaMessageSource<String, ByteArray>
    ) {
        configurer.registerTrackingEventProcessor("SampleProcessor") { streamableKafkaMessageSource }
    }
}
application.yml
---
server:
  port: 9090

spring:
  application:
    name: queryMongoSpringApplication
  datasource:
    driver-class-name: com.mysql.cj.jdbc.Driver
    url: jdbc:mysql://localhost:33060/test?useSSL=false&characterEncoding=utf8&useUnicode=true
    username: user
    password: u123
  jpa:
    show-sql: true
    properties:
      hibernate:
        dialect: org.hibernate.dialect.MySQL8Dialect
        hbm2ddl.auto: update
        format_sql: true
        jdbc:
          time_zone: UTC

axon:
  axonserver:
    enabled: false
  kafka:
    client-id: myconsumer
    default-topic: axon-events
    consumer:
      bootstrap-servers: localhost:9092
      event-processor-mode: tracking
      properties:
        security.protocol: PLAINTEXT

logging:
  level:
    com:
      cqrs:
        command: debug
    org:
      axonframework: debug
common module
SimpleEvent.kt
data class SimpleEvent(
    val id: String,
    val data: String
)
build.gradle of command module and query module
apply plugin: 'kotlin-jpa'
apply plugin: 'org.springframework.boot'
apply plugin: 'kotlin-allopen'
...
dependencies {
    implementation project(':common')
    implementation 'org.springframework.boot:spring-boot-starter-validation'
    implementation 'mysql:mysql-connector-java'
    implementation 'org.springframework.boot:spring-boot-starter-web'
    implementation 'org.springframework.boot:spring-boot-starter-data-jpa'
    implementation 'org.apache.kafka:kafka-clients'
    implementation 'org.axonframework.extensions.kafka:axon-kafka-spring-boot-starter:4.5.3'
    compile("org.axonframework:axon-spring-boot-starter:4.5.8") {
        exclude group: 'org.axonframework', module: 'axon-server-connector'
    }
    implementation "org.axonframework:axon-configuration:4.5.8"
    testImplementation 'org.springframework.boot:spring-boot-starter-test'
}
The query, common, and command app package names are the same: com.cqrs.axon.
Please let me know what I'm missing,
or recommend another example project with multiple modules and Kafka plus MongoDB (or MySQL).
I've looked at kafka-extension-example a lot, but it's still very confusing how to use Axon.
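One thing worth checking first: persisting events in the Mongo event store does not by itself put them on Kafka. If no KafkaPublisher/KafkaEventPublisher is active in the command app, the query app's "SampleProcessor" has nothing to read from the axon-events topic. Below is a minimal sketch of wiring the publisher explicitly, based on the Kafka extension's documented builder API; treat the exact builder methods (in particular topic(...)) and the registration calls as assumptions to verify against axon-kafka 4.5.3:

@Bean
fun kafkaPublisher(producerFactory: ProducerFactory<String, ByteArray>): KafkaPublisher<String, ByteArray> =
    KafkaPublisher.builder<String, ByteArray>()
        .producerFactory(producerFactory)
        .topic("axon-events") // must match axon.kafka.default-topic on both sides
        .build()

@Bean
fun kafkaEventPublisher(kafkaPublisher: KafkaPublisher<String, ByteArray>): KafkaEventPublisher<String, ByteArray> =
    KafkaEventPublisher.builder<String, ByteArray>()
        .kafkaPublisher(kafkaPublisher)
        .build()

@Autowired
fun registerKafkaEventPublisher(
    configurer: EventProcessingConfigurer,
    kafkaEventPublisher: KafkaEventPublisher<String, ByteArray>
) {
    // Give the publisher its own processing group, so a tracking processor
    // reads events from the Mongo event store and forwards them to Kafka.
    configurer.registerEventHandler { kafkaEventPublisher }
        .assignHandlerInstancesMatching(KafkaEventPublisher.DEFAULT_PROCESSING_GROUP) { it is KafkaEventPublisher<*, *> }
        .registerTrackingEventProcessor(KafkaEventPublisher.DEFAULT_PROCESSING_GROUP)
}

If the starter's auto-configuration should already be doing this, the other usual suspects are a topic mismatch between the two apps and the mixed Axon versions in build.gradle (starter 4.5.8 via the deprecated compile configuration next to the Kafka extension 4.5.3).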

Related

Observing frequent set_autocommit = ? calls and db.SQL.innodb.rows MySQL spikes with Spring Boot + MySQL 5 on a 3-pod deployment

Observing a high number of set_autocommit = ? calls.
Observing a periodic (every 20 minutes) spike in RDS Performance Insights.
Running versions:
id 'org.springframework.boot' version '2.5.4'
id 'io.spring.dependency-management' version '1.0.11.RELEASE'
id "com.palantir.docker" version "0.26.0"
id "com.palantir.docker-run" version "0.26.0"
id 'pl.allegro.tech.build.axion-release' version '1.13.2'
id 'com.appland.appmap' version '1.1.0'
3 pods running on aws eks
db -> aws rds 5.7.mysql_aurora.2.10.1
Here are the configurations:
application.yml
spring:
  application:
    name: xyz
  profiles:
    # The commented value for `active` can be replaced with valid Spring profiles to load.
    # Otherwise, it will be filled in by gradle when building the JAR file
    # Either way, it can be overridden by `--spring.profiles.active` value passed in the commandline or `-Dspring.profiles.active` set in `JAVA_OPTS`
    active: dev
    group:
      dev:
        - dev
        - api-docs
        # Uncomment to activate TLS for the dev profile
        #- tls
      prod:
        - prod
        - api-docs
        # Uncomment to activate TLS for the dev profile
        #- tls
      stage:
        - stage
  jmx:
    enabled: false
  data:
    web:
      pageable:
        default-page-size: 20
        max-page-size: 20
    jpa:
      repositories:
        bootstrap-mode: deferred
  jpa:
    open-in-view: false
    properties:
      hibernate.jdbc.time_zone: UTC
      hibernate.id.new_generator_mappings: true
      hibernate.connection.provider_disables_autocommit: true #https://vladmihalcea.com/why-you-should-always-use-hibernate-connection-provider_disables_autocommit-for-resource-local-jpa-transactions/
      hibernate.cache.use_second_level_cache: true
      hibernate.cache.region.factory_class: org.hibernate.cache.ehcache.EhCacheRegionFactory
      hibernate.cache.use_query_cache: true
      hibernate.javax.cache.missing_cache_strategy: create
      # modify batch size as necessary
      hibernate.jdbc.batch_size: 20
      hibernate.order_inserts: true
      hibernate.order_updates: true
      hibernate.batch_versioned_data: true
      hibernate.query.fail_on_pagination_over_collection_fetch: true
      hibernate.query.in_clause_parameter_padding: true
      hibernate.dialect: org.hibernate.dialect.MySQL5Dialect
      javax.persistent.sharedCache.mode: ENABLE_SELECTIVE
    hibernate:
      ddl-auto: none
      naming:
        physical-strategy: org.springframework.boot.orm.jpa.hibernate.SpringPhysicalNamingStrategy
        implicit-strategy: org.springframework.boot.orm.jpa.hibernate.SpringImplicitNamingStrategy
  messages:
    basename: i18n/messages
  main:
    allow-bean-definition-overriding: true
  task:
    execution:
      thread-name-prefix: xyz-task-
      pool:
        core-size: 2
        max-size: 50
        queue-capacity: 10000
    scheduling:
      thread-name-prefix: catalogue-scheduling-
      pool:
        size: 2
  thymeleaf:
    mode: HTML
  output:
    ansi:
      console-available: true

server:
  servlet:
    session:
      cookie:
        http-only: true
  tomcat:
    mbeanregistry:
      enabled: true
    threads:
      max: 100
  compression:
    enabled: true
    mime-types: "text/html,text/xml,text/plain,text/css,text/javascript,application/javascript,application/json"
    min-response-size: 1024
  port: 8080

# Properties to be exposed on the /info management endpoint
info:
  # Comma separated list of profiles that will trigger the ribbon to show
  display-ribbon-on-profiles: 'dev'

management:
  endpoints:
    web:
      exposure:
        include: "health,info,metrics,prometheus"
  endpoint:
    health:
      probes:
        enabled: true
      show-details: always
      show-components: always
application-prod.yml
logging:
  level:
    ROOT: INFO
    org.hibernate.SQL: ERROR
    com.pitstop.catalogue: INFO
    com.zaxxer.hikari: INFO
  config: classpath:logback-prod.xml

spring:
  devtools:
    restart:
      enabled: true
      additional-exclude: static/**
  jackson:
    serialization:
      indent-output: true
  datasource:
    auto-commit: false
    type: com.zaxxer.hikari.HikariDataSource
    url: ${SPRING_DATASOURCE_URL}
    username: ${SPRING_DATASOURCE_USERNAME}
    password: ${SPRING_DATASOURCE_PASSWORD}
    hikari:
      poolName: CatalogJPAHikariCP
      minimumIdle: 10
      maximumPoolSize: 120
      connectionTimeout: 30000
      idleTimeout: 600000
      maxLifetime: 1800000
      auto-commit: false
      data-source-properties:
        testWhileIdle: true
        validationQuery: SELECT 1 FROM DUAL
        cachePrepStmts: true
        prepStmtCacheSize: 250
        prepStmtCacheSqlLimit: 2048
        useServerPrepStmts: true
        useLocalSessionState: true
        rewriteBatchedStatements: true
        cacheResultSetMetadata: true
        cacheServerConfiguration: true
        maintainTimeStats: true
  servlet:
    multipart:
      location: /data/tmp
  jpa:
    hibernate:
      ddl-auto: none
    properties:
      spring.jpa.show-sql: false
      hibernate.generate_statistics: false
  liquibase:
    contexts: prod
  messages:
    cache-duration: PT1S # 1 second, see the ISO 8601 standard
  thymeleaf:
    cache: false
  sleuth:
    sampler:
      probability: 1 # report 100% of traces
Example of one entity:
@Entity
@Table(name = "product_model")
@org.hibernate.annotations.Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
public class ProductModel implements Serializable {

    private static final long serialVersionUID = 1L;

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Long id;

    @NotNull
    @Size(max = 50)
    @Column(name = "name", length = 50, nullable = false)
    private String name;

    @ManyToOne(optional = false, fetch = FetchType.LAZY)
    @JsonIgnoreProperties(value = {"productModels"}, allowSetters = true)
    private ProductMake productMake;

    @OneToMany(mappedBy = "productModel", fetch = FetchType.LAZY)
    @JsonIgnoreProperties(value = {"product", "productModel"}, allowSetters = true)
    @BatchSize(size = 20)
    @org.hibernate.annotations.Cache(usage = CacheConcurrencyStrategy.READ_WRITE)
    private Set<ProductModelMapping> productModelMappings = new HashSet<>();

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getName() {
        return this.name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public ProductMake getProductMake() {
        return this.productMake;
    }

    public void setProductMake(ProductMake productMake) {
        this.productMake = productMake;
    }

    public Set<ProductModelMapping> getProductModelMappings() {
        return this.productModelMappings;
    }

    public void setProductModelMappings(Set<ProductModelMapping> productModelMappings) {
        if (this.productModelMappings != null) {
            this.productModelMappings.forEach(i -> i.setProductModel(null));
        }
        if (productModelMappings != null) {
            productModelMappings.forEach(i -> i.setProductModel(this));
        }
        this.productModelMappings = productModelMappings;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof ProductModel)) {
            return false;
        }
        return id != null && id.equals(((ProductModel) o).id);
    }

    @Override
    public int hashCode() {
        return getClass().hashCode();
    }

    @Override
    public String toString() {
        return "ProductModel{" +
            "id=" + getId() +
            ", name='" + getName() + "'" +
            "}";
    }
}
One of the JPA repositories:

@Repository
public interface BrandRepository extends JpaRepository<Brand, Long>, JpaSpecificationExecutor<Brand> {
}
One of the service methods:
I keep @Transactional at the service class level and do not put @Transactional on save/update service-layer methods.

@Transactional(readOnly = true)
public Page<BrandDto> findAll(Pageable pageable) {
    log.debug("Request to get all Brands");
    return baseService.findAndBuild(pageable, brandRepository);
}

@Override
public Page<T> findAndBuild(Pageable pageable, JpaRepository<E, K> repository) {
    final Page<E> pageEntityResponse = repository.findAll(pageable);
    return pageConverter(pageEntityResponse);
}
A couple of issues that I am facing:
- a lot of set_autocommit = ? statements being fired
- db.SQL.innodb.rows MySQL spikes at 20-minute intervals
I see you’ve got AppMap enabled.
You may try the option appmap.recording.auto as described at https://appland.com/docs/reference/appmap-java.html.
Start your server, let it run for a bit, shut it down, and you should see those queries being issued (by the connection pool, I suspect). You can include the package names of the connection pool, ORM, and database driver in your appmap.yml if you want extra detail.
Additional help is available in Discord - https://discord.com/invite/N9VUap6
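For reference, appmap.yml takes a name plus a list of packages to record; here is a sketch along the lines of the suggestion above (the paths are assumptions based on the packages named in this question):

name: xyz
packages:
- path: com.pitstop.catalogue   # application code
- path: com.zaxxer.hikari       # connection pool
- path: org.hibernate           # ORM
- path: com.mysql.cj            # JDBC driver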

Set a Prefix for Spring Micrometer Metrics using StatsD and Datadog

I'm trying to implement a custom metrics integration for my app, using the following setup:
// DogStatsd Metrics Integration with MicroMeter
implementation group: 'io.micrometer', name: 'micrometer-registry-statsd', version: '1.7.2'
Custom Spring Configuration Added for the application
@Configuration
public class MetricConfiguration {

    private final MeterRegistry meterRegistry;

    @Value("${management.metrics.export.statsd.prefix}")
    private String prefix;

    @Autowired
    public MetricConfiguration(MeterRegistry meterRegistry) {
        this.meterRegistry = meterRegistry;
    }

    @Bean
    public MeterRegistryCustomizer<MeterRegistry> metricsCommonTags() {
        return registry -> registry.config().meterFilter(new MeterFilter() {
            @Override
            public Meter.Id map(Meter.Id id) {
                if (!id.getName().startsWith(prefix)) {
                    return id.withName(prefix + "." + id.getName());
                } else {
                    return id;
                }
            }
        });
    }

    @Bean
    public TimedAspect timedAspect() {
        return new TimedAspect(meterRegistry);
    }
}
YAML Configuration for Metrics
management:
  metrics:
    enable:
      jvm: false
      process: false
      tomcat: false
      system: false
      logback: false
    distribution:
      slo:
        http.server.requests: 50ms
      percentiles-histogram:
        http.server.requests: true
      percentiles:
        http.server.requests: 0.99
    export:
      statsd:
        enabled: false
        flavor: datadog
        host: ${DD_AGENT_HOST}
        port: 8125
        prefix: ${spring.application.name}
  endpoints:
    enabled-by-default: true
    web:
      exposure:
        include: "*"
  endpoint:
    metrics:
      enabled: true
    health:
      enabled: true
      show-components: "always"
      show-details: "always"
I'm trying to add the prefix to all the custom metrics, but after setting the prefix the excluded metrics break and start showing up in the /actuator/metrics response.
The response looks like below:
{
  "names": [
    "my-service.http.server.requests",
    "my-service.jvm.buffer.count",
    "my-service.jvm.buffer.memory.used",
    "my-service.jvm.buffer.total.capacity",
    "my-service.jvm.classes.loaded",
    "my-service.jvm.classes.unloaded",
    "my-service.jvm.gc.live.data.size",
    "my-service.jvm.gc.max.data.size",
    "my-service.jvm.gc.memory.allocated",
    "my-service.logback.events",
    "my-service.process.cpu.usage",
    "my-service.process.files.max",
    "my-service.process.files.open",
    "my-service.process.start.time",
    "my-service.process.uptime",
    "my-service.system.cpu.count",
    "my-service.system.cpu.usage",
    "my-service.system.load.average.1m",
    "my-service.tomcat.sessions.active.current",
    "my-service.tomcat.sessions.active.max",
    "my-service.tomcat.sessions.alive.max",
    "my-service.tomcat.sessions.created",
    "my-service.tomcat.sessions.expired",
    "my-service.tomcat.sessions.rejected"
  ]
}
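The excluded meters come back because Micrometer applies all map filters (such as the rename above) before it consults the accept/deny filters, so the deny rules generated from management.metrics.enable no longer match once the id carries the prefix. One workaround, sketched here in Kotlin for consistency with the rest of this page (denyNameStartsWith is an existing MeterFilter factory method; the property placeholder mirrors the question's setup), is to deny the renamed ids explicitly:

@Bean
fun denyRenamedMeters(
    @Value("\${management.metrics.export.statsd.prefix}") prefix: String
): MeterRegistryCustomizer<MeterRegistry> =
    MeterRegistryCustomizer { registry ->
        // The enable.* properties match original names ("jvm...", "tomcat..."),
        // so re-deny the prefixed variants produced by the rename filter.
        listOf("jvm", "process", "tomcat", "system", "logback").forEach { category ->
            registry.config().meterFilter(MeterFilter.denyNameStartsWith("$prefix.$category"))
        }
    }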

How can I configure the Jib Extension inside a buildSrc kotlin file?

I'm trying to modularize my build.gradle.kts. It was suggested to me to create a buildSrc folder.
After some research and some asking around I found the article I hated Gradle!... so this was my try:
buildSrc tree:
buildSrc/
├── build.gradle.kts
├── settings.gradle.kts
└── src
└── main
├── kotlin
│   ├── Docker.kt
│   ├── MyProjectExtensions.kt
│   └── Versions.kt
└── resources
└── META-INF
└── gradle-plugins
└── pt.branden.brandenportal.properties
My build.gradle.kts:
plugins {
    `kotlin-dsl`
    id("com.google.cloud.tools.jib") version Versions.jib
}

repositories {
    mavenCentral()
    google()
    jcenter()
}

dependencies {
    implementation("gradle.plugin.com.google.cloud.tools:jib-gradle-plugin:${Versions.jib}")
    implementation("org.jetbrains.kotlin:kotlin-gradle-plugin:1.3.50")
    implementation(gradleApi())
    implementation(localGroovy())
}
And finally Docker.kt:
import org.gradle.api.Plugin
import org.gradle.api.Project

open class JibConfigPlugin : Plugin<Project> {
    override fun apply(target: Project) {
        //configureJib()
        TODO("not implemented")
    }
}

//internal fun Project.configureJib() = this.extensions.getByType<JibExtension>().run {}
internal fun Project.configureJib() = project.configure<JibExtension> {
    TODO("not implemented")
}
My problem is that I can't find the JibExtension, so when I try to implement and configure Jib it doesn't work, although in build.gradle.kts everything works.
My problem is that I can't find the JibExtension
Plugins or extensions can be applied in a variety of different ways. You can "react" to the plugin being applied by using the withPlugin method of PluginManager:
class JibConfigPlugin : Plugin<Project> {
    override fun apply(project: Project) {
        project.pluginManager.withPlugin("com.google.cloud.tools.jib") {
            // Configuration happens inside this Action block.
        }
    }
}
Using this method you can be certain that a plugin exists/has been applied without forcing a user/project to use the plugin.
The Jib plugin offers a single extension and a variety of tasks.
Configuring the extension can be done with the following:
class JibConfigPlugin : Plugin<Project> {
    override fun apply(project: Project) {
        project.pluginManager.withPlugin("com.google.cloud.tools.jib") {
            project.extensions.configure<JibExtension> {
                // Example configuring the `container`
                container {
                    creationTime = "USE_CURRENT_TIMESTAMP"
                }
            }
        }
    }
}
Looking over the source of the Gradle plugin for Jib, the authors used lazy configuration for the tasks, so it is best to also use the same method to configure those tasks.
For example, to configure the jib task:
class JibConfigPlugin : Plugin<Project> {
    override fun apply(project: Project) {
        project.pluginManager.withPlugin("com.google.cloud.tools.jib") {
            project.tasks.named<BuildImageTask>("jib") {
                to {
                    setTargetImage("my_acr_name.azurecr.io/my-app")
                }
            }
        }
    }
}
The above uses the named method, which returns a TaskProvider.
Then simply apply your plugin as documented here: https://guides.gradle.org/writing-gradle-plugins/#apply_the_plugin_to_the_host_project
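As an aside, the properties file in the buildSrc tree above is what gives the plugin its id: Gradle maps META-INF/gradle-plugins/pt.branden.brandenportal.properties to the plugin id pt.branden.brandenportal, and the file's single line points at the implementation class (assuming JibConfigPlugin lives in the default package, as in Docker.kt):

implementation-class=JibConfigPlugin

The host project can then apply it with plugins { id("pt.branden.brandenportal") }.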
Source of the build.gradle.kts I used to test:
plugins {
    `kotlin-dsl`
}

repositories {
    gradlePluginPortal()
}

dependencies {
    implementation("gradle.plugin.com.google.cloud.tools:jib-gradle-plugin:1.7.0")
}

How to persist enums as ordinals with Spring Boot and Cassandra?

To the enum field of my entity I have added @CassandraType(type = DataType.Name.INT). However, the string representation of the enum, not its ordinal, is used in the statement sent to Cassandra. Thus I get the following error:
org.springframework.data.cassandra.CassandraInvalidQueryException: SessionCallback; CQL [INSERT INTO thing (thing_id,some_enum) VALUES (1,'Foo');]; Expected 4 or 0 byte int (3); nested exception is com.datastax.driver.core.exceptions.InvalidQueryException: Expected 4 or 0 byte int (3)
Below you can find a minimal example, reproducing the problem.
What am I doing wrong?
test/src/main/kotlin/enumtest/Application.kt
package enumtest

import org.springframework.boot.autoconfigure.SpringBootApplication
import org.springframework.boot.runApplication

@SpringBootApplication
class Application

fun main(args: Array<String>) {
    runApplication<Application>(*args)
}
test/src/main/kotlin/enumtest/SomeEnum.kt
package enumtest

enum class SomeEnum {
    Foo,
    Bar
}
test/src/main/kotlin/enumtest/Thing.kt
package enumtest

import com.datastax.driver.core.DataType
import org.springframework.data.cassandra.core.cql.PrimaryKeyType
import org.springframework.data.cassandra.core.mapping.CassandraType
import org.springframework.data.cassandra.core.mapping.Column
import org.springframework.data.cassandra.core.mapping.PrimaryKeyColumn
import org.springframework.data.cassandra.core.mapping.Table

@Table("thing")
@Suppress("unused")
class Thing(
    @PrimaryKeyColumn(name = "thing_id", ordinal = 0, type = PrimaryKeyType.PARTITIONED)
    var thingId: Long,

    @CassandraType(type = DataType.Name.INT)
    @Column("some_enum")
    var someEnum: SomeEnum
)
test/src/main/kotlin/enumtest/ThingRepository.kt
package enumtest

import org.springframework.data.cassandra.repository.CassandraRepository
import org.springframework.stereotype.Repository

@Repository
interface ThingRepository : CassandraRepository<Thing, Long>
test/src/main/resources/application.yml
spring:
  data:
    cassandra:
      contact-points: localhost
      port: 9142
      keyspace_name: enumtest
test/src/test/kotlin/enumtest/PersistenceTest.kt
package enumtest

import org.cassandraunit.spring.CassandraDataSet
import org.cassandraunit.spring.CassandraUnitDependencyInjectionTestExecutionListener
import org.cassandraunit.spring.EmbeddedCassandra
import org.junit.Assert
import org.junit.Test
import org.junit.runner.RunWith
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.test.context.TestExecutionListeners
import org.springframework.test.context.junit4.SpringRunner

@RunWith(SpringRunner::class)
@SpringBootTest
@TestExecutionListeners(
    listeners = [CassandraUnitDependencyInjectionTestExecutionListener::class],
    mergeMode = TestExecutionListeners.MergeMode.MERGE_WITH_DEFAULTS
)
@CassandraDataSet(value = ["cql/cassandra_schema.cql"], keyspace = "enumtest")
@EmbeddedCassandra
class PersistenceTest {

    @Autowired
    lateinit var thingRepository: ThingRepository

    @Test
    fun `test save`() {
        thingRepository.save(Thing(1, SomeEnum.Foo))
        val things = thingRepository.findAll()
        Assert.assertEquals(1, things.size)
        val thing = things[0]
        Assert.assertEquals(SomeEnum.Foo, thing.someEnum)
    }
}
test/src/test/resources/cql/cassandra_schema.cql
CREATE KEYSPACE IF NOT EXISTS enumtest
  WITH REPLICATION = {'class':'SimpleStrategy', 'replication_factor':1};

CREATE TABLE IF NOT EXISTS enumtest.thing (
  thing_id bigint,
  some_enum int,
  PRIMARY KEY (thing_id)
);
test/build.gradle
plugins {
    id 'org.springframework.boot' version '2.1.4.RELEASE'
    id 'org.jetbrains.kotlin.jvm' version '1.3.30'
    id 'org.jetbrains.kotlin.plugin.spring' version '1.3.30'
}

apply plugin: 'io.spring.dependency-management'

group = 'com.example'
version = '0.0.1-SNAPSHOT'
sourceCompatibility = '1.8'

repositories {
    mavenCentral()
    maven { url "https://repository.apache.org/snapshots/" }
}

dependencies {
    implementation group: 'org.springframework.boot', name: 'spring-boot-starter'
    implementation group: 'org.springframework.boot', name: 'spring-boot-starter-data-cassandra'
    implementation group: 'org.jetbrains.kotlin', name: 'kotlin-stdlib-jdk8'
    implementation group: 'org.jetbrains.kotlin', name: 'kotlin-reflect'
    testImplementation group: 'org.cassandraunit', name: 'cassandra-unit-spring', version: '3.5.0.1'
    testImplementation group: 'org.springframework.boot', name: 'spring-boot-starter-test'
}

compileKotlin {
    kotlinOptions {
        freeCompilerArgs = ['-Xjsr305=strict']
        jvmTarget = '1.8'
    }
}

compileTestKotlin {
    kotlinOptions {
        freeCompilerArgs = ['-Xjsr305=strict']
        jvmTarget = '1.8'
    }
}
Here is the full version of the minimal example as a download, to facilitate experimentation: https://drive.google.com/open?id=1zzIDhbWycaj4WXrze2sAmw8xRPacA8Js
Edit: Since it seems to be a bug, I just opened a Jira issue.
I've been trying to get this working for quite a while, and it seems I finally got it!
I was running into the same issue you were with the codec... I have no idea why that's not working. According to the documentation, you were doing it exactly right.
So I implemented my own Cassandra write converter. See below:
@Configuration
class CassandraConfig(val cluster: Cluster) {

    @Bean
    fun setCustomCassandraConversions() =
        CassandraCustomConversions(listOf(EnumWriteConverter.INSTANCE, EnumReadConverter.INSTANCE))

    @WritingConverter
    enum class EnumWriteConverter : Converter<Enum<MyEnum>, Int> {
        INSTANCE;

        override fun convert(source: Enum<MyEnum>) = source.ordinal
    }

    @ReadingConverter
    enum class EnumReadConverter : Converter<Int, Enum<MyEnum>> {
        INSTANCE;

        override fun convert(source: Int) = MyEnum.values()[source]
    }
}
On every write to Cassandra, this converts every enum it sees of type MyEnum to an Int using the overridden converter. It also opens up the possibility of having multiple converters for different enum types, in case you want to write other custom values instead of always converting enums the same way; see the sketch below.
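For instance, a second pair of converters for a hypothetical OtherEnum (the enum and its stored representation are made up for illustration) could be registered alongside the first two in the same CassandraCustomConversions list:

@WritingConverter
enum class OtherEnumWriteConverter : Converter<OtherEnum, String> {
    INSTANCE;

    // Store the constant's name rather than its ordinal.
    override fun convert(source: OtherEnum) = source.name
}

@ReadingConverter
enum class OtherEnumReadConverter : Converter<String, OtherEnum> {
    INSTANCE;

    override fun convert(source: String) = OtherEnum.valueOf(source)
}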
Hope this works!
EDIT
Note the change removing the { } after INSTANCE on each converter, and the registration of the ReadingConverter with the CassandraCustomConversions.
This is fixed since Spring Boot version 2.1.5.
However, in Kotlin the @CassandraType annotation needs to be placed on the getter explicitly, because otherwise it is not seen at runtime.
In practice this simply means replacing this:

@CassandraType(type = DataType.Name.INT)
var someEnum: SomeEnum

with that:

@get:CassandraType(type = DataType.Name.INT)
var someEnum: SomeEnum

Separate Gradle source set for integration tests using Kotlin DSL

I'm working on a Spring Boot application implemented in Kotlin, and would like to migrate the Gradle build to use the Gradle Kotlin DSL.
The one thing I cannot figure out is how to set up a separate source set and task for my integration tests.
My source tree looks like this:
src
├── integrationTest
│ ├── kotlin
│ └── resources
├── main
│ ├── kotlin
│ └── resources
└── test
├── kotlin
└── resources
And the source set and task are set up like this with Gradle's Groovy DSL:
// build.gradle
sourceSets {
    integrationTest {
        kotlin {
            compileClasspath += sourceSets.main.output + configurations.testRuntimeClasspath
            runtimeClasspath += output + compileClasspath
        }
    }
}

configurations {
    integrationTestCompile.extendsFrom testCompile
    integrationTestRuntime.extendsFrom testRuntime
}

task integrationTest(type: Test, dependsOn: []) {
    testClassesDirs = sourceSets.integrationTest.output.classesDirs
    classpath = sourceSets.integrationTest.runtimeClasspath
}
I've found many examples for using the Gradle Kotlin DSL, and for additional source sets - but nothing for the combination.
Can anyone help?
Here's how you can translate the Groovy script to the Kotlin DSL:
java {
    sourceSets {
        val integrationTest by creating {
            kotlin.apply {
                compileClasspath += sourceSets["main"].output + configurations.testRuntimeClasspath
                runtimeClasspath += output + compileClasspath
            }
        }
    }
}

configurations["integrationTestCompile"].extendsFrom(configurations["testCompile"])
configurations["integrationTestRuntime"].extendsFrom(configurations["testRuntime"])

val integrationTest by tasks.creating(Test::class) {
    val integrationTestSourceSet = java.sourceSets["integrationTest"]
    testClassesDirs = integrationTestSourceSet.output.classesDirs
    classpath = integrationTestSourceSet.runtimeClasspath
}
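If you also want the integration tests to run as part of the normal build, a common follow-up (a one-line sketch using the integrationTest value declared above) is to hook the task into the check lifecycle task:

tasks["check"].dependsOn(integrationTest)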
Also see: the Migrating build logic from Groovy to Kotlin guide by Gradle
