Spring Boot multiple MongoDB configuration - spring

I have looked everywhere for this and it seems like I cannot find a solution that works. I am using spring boot 1.5.10-RELEASE. I am trying to configure two different mongodb instances in the same application. Here is my code:
Main Application:
#SpringBootApplication(exclude = {MongoAutoConfiguration.class})
#ComponentScan("com.reef.reports")
public class MainApplication
{
public static void main(String[] args)
{
SpringApplication.run(MainApplication.class, args);
}
}
1st Instance
#Configuration
#EnableMongoRepositories(basePackages = {"com.reef.repository.mongousa"} , mongoTemplateRef = "USAMongo")
public class MongoUsaConfig
{
#Value("${usa.mongodb.host}")
private String host;
#Value("${usa.mongodb.database:reef}")
private String database;
#Value("${usa.mongodb.port:27017}")
private int port;
#Value("${usa.mongodb.username:}")
private String username;
#Value("${usa.mongodb.password:}")
private String password;
#Value("${usa.mongodb.authdb:}")
private String authdb;
private final List<MongoCredential> credentials = new ArrayList<>();
private final List<ServerAddress> hosts = new ArrayList<>();
/**
* Method that creates MongoDbFactory
* Common to both of the MongoDb connections
*/
public MongoDbFactory mongoDbFactory()
{
return new SimpleMongoDbFactory(getMongoClient(), database);
}
/**
* Method that creates MongoClient
*/
#Bean(name = "USAClient")
public MongoClient getMongoClient()
{
if ((null != username)&&(!username.isEmpty()))
{
hosts.add(new ServerAddress(host, port));
credentials.add(MongoCredential.createMongoCRCredential(username, authdb, password.toCharArray()));
return new MongoClient(hosts, credentials);
}
else
{
return new MongoClient(host, port);
}
}
#Primary
#Bean(name = "USAMongo")
public MongoTemplate getMongoTemplate()
{
return new MongoTemplate(mongoDbFactory());
}
}
2nd Instance
#Configuration
#EnableMongoRepositories(basePackages = {"com.reef.repository.mongocan"} , mongoTemplateRef = "CANMongo")
public class MongoCanConfig
{
#Value("${can.mongodb.host}")
private String host;
#Value("${can.mongodb.database:reef}")
private String database;
#Value("${can.mongodb.port:27017}")
private int port;
#Value("${can.mongodb.username:}")
private String username;
#Value("${can.mongodb.password:}")
private String password;
#Value("${can.mongodb.authdb:}")
private String authdb;
private final List<MongoCredential> credentials = new ArrayList<>();
private final List<ServerAddress> hosts = new ArrayList<>();
/**
* Method that creates MongoDbFactory
* Common to both of the MongoDb connections
*/
public MongoDbFactory mongoDbFactory()
{
return new SimpleMongoDbFactory(getMongoClient(), database);
}
/**
* Method that creates MongoClient
*/
#Bean(name = "CANClient")
public MongoClient getMongoClient()
{
if ((null != username)&&(!username.isEmpty()))
{
hosts.add(new ServerAddress(host, port));
credentials.add(MongoCredential.createMongoCRCredential(username, authdb, password.toCharArray()));
return new MongoClient(hosts, credentials);
}
else
{
return new MongoClient(host, port);
}
}
#Bean(name = "CANMongo")
public MongoTemplate getMongoTemplate()
{
return new MongoTemplate(mongoDbFactory());
}
}
When I run the application, it will run the configuration for the first instance. However, it will not pick up the second instance. I have put in breakpoints to debug and it never hits the breakpoint in the configuration. The repositories in this package get loaded correctly:
com.reef.repository.mongousa
The errors happen with the repositories in this package:
com.reef.repository.mongocan
Please let me know what I am missing. Why does one config work and the other does not?

Refer to this blog post: https://medium.com/@joeclever/using-multiple-datasources-with-spring-boot-and-spring-data-6430b00c02e7
I tried it out for two MySQL DataBases and it is working fine.

Related

AbstractRoutingDataSource - unable to change datasource dynamically

I am using AbstractRoutingDataSource so as to have multiple database connections, but even after changing the contextHolder the database remains the same.
Within a single API call it works perfectly fine, but if I try to switch the datasource in the middle of the same API call, the switch does not happen. If I make two separate API calls with different contextHolder values, it works.
#GetMapping("/getdata2")
public ClaimAmountAndTCD getDetails2() {
ClaimAmountAndTCD claimAmountAndTCD2 = serviceCls.getBook2Data();
System.out.println(claimAmountAndTCD2);
return claimAmountAndTCD2;
}
#GetMapping("/getdata")
public ClaimAmountAndTCD getDetails() {
ClaimAmountAndTCD claimAmountAndTCD = serviceCls.getBook1Data();
// ClaimAmountAndTCD claimAmountAndTCD2 = serviceCls.getBook2Data();
System.out.println(claimAmountAndTCD);
return claimAmountAndTCD;
}
below is my database config code
#Configuration
#Profile("dev")
#EnableTransactionManagement
#EnableJpaRepositories(entityManagerFactoryRef = "as400EntityManager", transactionManagerRef = "as400TransactionManager", basePackages = {
"com.spring.demo.repository" })
public class DevDataSourceConfig {
#Value("${db.gmrp.sxcd1.servername}")
private String servername;
#Value("${db.gmrp.sxcd1.username}")
private String username;
#Value("${db.gmrp.sxcd1.password}")
private String password;
#Value("${db.gmrp.sxcd1.libraries}")
private String libraries;
#Value("${db.naming}")
private String dbNaming;
#Value("${db.driver}")
private String driver;
#Value("${db.isolation}")
private String dbIsolation;
#Bean
public AS400JDBCDataSource dataSourceDevSXCD1() {
AS400JDBCDataSource as400DataSource = new AS400JDBCDataSource();
System.out.println(driver + " " + libraries + " " + username + " " + password);
as400DataSource.setDriver(driver);
as400DataSource.setLibraries(libraries);
as400DataSource.setServerName(servername);
as400DataSource.setNaming(dbNaming);
as400DataSource.setTransactionIsolation(dbIsolation);
if (password != null && !password.isEmpty() && password.trim().length() > 0) {
as400DataSource.setUser(username);
as400DataSource.setPassword(password);
}
return as400DataSource;
}
#Bean(name = "as400dev")
#Primary
public DataSource dataSource() {
AbstractRoutingDataSource dataSource = new RoutingDataSource();
Map<Object, Object> resolvedDataSources = new HashMap<>();
resolvedDataSources.put(DbType.DEVSXCD1, dataSourceDevSXCD1());
dataSource.setDefaultTargetDataSource(dataSourceDevSXCD1()); // default
dataSource.setTargetDataSources(resolvedDataSources);
return dataSource;
}
#Bean(name = "as400EntityManager")
#Primary
public LocalContainerEntityManagerFactoryBean as400DevEntityManager(EntityManagerFactoryBuilder builder,
#Qualifier("as400dev") DataSource as400DataSource) {
return builder.dataSource(as400DataSource).packages("com.spring.demo.entity").build();
}
#Bean(name = "as400TransactionManager")
public PlatformTransactionManager as400DevTransactionManager(
#Qualifier("as400EntityManager") EntityManagerFactory entityManager) {
JpaTransactionManager transactionManager = new JpaTransactionManager();
transactionManager.setEntityManagerFactory(entityManager);
return transactionManager;
}
}
DbContextHoler class
package com.spring.demo.config;

/**
 * Thread-local holder for the datasource routing key consumed by
 * RoutingDataSource.determineCurrentLookupKey(). Callers must clear the key
 * (clearDbType) when done to avoid leaking state across pooled threads.
 */
public final class DbContextHolder {

    private DbContextHolder() {
        // utility class - no instances
    }

    private static final ThreadLocal<DbType> contextHolder = new ThreadLocal<>();

    /**
     * Sets the routing key for the current thread.
     *
     * @throws NullPointerException if dbType is null (message added; the
     *         original threw a bare NPE with no context)
     */
    public static void setDbType(DbType dbType) {
        if (dbType == null) {
            throw new NullPointerException("dbType must not be null");
        }
        contextHolder.set(dbType);
    }

    /** Returns the routing key for the current thread, or null if none set. */
    public static DbType getDbType() {
        return contextHolder.get();
    }

    /** Removes the routing key from the current thread. */
    public static void clearDbType() {
        contextHolder.remove();
    }
}
AbstractRoutingDataSource
package com.spring.demo.config;
import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;
public class RoutingDataSource extends AbstractRoutingDataSource {
#Override
protected Object determineCurrentLookupKey() {
return DbContextHolder.getDbType();
}
}

Spring Boot MongoDB methods do not roll back transactions

Hi i am using 2 mongo template in my codebase
and my code saves data in MongoDB using both mongo templates and the Spring repository class.save() method. In methods annotated with @Transactional, DB changes are not rolled back even if an error occurs during code execution.
Below is the snippet of my mongoconfig configurations
#Configuration
#EnableMongoAuditing
#EnableMongoRepositories
public class MongoConfig extends AbstractMongoClientConfiguration {
#Value("${common.db.name}")
private String dbName;
#Value("${SPRING_DATA_MONGODB_URI}")
private String dbUrl;
public String getDbName() {
return dbName;
}
public void setDbName(String dbName) {
this.dbName = dbName;
}
#Override
protected String getDatabaseName() {
return dbName;
}
/**
* Below bean is created as we want to make our code transactional.
* #param dbFactory
* #return
*/
#Bean(name="primaryTransactionManager")
MongoTransactionManager transactionManager(MongoDatabaseFactory dbFactory) {
MongoTransactionManager transactionManager = new MongoTransactionManager(dbFactory);
transactionManager.setRollbackOnCommitFailure(true);
return transactionManager;
}
#Override
public MongoClient mongoClient() {
ConnectionString connectionString = new ConnectionString(dbUrl);
MongoClientSettings mongoClientSettings = MongoClientSettings.builder().applyConnectionString(connectionString).build();
return MongoClients.create(mongoClientSettings);
}
#Bean(name="primaryMongoTemplate")
public MongoTemplate mongoTemplate() {
return new MongoTemplate(mongoClient(), getDbName());
}
}
#Configuration
#EnableMongoRepositories
public class SecondaryMongoConfig {
#Value("${common.secondary.db.name}")
private String dbName;
#Value("${COMMON_SECONDARY_SPRING_DATA_MONGODB_URI}")
private String mongoDBURI;
public String getMongoDBURI() {
return mongoDBURI;
}
public void setMongoDBURI(String mongoDBURI) {
this.mongoDBURI = mongoDBURI;
}
public String getDbName() {
return dbName;
}
public void setDbName(String dbName) {
this.dbName = dbName;
}
/**
* Below bean is created as we want to make our code transactional.
* #param dbFactory
* #return
*/
#Bean(name="secondaryTransactionManager")
MongoTransactionManager transactionManager(MongoDatabaseFactory dbFactory) {
MongoTransactionManager transactionManager = new MongoTransactionManager(dbFactory);
transactionManager.setRollbackOnCommitFailure(true);
return new MongoTransactionManager(dbFactory);
}
#Bean(name="secondaryMongoClient")
public MongoClient secondaryMongoClient() {
ConnectionString connectionString = new ConnectionString(getMongoDBURI());
MongoClientSettings mongoClientSettings = MongoClientSettings.builder()
.applyConnectionString(connectionString)
.build();
return MongoClients.create(mongoClientSettings);
}
#Bean(name="secondaryMongoTemplate")
public MongoTemplate secondaryMongoTemplate() {
return new MongoTemplate(secondaryMongoClient(), getDbName());
}
}
Below is my sample code snippet of method annoted with #Transactional
#Override
#Transactional(transactionManager = "primaryTransactionManager",rollbackFor = {Exception.class})
public String addNewRecipe() throws Exception{
RecipesDAO recipesDao = new RecipesDAO();
recipesDao.setResourceId(UUID.randomUUID().toString());
recipeRepository.save(recipesDao);
throw new Exception();
// return "dummy";
}
I found the solution after checking each and every method of the classes used in my MongoConfig.java. Below is the snippet of code I had to use in order to roll back transactions when exceptions occurred during the code execution flow:
#Bean(name="primaryMongoTemplate")
public MongoTemplate mongoTemplate() {
MongoTemplate primaryMongoTemplate = new MongoTemplate(mongoClient(), getDbName());
primaryMongoTemplate.setSessionSynchronization(SessionSynchronization.ALWAYS);
return primaryMongoTemplate;
}
#Bean(name="secondaryMongoTemplate")
public MongoTemplate secondaryMongoTemplate() {
MongoTemplate secondaryMongoTemplate = new MongoTemplate(secondaryMongoClient(), getDbName());
secondaryMongoTemplate.setSessionSynchronization(SessionSynchronization.ALWAYS);
return secondaryMongoTemplate;
}
Earlier, wherever there was code creating or modifying MongoDB documents (for example somerepository.save(s)), the documents used to be created or modified immediately. By adding primaryMongoTemplate.setSessionSynchronization(SessionSynchronization.ALWAYS); and secondaryMongoTemplate.setSessionSynchronization(SessionSynchronization.ALWAYS);,
the code waits for the complete execution flow and checks whether any exception occurred in it. Documents are created or modified only after verifying that no exception occurred in the execution flow.

Spring's #Cacheable unable to cache data - Redis implementation

I am trying to use spring cache abstraction with Redis cache. I am unable to see the values in cache. Please help me if I am missing something in config :
Since the actual fetch happens every time I make the call, the cache is clearly not being used. I also tried connecting directly to the same Redis host and port, and I cannot find any keys there either.
PFB the implementation details.
CacheUtils.java :
#Slf4j
public class CacheUtils {
private final CustomerManagementClient customerManagementClient;
#Autowired
public CacheUtils(CustomerManagementClient customerManagementClient) {
this.customerManagementClient = customerManagementClient;
}
#Cacheable(value = "merchant-details", key = "#merchantEntityId")
public MerchantDetails getOrFetchMerchantDetails(OrderItemStatusChangeEvent event, MerchantType merchantType, String merchantEntityId) {
if (BUYER == merchantType) {
log.info("test - get buyer details");
CustomerDetails customerDetails =
customerManagementClient.getData(merchantEntityId);
String businessId = customerDetails.getBusinessId();
String phoneNumber = customerDetails.getPhoneNumber();
return MerchantDetails
.builder()
.merchantEntityId(merchantEntityId)
.businessId(businessId)
.businessName(customerDetails.getBusinessName())
.merchantType(merchantType)
.contactNumber(phoneNumber)
.build();
}
throw new InvalidInputException();
}
}
MainClass.java
#Slf4j
#Component
public class MainClass implements LogisticsPlanningService {
private final CacheUtils cacheUtils;
#Autowired
public LogisticsPlanningServiceImpl(CacheUtils cacheUtils) {
this.cacheUtils = cacheUtils;
}
private Set<LogisticsPlanningRequest> testMethod(Event event) {
MerchantDetails senderDetails = cacheUtils.getOrFetchMerchantDetails(event, SELLER, orderItem.getSellerId());
MerchantDetails receiverDetails = cacheUtils.getOrFetchMerchantDetails(event, BUYER, orderItem.getBuyerId());
}
}
RedisConfiguration.java
#Configuration
#EnableCaching
public class RedisConfiguration {
private String hostName;
private int port;
#Autowired
MarketPlaceServiceProperties properties;
#PostConstruct
public void init() {
hostName = properties.getRedisHostName();
port = Integer.parseInt(properties.getRedisPort());
}
#Bean
protected JedisConnectionFactory jedisConnectionFactory() {
RedisStandaloneConfiguration configuration = new RedisStandaloneConfiguration(hostName, port);
JedisConnectionFactory factory = new JedisConnectionFactory(configuration);
factory.afterPropertiesSet();
return factory;
}
public RedisCacheConfiguration getTestCacheConfig() {
RedisCacheConfiguration cacheConfiguration = RedisCacheConfiguration.defaultCacheConfig();
cacheConfiguration.prefixCacheNameWith("marketplace");
cacheConfiguration.disableCachingNullValues();
return cacheConfiguration;
}
// #Bean
// public RedisTemplate<String, Object> redisTemplate() {
// final RedisTemplate<String, Object> redisTemplate = new RedisTemplate<String, Object>();
// redisTemplate.setKeySerializer(new StringRedisSerializer());
// redisTemplate.setHashKeySerializer(new GenericToStringSerializer<>(Object.class));
// redisTemplate.setHashValueSerializer(new JdkSerializationRedisSerializer());
// redisTemplate.setValueSerializer(new JdkSerializationRedisSerializer());
// redisTemplate.setConnectionFactory(jedisConnectionFactory());
// return redisTemplate;
// }
}
service.properties :
redisHostName: redis.domain.prod.xyz.com
redisPort: 5400

Set Spring SolrDocument Collection name based on PropertyValue

I want to set values Spring SolrDocument Collection based on application.yml value.
#Data
#SolrDocument(collection = #Value("${solr.core}"))
public class SearchableProduct {
}
Hoi Michela,
Ok, I had the same Problem and I found a solution: SpEL
it is described in details here:Spring Data for Apache Solr
you have to add the EL-expression to the Annotation
// The collection name is a SpEL expression evaluated at runtime against the
// serverSolrContext bean ("#{#...}" is genuine SpEL syntax, not a typo).
@SolrDocument(collection = "#{#serverSolrContext.getCollectionName()}")
public class SOLREntity implements Serializable {
.....
}
you have to provide the serverSolrContext bean with the method getCollectionName().
#Value("${solr.core}")
private String core;
public String getCollectionName() {
return core;
}
you have to add the following core entry to your application.properties:
solr.core=myOwnCoreName
That's it actually, BUT
if you get the following Exception, so as I did:
org.springframework.expression.spel.SpelEvaluationException: EL1057E: No bean resolver registered in the context to resolve access to bean
You have to have the following in your Configuration Bean
#Configuration
#EnableSolrRepositories(basePackages = { "de.solr.db" })
#Profile("default")
#PropertySource("classpath:application.properties")
public class ServerSolrContext extends AbstractSolrConfiguration {
private static final Logger logger = LoggerFactory.getLogger(ServerSolrContext.class);
#Resource
private Environment environment;
#Value("${solr.core}")
private String core;
public String getCollectionName() {
return core;
}
#PostConstruct
public void init() {
System.out.println(core);
}
#Bean
public SolrClient solrClient() {
String url = environment.getProperty("solr.server.url");
String user = environment.getProperty("solr.server.user");
String password = environment.getProperty("solr.server.password");
CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
credentialsProvider.setCredentials(new AuthScope(AuthScope.ANY_HOST, AuthScope.ANY_PORT),
new UsernamePasswordCredentials(user, password));
SSLContext sslContext = null;
try {
sslContext = ReportConfiguration.getTrustAllContext();
}
catch (KeyManagementException | NoSuchAlgorithmException e) {
e.printStackTrace();
}
LayeredConnectionSocketFactory sslSocketFactory = new SSLConnectionSocketFactory(sslContext);
HttpClient httpClient = HttpClientBuilder.create().setSSLSocketFactory(sslSocketFactory)
.addInterceptorFirst(new PreemptiveAuthInterceptor()).setDefaultCredentialsProvider(credentialsProvider)
.build();
SolrClient client = new HttpSolrClient.Builder().withHttpClient(httpClient).withBaseSolrUrl(url).build();
return client;
}
#Bean
#ConditionalOnMissingBean(name = "solrTemplate")
public SolrTemplate solrTemplate(#Qualifier("mySolrTemplate") SolrTemplate solrTemplate) {
return solrTemplate;
}
#Bean("mySolrTemplate")
public SolrTemplate mySolrTemplate(SolrClient solrClient, SolrConverter solrConverter) {
return new SolrTemplate(new HttpSolrClientFactory(solrClient), solrConverter);
}
#Override
public SolrClientFactory solrClientFactory() {
return new HttpSolrClientFactory(solrClient());
}
}
The last 3 Methods are doing the Trick, that cost me a while to find the right solution:
it is here, so actually I was lucky to find this:
Allow PropertyPlaceholders in #SolrDocument solrCoreName

How to copy a file from an FTP server to a local directory using SFTP with Spring Boot (Java)

I have codes for Inbound and Outbound channel adapter over SFTP. I want to call those method via spring boot scheduler not using polling. Looking for example how to call resultFileHandler() method
public class SftpConfig {
#Value("${nodephone.directory.sftp.host}")
private String sftpHost;
#Value("${nodephone.directory.sftp.port}")
private int sftpPort;
#Value("${nodephone.directory.sftp.user}")
private String sftpUser;
#Value("${nodephone.directory.sftp.password}")
private String sftpPasword;
#Value("${nodephone.directory.sftp.remote.directory.download}")
private String sftpRemoteDirectoryDownload;
#Value("${nodephone.directory.sftp.remote.directory.upload}")
private String sftpRemoteDirectoryUpload;
#Value("${nodephone.directory.sftp.remote.directory.filter}")
private String sftpRemoteDirectoryFilter;
#Value("${nodephone.directory.sftp.remote.directory.localDirectory}")
private String sftpLocalDirectory;
// private FtpOrderRequestHandler handler;
#Bean
public SessionFactory<LsEntry> sftpSessionFactory() {
DefaultSftpSessionFactory factory = new DefaultSftpSessionFactory(true);
factory.setHost(sftpHost);
factory.setPort(sftpPort);
factory.setUser(sftpUser);
factory.setPassword(sftpPasword);
factory.setAllowUnknownKeys(true);
return new CachingSessionFactory<LsEntry>(factory);
}
#Bean
public SftpInboundFileSynchronizer sftpInboundFileSynchronizer() {
SftpInboundFileSynchronizer fileSynchronizer = new SftpInboundFileSynchronizer(sftpSessionFactory());
fileSynchronizer.setDeleteRemoteFiles(true);
fileSynchronizer.setRemoteDirectory(sftpRemoteDirectoryDownload);
fileSynchronizer.setFilter(new SftpSimplePatternFileListFilter(sftpRemoteDirectoryFilter));
return fileSynchronizer;
}
#Bean
#InboundChannelAdapter(channel = "fromSftpChannel", poller = #Poller(cron = "0/5 * * * * *"))
public MessageSource<File> sftpMessageSource() {
SftpInboundFileSynchronizingMessageSource source = new SftpInboundFileSynchronizingMessageSource(
sftpInboundFileSynchronizer());
source.setAutoCreateLocalDirectory(true);
source.setLocalFilter(new AcceptOnceFileListFilter<File>());
source.setLocalDirectory(new File("/local"));
return source;
}
#Bean
#ServiceActivator(inputChannel = "fromSftpChannel")
public MessageHandler resultFileHandler() {
return new MessageHandler() {
#Override
public void handleMessage(Message<?> message) throws MessagingException {
System.out.println("********************** " + message.getPayload());
}
};
}
I have tested with Configuration annotation and it reads the file from the server, but I want to run this from Cron instead of polling, how do I call the method resultFileHandler()
I've never done this using Spring Integration in any production code although I did something like below, to download files from remote servers using sftp/ftp.
I'm only using the SftpOutboundGateway (there could be better ways), to call the "mget" method and fetch the payload (file).
#Configuration
#ConfigurationProperties(prefix = "sftp")
#Setter
#Getter
#EnableIntegration
public class RemoteFileConfiguration {
private String clients;
private String hosts;
private int ports;
private String users;
private String passwords;
#Bean(name = "clientSessionFactory")
public SessionFactory<LsEntry> clientSessionFactory() {
DefaultSftpSessionFactory sf = new DefaultSftpSessionFactory();
sf.setHost(hosts);
sf.setPort(ports);
sf.setUser(users);
sf.setPassword(passwords);
sf.setAllowUnknownKeys(true);
return new CachingSessionFactory<>(sf);
}
#Bean
#ServiceActivator(inputChannel = "sftpChannel")
public MessageHandler clientMessageHandler() {
SftpOutboundGateway sftpOutboundGateway = new SftpOutboundGateway(
clientSessionFactory(), "mget", "payload");
sftpOutboundGateway.setAutoCreateLocalDirectory(true);
sftpOutboundGateway.setLocalDirectory(new File("/users/localPath/client/INPUT/"));
sftpOutboundGateway.setFileExistsMode(FileExistsMode.REPLACE_IF_MODIFIED);
sftpOutboundGateway.setFilter(new AcceptOnceFileListFilter<>());
return sftpOutboundGateway;
}
}
#MessagingGateway
public interface SFTPGateway {
#Gateway(requestChannel = "sftpChannel")
List<File> get(String dir);
}
To ensure we use cron to execute this, I have used a Tasklet which is executed by Spring Batch, when I need it to be using a cron expression.
#Slf4j
#Getter
#Setter
public class RemoteFileInboundTasklet implements Tasklet {
private RemoteFileTemplate remoteFileTemplate;
private String remoteClientDir;
private String clientName;
private SFTPGateway sftpGateway;
#Override
public RepeatStatus execute(StepContribution stepContribution, ChunkContext chunkContext)
throws Exception {
List<File> files = sftpGateway.get(remoteClientDir);
if (CollectionUtils.isEmpty(files)) {
log.warn("No file was downloaded for client {}.", clientName);
return RepeatStatus.FINISHED;
}
log.info("Total file: {}", files.size());
return RepeatStatus.FINISHED;
}
}
NOTE: If you don't want to use Batch's Tasklet, you can use your #Component class and inject the Gateway to call "get" method.
#Autowired
private SFTPGateway sftpGateway;

Resources