I'm using the following beans to schedule Quartz jobs:
package com.sap.brms.repositoryservice.cp.cf.app.config;
import java.io.IOException;
import java.util.Properties;
import javax.inject.Inject;
import javax.sql.DataSource;
import org.quartz.JobDetail;
import org.quartz.Scheduler;
import org.quartz.SimpleTrigger;
import org.quartz.Trigger;
import org.quartz.spi.JobFactory;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.beans.factory.config.PropertiesFactoryBean;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.ClassPathResource;
import org.springframework.scheduling.quartz.CronTriggerFactoryBean;
import org.springframework.scheduling.quartz.JobDetailFactoryBean;
import org.springframework.scheduling.quartz.SchedulerFactoryBean;
import org.springframework.scheduling.quartz.SimpleTriggerFactoryBean;
@Configuration
public class SchedulerConfig {
@Bean
public JobFactory jobFactory(ApplicationContext applicationContext)
{
AutowiringSpringBeanJobFactory jobFactory = new AutowiringSpringBeanJobFactory();
jobFactory.setApplicationContext(applicationContext);
return jobFactory;
}
@Bean
public Scheduler schedulerFactoryBean(@Qualifier("applicationDataSource") DataSource dataSource, JobFactory jobFactory,
@Qualifier("sampleJobTrigger") Trigger sampleJobTrigger) throws Exception {
SchedulerFactoryBean factory = new SchedulerFactoryBean();
// this allows to update triggers in DB when updating settings in config file:
factory.setOverwriteExistingJobs(true);
factory.setDataSource(dataSource);
factory.setJobFactory(jobFactory);
factory.setQuartzProperties(quartzProperties());
factory.afterPropertiesSet();
Scheduler scheduler = factory.getScheduler();
scheduler.setJobFactory(jobFactory);
scheduler.scheduleJob((JobDetail) sampleJobTrigger.getJobDataMap().get("jobDetail"), sampleJobTrigger);
scheduler.start();
return scheduler;
}
@Bean
public Properties quartzProperties() throws IOException {
PropertiesFactoryBean propertiesFactoryBean = new PropertiesFactoryBean();
propertiesFactoryBean.setLocation(new ClassPathResource("/quartz.properties"));
propertiesFactoryBean.afterPropertiesSet();
return propertiesFactoryBean.getObject();
}
@Bean
public JobDetailFactoryBean sampleJobDetail() {
return createJobDetail(SampleJob.class);
}
@Bean(name = "sampleJobTrigger")
public CronTriggerFactoryBean sampleJobTrigger(@Qualifier("sampleJobDetail") JobDetail jobDetail,
@Value("0 * * ? * *") String cronExpression) {
return createCronTrigger(jobDetail, "0 * * ? * *");
}
private static JobDetailFactoryBean createJobDetail(Class jobClass) {
JobDetailFactoryBean factoryBean = new JobDetailFactoryBean();
factoryBean.setJobClass(jobClass);
// job has to be durable to be stored in DB:
factoryBean.setDurability(true);
return factoryBean;
}
private static SimpleTriggerFactoryBean createTrigger(JobDetail jobDetail, long pollFrequencyMs) {
SimpleTriggerFactoryBean factoryBean = new SimpleTriggerFactoryBean();
factoryBean.setJobDetail(jobDetail);
factoryBean.setStartDelay(0L);
factoryBean.setRepeatInterval(pollFrequencyMs);
factoryBean.setRepeatCount(SimpleTrigger.REPEAT_INDEFINITELY);
// in case of misfire, ignore all missed triggers and continue :
factoryBean.setMisfireInstruction(SimpleTrigger.MISFIRE_INSTRUCTION_RESCHEDULE_NEXT_WITH_REMAINING_COUNT);
return factoryBean;
}
// Use this method for creating cron triggers instead of simple triggers:
private static CronTriggerFactoryBean createCronTrigger(JobDetail jobDetail, String cronExpression) {
CronTriggerFactoryBean factoryBean = new CronTriggerFactoryBean();
factoryBean.setJobDetail(jobDetail);
factoryBean.setCronExpression(cronExpression);
factoryBean.setMisfireInstruction(SimpleTrigger.MISFIRE_INSTRUCTION_FIRE_NOW);
return factoryBean;
}
}
My Quartz properties are as follows:
org.quartz.scheduler.instanceName=spring-boot-quartz-demo
org.quartz.scheduler.instanceId=AUTO
org.quartz.threadPool.threadCount=5
org.quartz.jobStore.class=org.quartz.impl.jdbcjobstore.JobStoreTX
org.quartz.jobStore.driverDelegateClass=org.quartz.impl.jdbcjobstore.StdJDBCDelegate
org.quartz.jobStore.misfireThreshold=60000
org.quartz.jobStore.tablePrefix=RULE_QRTZ_
org.quartz.scheduler.classLoadHelper.class=org.quartz.simpl.ThreadContextClassLoadHelper
org.quartz.jobStore.isClustered=true
org.quartz.jobStore.clusterCheckinInterval=20000
The class SampleJob is:
import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class SampleJob implements Job {
private static Logger logger = LoggerFactory.getLogger(SampleJob.class);
public SampleJob() {}
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
logger.info("Metering Job called");
}
}
I have a Postgres database installed on my local system. When I run the Spring Boot application, it starts up, but the SampleJob is never triggered. The application uses Spring Boot 1.2.5.
I am trying to configure the inbound and outbound adapters for the Manager and Worker beans as provided in the Spring Batch remote partitioning samples. I'm having difficulty since the samples configure them against an AMQP ConnectionFactory.
However, when I follow the Spring Integration samples, there is no class that can provide a ConnectionFactory for Service Bus. Help appreciated.
Below is sample code:
import com.microsoft.azure.spring.integration.core.DefaultMessageHandler;
import com.microsoft.azure.spring.integration.core.api.CheckpointConfig;
import com.microsoft.azure.spring.integration.core.api.CheckpointMode;
import com.microsoft.azure.spring.integration.servicebus.inbound.ServiceBusQueueInboundChannelAdapter;
import com.microsoft.azure.spring.integration.servicebus.queue.ServiceBusQueueOperation;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.integration.partition.RemotePartitioningManagerStepBuilderFactory;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.integration.annotation.IntegrationComponentScan;
import org.springframework.integration.annotation.ServiceActivator;
import org.springframework.integration.channel.DirectChannel;
import org.springframework.integration.dsl.IntegrationFlow;
import org.springframework.integration.dsl.IntegrationFlows;
import org.springframework.messaging.MessageChannel;
import org.springframework.messaging.MessageHandler;
import org.springframework.util.concurrent.ListenableFutureCallback;
@Configuration
@IntegrationComponentScan
public class ManagerConfiguration {
private static final int GRID_SIZE = 3;
private static final String REQUEST_QUEUE_NAME = "digital.intg.batch.cm.request";
private static final String REPLY_QUEUE_NAME = "digital.intg.batch.cm.reply";
private static final String MANAGER_INPUT_CHANNEL = "manager.input";
private static final String MANGER_OUTPUT_CHANNEL = "manager.output";
private static final Log LOGGER = LogFactory.getLog(ManagerConfiguration.class);
private final JobBuilderFactory jobBuilderFactory;
private final RemotePartitioningManagerStepBuilderFactory managerStepBuilderFactory;
public ManagerConfiguration(JobBuilderFactory jobBuilderFactory,
RemotePartitioningManagerStepBuilderFactory managerStepBuilderFactory
) {
this.jobBuilderFactory = jobBuilderFactory;
this.managerStepBuilderFactory = managerStepBuilderFactory;
}
/*
* Configure outbound flow (requests going to workers)
*/
@Bean( name = MANGER_OUTPUT_CHANNEL )
public DirectChannel managerRequests() {
return new DirectChannel();
}
/*
* Configure inbound flow (replies coming from workers)
*/
@Bean( name = MANAGER_INPUT_CHANNEL )
public DirectChannel managerReplies() {
return new DirectChannel();
}
@Bean
public ServiceBusQueueInboundChannelAdapter managerQueueMessageChannelAdapter(
@Qualifier( MANAGER_INPUT_CHANNEL ) MessageChannel inputChannel, ServiceBusQueueOperation queueOperation) {
queueOperation.setCheckpointConfig(CheckpointConfig.builder().checkpointMode(CheckpointMode.MANUAL).build());
ServiceBusQueueInboundChannelAdapter adapter = new ServiceBusQueueInboundChannelAdapter(REPLY_QUEUE_NAME,
queueOperation);
adapter.setOutputChannel(inputChannel);
return adapter;
}
@Bean
@ServiceActivator( inputChannel = MANGER_OUTPUT_CHANNEL )
public MessageHandler managerQueueMessageSender(ServiceBusQueueOperation queueOperation) {
DefaultMessageHandler handler = new DefaultMessageHandler(REQUEST_QUEUE_NAME, queueOperation);
handler.setSendCallback(new ListenableFutureCallback<Void>() {
@Override
public void onSuccess(Void result) {
LOGGER.info("Manager Request Message was sent successfully.");
}
@Override
public void onFailure(Throwable ex) {
LOGGER.info("There was an error sending request message to worker.");
}
});
return handler;
}
@Bean
public IntegrationFlow managerOutboundFlow(MessageHandler managerQueueMessageSender) {
return IntegrationFlows
.from(managerRequests())
.handle(managerQueueMessageSender)
.get();
}
@Bean
public IntegrationFlow managerInboundFlow(ServiceBusQueueInboundChannelAdapter managerQueueMessageChannelAdapter) {
return IntegrationFlows
.from(managerQueueMessageChannelAdapter)
.channel(managerReplies())
.get();
}
/*
* Configure the manager step
*/
@Bean
public Step managerStep() {
return this.managerStepBuilderFactory.get("managerStep")
.partitioner("workerStep", new BasicPartitioner())
.gridSize(GRID_SIZE)
.outputChannel(managerRequests())
.inputChannel(managerReplies())
//.aggregator()
.build();
}
@Bean
public Job remotePartitioningJob() {
return this.jobBuilderFactory.get("remotePartitioningJob")
.start(managerStep())
.build();
}
}
The sample uses ActiveMQ because it is easily embeddable in a JVM for our tests and samples. But you can use any other broker that you want.
?? what should I inject here?
You should inject any dependency required by the queueMessageChannelAdapter handler:
.handle(queueMessageChannelAdapter)
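For context, the missing piece on the other side is a worker configuration mirroring the manager above. The following is only a sketch under a few assumptions: it reuses the Azure Service Bus adapter classes from the question, relies on RemotePartitioningWorkerStepBuilderFactory from spring-batch-integration (which needs @EnableBatchProcessing and @EnableBatchIntegration declared elsewhere), and the bean names and the no-op tasklet are illustrative:
import com.microsoft.azure.spring.integration.core.DefaultMessageHandler;
import com.microsoft.azure.spring.integration.servicebus.inbound.ServiceBusQueueInboundChannelAdapter;
import com.microsoft.azure.spring.integration.servicebus.queue.ServiceBusQueueOperation;
import org.springframework.batch.core.Step;
import org.springframework.batch.integration.partition.RemotePartitioningWorkerStepBuilderFactory;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.integration.annotation.ServiceActivator;
import org.springframework.integration.channel.DirectChannel;
import org.springframework.messaging.MessageHandler;
@Configuration
public class WorkerConfiguration {

    private static final String REQUEST_QUEUE_NAME = "digital.intg.batch.cm.request";
    private static final String REPLY_QUEUE_NAME = "digital.intg.batch.cm.reply";

    private final RemotePartitioningWorkerStepBuilderFactory workerStepBuilderFactory;

    public WorkerConfiguration(RemotePartitioningWorkerStepBuilderFactory workerStepBuilderFactory) {
        this.workerStepBuilderFactory = workerStepBuilderFactory;
    }

    // Partition requests arriving from the manager
    @Bean
    public DirectChannel workerRequests() {
        return new DirectChannel();
    }

    // Step-execution replies going back to the manager
    @Bean
    public DirectChannel workerReplies() {
        return new DirectChannel();
    }

    // Inbound adapter: request queue -> workerRequests channel
    @Bean
    public ServiceBusQueueInboundChannelAdapter workerQueueMessageChannelAdapter(ServiceBusQueueOperation queueOperation) {
        ServiceBusQueueInboundChannelAdapter adapter =
                new ServiceBusQueueInboundChannelAdapter(REQUEST_QUEUE_NAME, queueOperation);
        adapter.setOutputChannel(workerRequests());
        return adapter;
    }

    // Outbound handler: workerReplies channel -> reply queue
    @Bean
    @ServiceActivator(inputChannel = "workerReplies")
    public MessageHandler workerQueueMessageSender(ServiceBusQueueOperation queueOperation) {
        return new DefaultMessageHandler(REPLY_QUEUE_NAME, queueOperation);
    }

    // Worker step: consumes partition requests, runs a (here trivial) tasklet, sends replies
    @Bean
    public Step workerStep() {
        return this.workerStepBuilderFactory.get("workerStep")
                .inputChannel(workerRequests())
                .outputChannel(workerReplies())
                .tasklet((contribution, chunkContext) -> RepeatStatus.FINISHED)
                .build();
    }
}
The inbound adapter feeds partition requests from the request queue into the worker step, and the @ServiceActivator handler sends the step-execution replies back to the reply queue the manager listens on.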
Does anyone have a good example of Spring Batch (using annotations) that caches a reference table so it is accessible to the processor?
I just need a simple cache: run a query that returns some byte[] and keep it in memory while the job is executing.
I'd appreciate any help on this topic.
Thanks!
A JobExecutionListener can be used to populate the cache with reference data before the job is executed and clear the cache after the job is finished.
Here is an example:
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobExecutionListener;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.CacheManager;
import org.springframework.cache.concurrent.ConcurrentMapCacheManager;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
@EnableBatchProcessing
public class MyJob {
private JobBuilderFactory jobs;
private StepBuilderFactory steps;
public MyJob(JobBuilderFactory jobs, StepBuilderFactory steps) {
this.jobs = jobs;
this.steps = steps;
}
@Bean
public CacheManager cacheManager() {
return new ConcurrentMapCacheManager(); // return the implementation you want
}
@Bean
public Tasklet tasklet() {
return new MyTasklet(cacheManager());
}
@Bean
public Step step() {
return steps.get("step")
.tasklet(tasklet())
.build();
}
@Bean
public JobExecutionListener jobExecutionListener() {
return new CachingJobExecutionListener(cacheManager());
}
@Bean
public Job job() {
return jobs.get("job")
.start(step())
.listener(jobExecutionListener())
.build();
}
class MyTasklet implements Tasklet {
private CacheManager cacheManager;
public MyTasklet(CacheManager cacheManager) {
this.cacheManager = cacheManager;
}
@Override
public RepeatStatus execute(StepContribution contribution, ChunkContext chunkContext) throws Exception {
String name = (String) cacheManager.getCache("referenceData").get("foo").get();
System.out.println("Hello " + name);
return RepeatStatus.FINISHED;
}
}
class CachingJobExecutionListener implements JobExecutionListener {
private CacheManager cacheManager;
public CachingJobExecutionListener(CacheManager cacheManager) {
this.cacheManager = cacheManager;
}
@Override
public void beforeJob(JobExecution jobExecution) {
// populate cache as needed. Can use a jdbcTemplate to query the db here and populate the cache
cacheManager.getCache("referenceData").put("foo", "bar");
}
@Override
public void afterJob(JobExecution jobExecution) {
// clear cache when the job is finished
cacheManager.getCache("referenceData").clear();
}
}
public static void main(String[] args) throws Exception {
ApplicationContext context = new AnnotationConfigApplicationContext(MyJob.class);
JobLauncher jobLauncher = context.getBean(JobLauncher.class);
Job job = context.getBean(Job.class);
jobLauncher.run(job, new JobParameters());
}
}
When executed, it prints:
Hello bar
which means data is correctly retrieved from the cache. You would need to adapt the sample to query the database and populate the cache (See comments in code).
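For the database-backed version, the listener's beforeJob can run the query itself. Here is a hedged sketch of such a variant of the CachingJobExecutionListener above (it additionally needs java.util.List, java.util.Map, javax.sql.DataSource and org.springframework.jdbc.core.JdbcTemplate imports; the table and column names are made up):
class CachingJobExecutionListener implements JobExecutionListener {

    private final CacheManager cacheManager;
    private final JdbcTemplate jdbcTemplate;

    CachingJobExecutionListener(CacheManager cacheManager, DataSource dataSource) {
        this.cacheManager = cacheManager;
        this.jdbcTemplate = new JdbcTemplate(dataSource);
    }

    @Override
    public void beforeJob(JobExecution jobExecution) {
        // Load every row of the (hypothetical) reference table into the cache before the job runs
        List<Map<String, Object>> rows =
                jdbcTemplate.queryForList("SELECT ref_key, ref_value FROM reference_data");
        for (Map<String, Object> row : rows) {
            cacheManager.getCache("referenceData").put(row.get("ref_key"), row.get("ref_value"));
        }
    }

    @Override
    public void afterJob(JobExecution jobExecution) {
        // Clear the cache once the job is finished
        cacheManager.getCache("referenceData").clear();
    }
}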
Hope this helps.
You can use the ehcache-jsr107 implementation; it is very quick to set up.
A Spring and Ehcache integration example is available here.
You should be able to set up the same with Spring Batch as well.
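For example, a minimal sketch of plugging a JSR-107 provider such as Ehcache into Spring's cache abstraction might look like this (the cache name is illustrative, and it assumes the ehcache jar with its JCache module plus the javax.cache API on the classpath):
import javax.cache.Caching;
import javax.cache.configuration.MutableConfiguration;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.cache.jcache.JCacheCacheManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
@EnableCaching
public class CacheConfig {

    // Spring CacheManager backed by the JSR-107 provider found on the classpath (e.g. Ehcache 3's jsr107 module)
    @Bean
    public org.springframework.cache.CacheManager cacheManager() {
        javax.cache.CacheManager jCacheManager = Caching.getCachingProvider().getCacheManager();
        jCacheManager.createCache("referenceData", new MutableConfiguration<>());
        return new JCacheCacheManager(jCacheManager);
    }
}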
Hope this helps.
I am working on an application and have been asked to implement a scheduled Spring Batch job. I have set up a configuration file where I declare a ResourcelessTransactionManager @Bean, but it seems to interfere with the persistence.xml.
There is already a persistence.xml in another module and there is no compilation error, but I get a NoUniqueBeanDefinitionException when I request a page that returns a view item.
This is the error:
Caused by: org.springframework.beans.factory.NoUniqueBeanDefinitionException: No qualifying bean of type [org.springframework.transaction.PlatformTransactionManager] is defined: expected single matching bean but found 2: txManager,transactionManager
at org.springframework.beans.factory.support.DefaultListableBeanFactory.getBean(DefaultListableBeanFactory.java:365)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.getBean(DefaultListableBeanFactory.java:331)
at org.springframework.transaction.interceptor.TransactionAspectSupport.determineTransactionManager(TransactionAspectSupport.java:366)
at org.springframework.transaction.interceptor.TransactionAspectSupport.invokeWithinTransaction(TransactionAspectSupport.java:271)
at org.springframework.transaction.interceptor.TransactionInterceptor.invoke(TransactionInterceptor.java:96)
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:179)
at org.springframework.aop.framework.CglibAopProxy$DynamicAdvisedInterceptor.intercept(CglibAopProxy.java:653)
at com.mypackage.services.MyClassService$$EnhancerBySpringCGLIB$$9e8bf16f.registryEvents(<generated>)
at com.mypackage.controllers.MyClassSearchView.init(MyClassSearchView.java:75)
... 168 more
Is there a way to tell Spring Batch to use the data source defined in the persistence.xml of the other module, or is this maybe caused by something else?
I created a separate BatchScheduler class as below and included it in the BatchConfiguration class. I am sharing both classes. BatchConfiguration contains another transaction manager, jpaTransactionManager.
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean;
import org.springframework.batch.core.launch.support.SimpleJobLauncher;
import org.springframework.batch.support.transaction.ResourcelessTransactionManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.EnableScheduling;
@Configuration
@EnableScheduling
public class BatchScheduler {
@Bean
public ResourcelessTransactionManager resourcelessTransactionManager() {
return new ResourcelessTransactionManager();
}
@Bean
public MapJobRepositoryFactoryBean mapJobRepositoryFactory(
ResourcelessTransactionManager resourcelessTransactionManager) throws Exception {
MapJobRepositoryFactoryBean factory = new
MapJobRepositoryFactoryBean(resourcelessTransactionManager);
factory.afterPropertiesSet();
return factory;
}
@Bean
public JobRepository jobRepository(
MapJobRepositoryFactoryBean factory) throws Exception {
return factory.getObject();
}
@Bean
public SimpleJobLauncher jobLauncher(JobRepository jobRepository) {
SimpleJobLauncher launcher = new SimpleJobLauncher();
launcher.setJobRepository(jobRepository);
return launcher;
}
}
BatchConfiguration contains another jpaTransactionManager.
import java.io.IOException;
import java.util.Date;
import java.util.Properties;
import javax.sql.DataSource;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.core.launch.support.RunIdIncrementer;
import org.springframework.batch.item.database.JpaItemWriter;
import org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper;
import org.springframework.batch.item.file.mapping.DefaultLineMapper;
import org.springframework.batch.item.file.transform.DelimitedLineTokenizer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
import org.springframework.core.env.Environment;
import org.springframework.core.io.FileSystemResource;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.jdbc.datasource.DriverManagerDataSource;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.JpaVendorAdapter;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.orm.jpa.vendor.Database;
import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter;
import org.springframework.scheduling.TaskScheduler;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.scheduling.concurrent.ConcurrentTaskScheduler;
import org.springframework.transaction.PlatformTransactionManager;
import trade.api.common.constants.Constants;
import trade.api.entity.SecurityEntity;
import trade.api.trade.batch.item.processor.SecurityItemProcessor;
import trade.api.trade.batch.item.reader.NseSecurityReader;
import trade.api.trade.batch.notification.listener.SecurityJobCompletionNotificationListener;
import trade.api.trade.batch.tasklet.SecurityReaderTasklet;
import trade.api.vo.SecurityVO;
@Configuration
@EnableBatchProcessing
@EnableScheduling
@Import({OhlcMonthBatchConfiguration.class, OhlcWeekBatchConfiguration.class, OhlcDayBatchConfiguration.class, OhlcMinuteBatchConfiguration.class})
public class BatchConfiguration {
private static final String OVERRIDDEN_BY_EXPRESSION = null;
/*
Load the properties
*/
#Value("${database.driver}")
private String databaseDriver;
#Value("${database.url}")
private String databaseUrl;
#Value("${database.username}")
private String databaseUsername;
#Value("${database.password}")
private String databasePassword;
#Autowired
public JobBuilderFactory jobBuilderFactory;
#Autowired
public StepBuilderFactory stepBuilderFactory;
#Autowired
private JobLauncher jobLauncher;
#Bean
public TaskScheduler taskScheduler() {
return new ConcurrentTaskScheduler();
}
//second, minute, hour, day of month, month, day(s) of week
//@Scheduled(cron = "0 0 21 * * 1-5") on week days
@Scheduled(cron="${schedule.insert.security}")
public void importSecuritySchedule() throws Exception {
System.out.println("Job Started at :" + new Date());
JobParameters param = new JobParametersBuilder().addString("JobID",
String.valueOf(System.currentTimeMillis())).toJobParameters();
JobExecution execution = jobLauncher.run(importSecuritesJob(), param);
System.out.println("Job finished with status :" + execution.getStatus());
}
@Bean SecurityJobCompletionNotificationListener securityJobCompletionNotificationListener() {
return new SecurityJobCompletionNotificationListener();
}
//Import Equity OHLC End
//Import Equity Start
// tag::readerwriterprocessor[]
@Bean
public SecurityReaderTasklet securityReaderTasklet() {
return new SecurityReaderTasklet();
}
@Bean
@StepScope
public NseSecurityReader<SecurityVO> nseSecurityReader(@Value("#{jobExecutionContext["+Constants.SECURITY_DOWNLOAD_FILE+"]}") String pathToFile) throws IOException {
NseSecurityReader<SecurityVO> reader = new NseSecurityReader<SecurityVO>();
reader.setLinesToSkip(1);
reader.setResource(new FileSystemResource(pathToFile));
reader.setLineMapper(new DefaultLineMapper<SecurityVO>() {{
setLineTokenizer(new DelimitedLineTokenizer() {{
setNames(new String[] { "symbol", "nameOfCompany", "series", "dateOfListing", "paidUpValue", "marketLot", "isinNumber", "faceValue" });
}});
setFieldSetMapper(new BeanWrapperFieldSetMapper<SecurityVO>() {{
setTargetType(SecurityVO.class);
}});
}});
return reader;
}
@Bean
public SecurityItemProcessor processor() {
return new SecurityItemProcessor();
}
@Bean
public JpaItemWriter<SecurityEntity> writer() {
JpaItemWriter<SecurityEntity> writer = new JpaItemWriter<SecurityEntity>();
writer.setEntityManagerFactory(entityManagerFactory().getObject());
return writer;
}
// end::readerwriterprocessor[]
// tag::jobstep[]
@Bean
public Job importSecuritesJob() throws IOException {
return jobBuilderFactory.get("importSecuritesJob")
.incrementer(new RunIdIncrementer())
.listener(securityJobCompletionNotificationListener())
.start(downloadSecurityStep())
.next(insertSecurityStep())
.build();
}
@Bean
public Step downloadSecurityStep() throws IOException {
return stepBuilderFactory.get("downloadSecurityStep")
.tasklet(securityReaderTasklet())
.build();
}
@Bean
public Step insertSecurityStep() throws IOException {
return stepBuilderFactory.get("insertSecurityStep")
.transactionManager(jpaTransactionManager())
.<SecurityVO, SecurityEntity> chunk(100)
.reader(nseSecurityReader(OVERRIDDEN_BY_EXPRESSION))
.processor(processor())
.writer(writer())
.build();
}
// end::jobstep[]
//Import Equity End
@Bean
public DataSource dataSource() {
DriverManagerDataSource dataSource = new DriverManagerDataSource();
dataSource.setDriverClassName(databaseDriver);
dataSource.setUrl(databaseUrl);
dataSource.setUsername(databaseUsername);
dataSource.setPassword(databasePassword);
return dataSource;
}
@Bean
public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
LocalContainerEntityManagerFactoryBean lef = new LocalContainerEntityManagerFactoryBean();
lef.setPackagesToScan("trade.api.entity");
lef.setDataSource(dataSource());
lef.setJpaVendorAdapter(jpaVendorAdapter());
lef.setJpaProperties(new Properties());
return lef;
}
@Bean
public JpaVendorAdapter jpaVendorAdapter() {
HibernateJpaVendorAdapter jpaVendorAdapter = new HibernateJpaVendorAdapter();
jpaVendorAdapter.setDatabase(Database.MYSQL);
jpaVendorAdapter.setGenerateDdl(true);
jpaVendorAdapter.setShowSql(false);
jpaVendorAdapter.setDatabasePlatform("org.hibernate.dialect.MySQLDialect");
return jpaVendorAdapter;
}
@Bean
@Qualifier("jpaTransactionManager")
public PlatformTransactionManager jpaTransactionManager() {
return new JpaTransactionManager(entityManagerFactory().getObject());
}
@Bean
public static PropertySourcesPlaceholderConfigurer dataProperties(Environment environment) throws IOException {
String[] activeProfiles = environment.getActiveProfiles();
final PropertySourcesPlaceholderConfigurer ppc = new PropertySourcesPlaceholderConfigurer();
ppc.setLocations(new PathMatchingResourcePatternResolver().getResources("classpath*:application-"+activeProfiles[0]+".properties"));
return ppc;
}
//// Import Security End
}
Problem solved. There was a PlatformTransactionManager bean located in another configuration file. I marked it as @Primary and now the problem is fixed. Thanks everyone for the help.
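For reference, marking the JPA transaction manager as the primary one is what resolves the ambiguity; a minimal sketch (the bean shown is illustrative, not the exact bean from the other configuration file):
@Bean
@Primary
public PlatformTransactionManager transactionManager(EntityManagerFactory entityManagerFactory) {
    // Chosen by default wherever Spring needs a single PlatformTransactionManager
    return new JpaTransactionManager(entityManagerFactory);
}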
Database writes are not rolling back as I expected.
I've spent many hours reading software documentation and web postings.
I have not been able to resolve the issue.
I'm hoping you folks can help me.
Scenario
My application pulls a message from a queue, extracts data from the
message, and writes it to a database.
The method that writes to the database does 2 SQL inserts.
The second insert gets an exception: org.postgresql.util.PSQLException: ERROR: duplicate key value violates unique constraint "table2_PK"
However, the first insert is still getting committed to the database.
Relevant Software
spring-boot 1.2.5.RELEASE
atomikos-util 3.9.3 (from spring-boot-starter-jta-atomikos 1.2.5.RELEASE)
jooq 3.6.2
postgresql 9.4-1201-jdbc41
activemq-client 5.1.2
Application Code - I've pasted the relevant parts of my code below.
GdmServer - my "server" class, which also declares Spring bean
configurations
PortSIQueue - my JMS MessageListener class
Kernel - my worker class, i.e. the code that writes to database, a Spring bean invoked by my MessageListener
I'd appreciate any help anyone can offer.
Thanks
package com.sm.gis.gdm;
import javax.transaction.SystemException;
import javax.transaction.UserTransaction;
import org.apache.activemq.ActiveMQXAConnectionFactory;
import org.jooq.DSLContext;
import org.jooq.SQLDialect;
import org.jooq.impl.DSL;
import org.postgresql.xa.PGXADataSource;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.jta.atomikos.AtomikosDataSourceBean;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.jms.annotation.EnableJms;
import org.springframework.jms.core.JmsTemplate;
import org.springframework.jms.listener.DefaultMessageListenerContainer;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import org.springframework.transaction.jta.JtaTransactionManager;
import com.atomikos.icatch.jta.UserTransactionImp;
import com.atomikos.icatch.jta.UserTransactionManager;
import com.atomikos.jms.AtomikosConnectionFactoryBean;
import com.sm.gis.config.GisConfig;
@SpringBootApplication
@EnableJms
@EnableTransactionManagement
public class GdmServer {
@Autowired
ConfigurableApplicationContext context;
@Autowired
GisConfig gisConfig;
/**
* Starts the GDM Server
*/
public static void main(String[] args) {
SpringApplication.run(GdmServer.class, args);
}
// -------------------------------------------------------------------------
// Spring bean configurations
// -------------------------------------------------------------------------
@Bean
GisConfig gisConfig() {
return new GisConfig();
}
@Bean
PlatformTransactionManager transactionManager() throws SystemException {
JtaTransactionManager manager = new JtaTransactionManager();
manager.setTransactionManager( atomikosUserTransactionManager() );
manager.setUserTransaction ( atomikosUserTransaction() );
manager.setAllowCustomIsolationLevels(true);
return manager;
}
@Bean(initMethod = "init", destroyMethod = "close")
UserTransactionManager atomikosUserTransactionManager() throws SystemException {
UserTransactionManager manager = new UserTransactionManager();
manager.setStartupTransactionService(true);
manager.setForceShutdown(false);
manager.setTransactionTimeout( gisConfig.getTxnTimeout() );
return manager;
}
@Bean
UserTransaction atomikosUserTransaction() {
return new UserTransactionImp();
}
@Bean(initMethod = "init", destroyMethod = "close")
AtomikosDataSourceBean atomikosJdbcConnectionFactory() {
PGXADataSource pgXADataSource = new PGXADataSource();
pgXADataSource.setUrl( gisConfig.getGdbUrl() );
pgXADataSource.setUser( gisConfig.getGdbUser() );
pgXADataSource.setPassword( gisConfig.getGdbPassword() );
AtomikosDataSourceBean xaDataSource = new AtomikosDataSourceBean();
xaDataSource.setXaDataSource(pgXADataSource);
xaDataSource.setUniqueResourceName("gdb");
xaDataSource.setPoolSize( gisConfig.getGdbPoolSize() );
return xaDataSource;
}
@Bean
DSLContext dslContext() {
DSLContext dslContext = DSL.using(atomikosJdbcConnectionFactory(), SQLDialect.POSTGRES);
return dslContext;
}
@Bean(initMethod = "init", destroyMethod = "close")
AtomikosConnectionFactoryBean atomikosJmsConnectionFactory() {
ActiveMQXAConnectionFactory activeMQXAConnectionFactory = new ActiveMQXAConnectionFactory();
activeMQXAConnectionFactory.setBrokerURL( gisConfig.getMomBrokerUrl() );
AtomikosConnectionFactoryBean atomikosConnectionFactoryBean = new AtomikosConnectionFactoryBean();
atomikosConnectionFactoryBean.setUniqueResourceName("activeMQBroker");
atomikosConnectionFactoryBean.setXaConnectionFactory(activeMQXAConnectionFactory);
atomikosConnectionFactoryBean.setLocalTransactionMode(false);
return atomikosConnectionFactoryBean;
}
@Bean
DefaultMessageListenerContainer queueWrapperGDM() throws SystemException {
DefaultMessageListenerContainer messageSource = new DefaultMessageListenerContainer();
messageSource.setTransactionManager( transactionManager() );
messageSource.setConnectionFactory( atomikosJmsConnectionFactory() );
messageSource.setSessionTransacted(true);
messageSource.setConcurrentConsumers(1);
messageSource.setReceiveTimeout( gisConfig.getMomQueueGdmTimeoutReceive() );
messageSource.setDestinationName( gisConfig.getMomQueueGdmName() );
messageSource.setMessageListener( context.getBean("portSIQueue") );
return messageSource;
}
@Bean
JmsTemplate queueWrapperLIMS() {
JmsTemplate jmsTemplate = new JmsTemplate();
jmsTemplate.setConnectionFactory( atomikosJmsConnectionFactory() );
jmsTemplate.setDefaultDestinationName( gisConfig.getMomQueueLimsName() );
jmsTemplate.setSessionTransacted(true);
return jmsTemplate;
}
}
package com.sm.gis.gdm.ports;
import javax.jms.JMSException;
import javax.jms.Message;
import javax.jms.MessageListener;
import javax.jms.TextMessage;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import com.sm.gis.gdm.kernel.Kernel;
import com.sm.gis.sdo.xml.marshaler.GisMessageMarshaler;
import com.sm.gis.sdo.xml.service.message.CreateGenomicTestOrderInGIS;
@Component
public class PortSIQueue implements MessageListener {
@Autowired
ConfigurableApplicationContext context;
@Autowired
GisMessageMarshaler queueMessageMashaler;
@Autowired
Kernel kernel;
@Override
@Transactional(rollbackFor = {Throwable.class})
public void onMessage(Message jmsMessage) {
TextMessage jmsTextMessage = (TextMessage) jmsMessage;
// Extract JMS message body...
String jmsPayload = "";
try {
jmsPayload = jmsTextMessage.getText();
} catch (JMSException e) {
throw new RuntimeException(e);
}
// Marshal XML text to object...
Object gisMessage = queueMessageMashaler.toObject(jmsPayload);
kernel.receiveCreateGenomicTestOrderInGIS( (CreateGenomicTestOrderInGIS) gisMessage );
}
}
package com.sm.gis.gdm.kernel;
import org.jooq.DSLContext;
import org.jooq.impl.DSL;
@Component
public class Kernel {
@Autowired
ConfigurableApplicationContext context;
@Autowired
DSLContext dslContext;
<snip>
public void receiveCreateGenomicTestOrderInGIS(CreateGenomicTestOrderInGIS message) {
dslContext.insertInto(table1)
.set(...)
.set(...)
.execute();
dslContext.insertInto(table2)
.set(...)
.set(...)
.execute();
}
<snip>
}
I'm an idiot.
Turns out the issue was due to a defect in my application logic.
The ActiveMQ component retries a message if the first attempt to process it fails with an exception. The transaction created for the first attempt rolled back correctly; it was the second attempt that succeeded. The retry succeeded because a database sequence number had been incremented by the application logic during the first attempt, so the second attempt did not hit the duplicate key violation. After correcting the application logic defect, I also turned off retries, since in my application no message is retry-able anyway.
I apologize for wasting the time of those who read my post.
Along the way, I did make some changes to the implementation. The changes make certain default values explicit choices. I left those changes in because I believe they will make it easier for other developers on my team to understand what's happening more quickly. I also left the JOOQ exception translation code in place because it would be needed in other circumstances and appears to be best practice anyway.
I've included the modified code in this post, in case others might find it useful.
package com.sm.gis.gdm;
import javax.transaction.SystemException;
import javax.transaction.UserTransaction;
import org.apache.activemq.ActiveMQXAConnectionFactory;
import org.apache.activemq.RedeliveryPolicy;
import org.jooq.DSLContext;
import org.jooq.SQLDialect;
import org.jooq.impl.DefaultConfiguration;
import org.jooq.impl.DefaultDSLContext;
import org.jooq.impl.DefaultExecuteListenerProvider;
import org.postgresql.xa.PGXADataSource;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.jta.atomikos.AtomikosDataSourceBean;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.DependsOn;
import org.springframework.jms.annotation.EnableJms;
import org.springframework.jms.core.JmsTemplate;
import org.springframework.jms.listener.DefaultMessageListenerContainer;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import org.springframework.transaction.jta.JtaTransactionManager;
import com.atomikos.icatch.jta.UserTransactionImp;
import com.atomikos.icatch.jta.UserTransactionManager;
import com.atomikos.jms.AtomikosConnectionFactoryBean;
import com.sm.gis.config.GisConfig;
@SpringBootApplication
@EnableJms
@EnableTransactionManagement(proxyTargetClass=true)
public class GdmServer {
@Autowired
ConfigurableApplicationContext context;
@Autowired
GisConfig gisConfig;
/**
* Starts the GDM Server
*/
public static void main(String[] args) {
SpringApplication.run(GdmServer.class, args);
}
// -------------------------------------------------------------------------
// Spring bean configurations
// -------------------------------------------------------------------------
@Bean
GisConfig gisConfig() {
return new GisConfig();
}
@Bean
@DependsOn({ "atomikosUserTransactionManager", "atomikosUserTransaction", "atomikosJdbcConnectionFactory", "atomikosJmsConnectionFactory" })
PlatformTransactionManager transactionManager() throws SystemException {
JtaTransactionManager manager = new JtaTransactionManager();
manager.setTransactionManager( atomikosUserTransactionManager() );
manager.setUserTransaction( atomikosUserTransaction() );
manager.setAllowCustomIsolationLevels(true);
manager.afterPropertiesSet();
return manager;
}
@Bean(initMethod = "init", destroyMethod = "close")
UserTransactionManager atomikosUserTransactionManager() throws SystemException {
UserTransactionManager manager = new UserTransactionManager();
manager.setStartupTransactionService(true);
manager.setForceShutdown(false);
manager.setTransactionTimeout( gisConfig.getTxnTimeout() );
return manager;
}
@Bean
UserTransaction atomikosUserTransaction() {
return new UserTransactionImp();
}
@Bean(initMethod = "init", destroyMethod = "close")
AtomikosDataSourceBean atomikosJdbcConnectionFactory() throws Exception {
PGXADataSource pgXADataSource = new PGXADataSource();
pgXADataSource.setUrl( gisConfig.getGdbUrl() );
pgXADataSource.setUser( gisConfig.getGdbUser() );
pgXADataSource.setPassword( gisConfig.getGdbPassword() );
AtomikosDataSourceBean xaDataSource = new AtomikosDataSourceBean();
xaDataSource.setXaDataSource(pgXADataSource);
xaDataSource.setUniqueResourceName("gdb");
xaDataSource.setPoolSize( gisConfig.getGdbPoolSize() );
xaDataSource.setTestQuery("SELECT 1");
xaDataSource.afterPropertiesSet();
return xaDataSource;
}
@Bean
@DependsOn({ "atomikosJdbcConnectionFactory" })
DSLContext dslContext() throws Exception {
DefaultConfiguration jooqConfiguration = new DefaultConfiguration();
jooqConfiguration.set( SQLDialect.POSTGRES_9_4 );
jooqConfiguration.set( atomikosJdbcConnectionFactory() );
jooqConfiguration.set( new DefaultExecuteListenerProvider(new JooqToSpringExceptionTransformer()) );
DSLContext dslContext = new DefaultDSLContext(jooqConfiguration);
return dslContext;
}
@Bean(initMethod = "init", destroyMethod = "close")
AtomikosConnectionFactoryBean atomikosJmsConnectionFactory() {
RedeliveryPolicy redeliveryPolicy = new RedeliveryPolicy();
redeliveryPolicy.setInitialRedeliveryDelay(0);
redeliveryPolicy.setRedeliveryDelay(0);
redeliveryPolicy.setUseExponentialBackOff(false);
redeliveryPolicy.setMaximumRedeliveries(0);
ActiveMQXAConnectionFactory activeMQXAConnectionFactory = new ActiveMQXAConnectionFactory();
activeMQXAConnectionFactory.setBrokerURL( gisConfig.getMomBrokerUrl() );
activeMQXAConnectionFactory.setRedeliveryPolicy(redeliveryPolicy);
AtomikosConnectionFactoryBean atomikosConnectionFactoryBean = new AtomikosConnectionFactoryBean();
atomikosConnectionFactoryBean.setUniqueResourceName("activeMQBroker");
atomikosConnectionFactoryBean.setXaConnectionFactory(activeMQXAConnectionFactory);
atomikosConnectionFactoryBean.setLocalTransactionMode(false);
return atomikosConnectionFactoryBean;
}
@Bean
@DependsOn({ "transactionManager" })
DefaultMessageListenerContainer queueWrapperGDM() throws SystemException {
DefaultMessageListenerContainer messageSource = new DefaultMessageListenerContainer();
messageSource.setTransactionManager( transactionManager() );
messageSource.setConnectionFactory( atomikosJmsConnectionFactory() );
messageSource.setSessionTransacted(true);
messageSource.setSessionAcknowledgeMode(0);
messageSource.setConcurrentConsumers(1);
messageSource.setReceiveTimeout( gisConfig.getMomQueueGdmTimeoutReceive() );
messageSource.setDestinationName( gisConfig.getMomQueueGdmName() );
messageSource.setMessageListener( context.getBean("portSIQueue") );
messageSource.afterPropertiesSet();
return messageSource;
}
@Bean
@DependsOn({ "transactionManager" })
JmsTemplate queueWrapperLIMS() {
JmsTemplate jmsTemplate = new JmsTemplate();
jmsTemplate.setConnectionFactory( atomikosJmsConnectionFactory() );
jmsTemplate.setDefaultDestinationName( gisConfig.getMomQueueLimsName() );
jmsTemplate.setSessionTransacted(true);
jmsTemplate.setSessionAcknowledgeMode(0);
return jmsTemplate;
}
}
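The JooqToSpringExceptionTransformer registered in dslContext() above is not shown in the post. A minimal sketch of such an ExecuteListener, translating jOOQ's SQLExceptions into Spring's DataAccessException hierarchy (the class name matches the reference above; the translation strategy shown is an assumption):
package com.sm.gis.gdm;
import org.jooq.ExecuteContext;
import org.jooq.SQLDialect;
import org.jooq.impl.DefaultExecuteListener;
import org.springframework.jdbc.support.SQLErrorCodeSQLExceptionTranslator;
import org.springframework.jdbc.support.SQLExceptionTranslator;
public class JooqToSpringExceptionTransformer extends DefaultExecuteListener {

    @Override
    public void exception(ExecuteContext ctx) {
        // Translate the raw SQLException into Spring's DataAccessException hierarchy
        SQLDialect dialect = ctx.configuration().dialect();
        SQLExceptionTranslator translator = new SQLErrorCodeSQLExceptionTranslator(dialect.name());
        ctx.exception(translator.translate("jOOQ", ctx.sql(), ctx.sqlException()));
    }
}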
package com.sm.gis.gdm.ports;
import javax.jms.JMSException;
import javax.jms.Message;
import javax.jms.MessageListener;
import javax.jms.TextMessage;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import com.sm.gis.gdm.kernel.Kernel;
import com.sm.gis.sdo.xml.marshaler.GisMessageMarshaler;
import com.sm.gis.sdo.xml.service.message.CreateGenomicTestOrderInGIS;
@Component
public class PortSIQueue implements MessageListener {
@Autowired
ConfigurableApplicationContext context;
@Autowired
GisMessageMarshaler queueMessageMashaler;
@Autowired
Kernel kernel;
@Override
@Transactional(propagation = Propagation.REQUIRES_NEW, rollbackFor = {Throwable.class})
public void onMessage(Message jmsMessage) {
TextMessage jmsTextMessage = (TextMessage) jmsMessage;
// Extract JMS message body...
String jmsPayload = "";
try {
jmsPayload = jmsTextMessage.getText();
} catch (JMSException e) {
throw new RuntimeException(e);
}
// Marshal XML text to object...
Object gisMessage = queueMessageMashaler.toObject(jmsPayload);
kernel.receiveCreateGenomicTestOrderInGIS( (CreateGenomicTestOrderInGIS) gisMessage );
}
}
package com.sm.gis.gdm.kernel;
import org.jooq.DSLContext;
@Component
public class Kernel {
@Autowired
ConfigurableApplicationContext context;
@Autowired
DSLContext dslContext;
<snip>
public void receiveCreateGenomicTestOrderInGIS(CreateGenomicTestOrderInGIS message) {
dslContext.insertInto(table1)
.set(...)
.set(...)
.execute();
dslContext.insertInto(table2)
.set(...)
.set(...)
.execute();
}
<snip>
}
I had similar issues using the @Transactional annotation. I had to explicitly handle transactions using begin/commit/rollback in a try/catch. Not very elegant and repetitive, but it works. The TransactionContext is saved in the current thread, so your begin method does not need to return the ctx object. A TransactionContext can be instantiated using your DSLContext.configuration().
import javax.inject.Inject;
import org.jooq.TransactionContext;
import org.jooq.TransactionProvider;
import org.jooq.exception.DataAccessException;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.DefaultTransactionDefinition;
public class DataSourceTransactionProvider implements TransactionProvider {
private final DataSourceTransactionManager txMgr;
@Inject
public DataSourceTransactionProvider(DataSourceTransactionManager transactionManager) {
this.txMgr = transactionManager;
}
@Override
public void begin(TransactionContext ctx) throws DataAccessException {
TransactionStatus transactionStatus = txMgr.getTransaction(new DefaultTransactionDefinition(TransactionDefinition.PROPAGATION_NESTED));
ctx.transaction(new DBTransaction(transactionStatus));
}
@Override
public void commit(TransactionContext ctx) throws DataAccessException {
txMgr.commit(((DBTransaction) ctx.transaction()).transactionStatus);
}
@Override
public void rollback(TransactionContext ctx) throws DataAccessException {
txMgr.rollback(((DBTransaction) ctx.transaction()).transactionStatus);
}
}
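The DBTransaction wrapper used above is not part of the snippet; it only needs to carry Spring's TransactionStatus between begin and commit/rollback. A minimal sketch:
import org.jooq.Transaction;
import org.springframework.transaction.TransactionStatus;
class DBTransaction implements Transaction {

    final TransactionStatus transactionStatus;

    DBTransaction(TransactionStatus transactionStatus) {
        this.transactionStatus = transactionStatus;
    }
}
With the provider registered on the jOOQ Configuration, every begin/commit/rollback issued by jOOQ is delegated to the Spring DataSourceTransactionManager.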
I use @Autowired to inject some services into POJOs. The services are registered as beans.
If a POJO is managed by Spring (i.e. it is also registered as a bean in the Spring config), I have no problem with the injected services.
But if I create a "classic" POJO with "new", no services are injected.
My question: is it possible to configure Spring to perform the @Autowired injection in that case?
I use Spring 4.0.3, configured with Java config:
====
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
import org.springframework.core.Ordered;
import org.springframework.http.MediaType;
import org.springframework.security.oauth2.provider.ClientDetailsService;
import org.springframework.security.oauth2.provider.approval.ApprovalStore;
import org.springframework.security.oauth2.provider.token.ConsumerTokenServices;
import org.springframework.security.oauth2.provider.token.TokenStore;
import org.springframework.web.accept.ContentNegotiationManagerFactoryBean;
import org.springframework.web.servlet.View;
import org.springframework.web.servlet.ViewResolver;
import org.springframework.web.servlet.config.annotation.DefaultServletHandlerConfigurer;
import org.springframework.web.servlet.config.annotation.EnableWebMvc;
import org.springframework.web.servlet.config.annotation.ViewControllerRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter;
import org.springframework.web.servlet.view.ContentNegotiatingViewResolver;
import org.springframework.web.servlet.view.InternalResourceViewResolver;
import org.springframework.web.servlet.view.json.MappingJackson2JsonView;
import com.tr.oauth.service.PhotoInfo;
import com.tr.oauth.service.PhotoService;
import com.tr.oauth.service.impl.PhotoServiceImpl;
import com.tr.oauth.service.oauth.ExtendedApprovalStoreUserApprovalHandler;
@Configuration
@EnableWebMvc
public class WebMvcConfig extends WebMvcConfigurerAdapter {
@Bean
public static PropertySourcesPlaceholderConfigurer propertySourcesPlaceholderConfigurer() {
return new PropertySourcesPlaceholderConfigurer();
}
@Bean
public ContentNegotiatingViewResolver contentViewResolver() throws Exception {
ContentNegotiationManagerFactoryBean contentNegotiationManager = new ContentNegotiationManagerFactoryBean();
contentNegotiationManager.addMediaType("json", MediaType.APPLICATION_JSON);
InternalResourceViewResolver viewResolver = new InternalResourceViewResolver();
viewResolver.setPrefix("/WEB-INF/jsp/");
viewResolver.setSuffix(".jsp");
MappingJackson2JsonView defaultView = new MappingJackson2JsonView();
defaultView.setExtractValueFromSingleKeyModel(true);
ContentNegotiatingViewResolver contentViewResolver = new ContentNegotiatingViewResolver();
contentViewResolver.setContentNegotiationManager(contentNegotiationManager.getObject());
contentViewResolver.setViewResolvers(Arrays.<ViewResolver>asList(viewResolver));
contentViewResolver.setDefaultViews(Arrays.<View>asList(defaultView));
return contentViewResolver;
}
@Override
public void addViewControllers(ViewControllerRegistry registry) {
registry.addViewController("/login").setViewName("login");
registry.setOrder(Ordered.HIGHEST_PRECEDENCE);
}
@Bean
public PhotoServiceUserController photoServiceUserController(PhotoService photoService) {
PhotoServiceUserController photoServiceUserController = new PhotoServiceUserController();
return photoServiceUserController;
}
@Bean
public PhotoController photoController(PhotoService photoService) {
PhotoController photoController = new PhotoController();
photoController.setPhotoService(photoService);
return photoController;
}
@Bean
public AccessConfirmationController accessConfirmationController(ClientDetailsService clientDetailsService, ApprovalStore approvalStore) {
AccessConfirmationController accessConfirmationController = new AccessConfirmationController();
accessConfirmationController.setClientDetailsService(clientDetailsService);
accessConfirmationController.setApprovalStore(approvalStore);
return accessConfirmationController;
}
@Bean
public PhotoServiceImpl photoServices() {
List<PhotoInfo> photos = new ArrayList<PhotoInfo>();
photos.add(createPhoto("1", "marissa"));
photos.add(createPhoto("2", "paul"));
photos.add(createPhoto("3", "marissa"));
photos.add(createPhoto("4", "paul"));
photos.add(createPhoto("5", "marissa"));
photos.add(createPhoto("6", "paul"));
PhotoServiceImpl photoServices = new PhotoServiceImpl();
photoServices.setPhotos(photos);
return photoServices;
}
@Bean
public AdminController adminController(TokenStore tokenStore, ConsumerTokenServices tokenServices,
ExtendedApprovalStoreUserApprovalHandler userApprovalHandler) {
AdminController adminController = new AdminController();
adminController.setTokenStore(tokenStore);
adminController.setTokenServices(tokenServices);
adminController.setUserApprovalHandler(userApprovalHandler);
return adminController;
}
private PhotoInfo createPhoto(String id, String userId) {
PhotoInfo photo = new PhotoInfo();
photo.setId(id);
photo.setName("photo" + id + ".jpg");
photo.setUserId(userId);
photo.setResourceURL("/impl/resources/" + photo.getName());
return photo;
}
@Override
public void configureDefaultServletHandling(
DefaultServletHandlerConfigurer configurer) {
configurer.enable();
}
}
-----------
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import org.springframework.util.ClassUtils;
import org.springframework.web.context.WebApplicationContext;
import org.springframework.web.context.support.AnnotationConfigWebApplicationContext;
import org.springframework.web.filter.DelegatingFilterProxy;
import org.springframework.web.servlet.support.AbstractDispatcherServletInitializer;
/**
* @author Dave Syer
*
*/
public class ServletInitializer extends AbstractDispatcherServletInitializer {
@Override
protected WebApplicationContext createServletApplicationContext() {
AnnotationConfigWebApplicationContext context = new AnnotationConfigWebApplicationContext();
context.scan(ClassUtils.getPackageName(getClass()));
return context;
}
@Override
protected String[] getServletMappings() {
return new String[] { "/" };
}
@Override
protected WebApplicationContext createRootApplicationContext() {
return null;
}
@Override
public void onStartup(ServletContext servletContext) throws ServletException {
super.onStartup(servletContext);
DelegatingFilterProxy filter = new DelegatingFilterProxy("springSecurityFilterChain");
filter.setContextAttribute("org.springframework.web.servlet.FrameworkServlet.CONTEXT.dispatcher");
servletContext.addFilter("springSecurityFilterChain", filter).addMappingForUrlPatterns(null, false, "/*");
}
}
Yes, this is possible with the @Configurable annotation and some byte-code magic.
Oliver Gierke, the guy behind Spring Data, created a nice blog post on this topic:
http://olivergierke.de/2009/05/using-springs-configurable-in-three-easy-steps/
http://aredko.blogspot.de/2011/02/using-configurable-in-spring-framework.html
There is also a question with no accepted answer on SO:
Spring autowiring using @Configurable
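In short, @Configurable plus AspectJ weaving of spring-aspects lets Spring inject dependencies into objects you create with new. A minimal sketch, assuming load-time or compile-time weaving is set up and reusing the PhotoService type from the question (the POJO itself is hypothetical):
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Configurable;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.aspectj.EnableSpringConfigured;
@Configuration
@EnableSpringConfigured // Java-config equivalent of <context:spring-configured/>
class SpringConfiguredConfig {
}

@Configurable
class ManuallyCreatedPojo {

    @Autowired
    private PhotoService photoService;

    boolean hasService() {
        // true even when this object was created with "new", once weaving is active
        return photoService != null;
    }
}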