MyBatis on H2 database can't insert data - spring-boot

I am using Spring Boot v1.4.1.RELEASE + mybatis-spring-boot-starter v1.1.1 + h2database v1.4.192 + flywaydb v4.0.3 to build a Spring Boot web application.
Here is the datasource part of my yml config file:
datasource:
  km:
    driverClassName: org.h2.Driver
    url: jdbc:h2:mem:km;MODE=MySQL;
When I use MySQL to run my unit tests, every test passes. But when I switch to H2, the same tests fail.
The unit test code is below:
@RunWith(SpringJUnit4ClassRunner.class)
@SpringBootTest(classes = App.class)
@ActiveProfiles("unit-test")
@Transactional
@Rollback
public class FollowServiceTest {

    private int HOST_ID = 1;
    private int UID = 100;

    @Autowired
    private FollowService followService;
    @Autowired
    private UserFollowDAO followDAO;

    @Test
    public void testFans() throws Exception {
        UserFollow userFollow = new UserFollow();
        userFollow.setHostId(HOST_ID);
        userFollow.setFollowerId(UID);
        followDAO.insert(userFollow);
        List<UserFollow> fans = followDAO.findByIndexHostId(HOST_ID, 0, 10);
        assertThat("fans is empty", fans, not(empty()));
    }
}
UserFollowDAO:
public interface UserFollowDAO {

    public static final String COL_ALL = " id, host_id, follower_id, create_time, last_update_time ";
    public static final String TABLE = " user_follow ";

    @Select(" select " +
            COL_ALL +
            " from " +
            TABLE +
            " where " +
            "`host_id` = #{hostId} " +
            " and id > #{lastId} " +
            " limit #{count} "
    )
    public List<UserFollow> findByIndexHostId(
            @Param("hostId") int hostId,
            @Param("lastId") int lastId,
            @Param("count") int count
    );

    @Insert(" insert into " + TABLE + " set "
            + " id = #{id}, "
            + " host_id = #{hostId}, "
            + " follower_id = #{followerId}, "
            + " create_time = now(), "
            + " last_update_time = now()")
    public int insert(UserFollow bean);
}
Error info:
java.lang.AssertionError: fans is empty
Expected: not an empty collection
but: was <[]>
at org.hamcrest.MatcherAssert.assertThat(MatcherAssert.java:20)
at org.junit.Assert.assertThat(Assert.java:956)
at com.hi.hk.api.service.FollowServiceTest.testFans(FollowServiceTest.java:52)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
Here are the solutions I have tried:
Changing the datasource url in the yml file from jdbc:h2:mem:km;MODE=MySQL to jdbc:h2:mem:km;MODE=MySQL;LOCK_MODE=1. Did not work.
Deleting the @Transactional and @Rollback annotations from the unit test code. Did not work.
What can I do next to solve this problem?
@pau:
Here is part of my application-unit-test.yml config file:
datasource:
  ut: true
  km:
    driverClassName: org.h2.Driver
    url: jdbc:h2:mem:km;MODE=MySQL
And I have changed the datasource config as you said:
@Value("${datasource.ut}")
private boolean isUt;

public static final String SQL_SESSION_FACTORY_NAME = "sessionFactoryKm";
public static final String TX_MANAGER = "txManagerKm";

private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

@Bean(name = "datasourceKm")
@Primary
@ConfigurationProperties(prefix = "datasource.km")
public DataSource dataSourceKm() {
    if (isUt) {
        // Because I use db migration to execute the init sql scripts, I haven't set a script here.
        EmbeddedDatabaseBuilder builder = new EmbeddedDatabaseBuilder();
        return builder.setType(EmbeddedDatabaseType.H2).build();
    }
    return DataSourceBuilder.create().build();
}
Here is the Flyway bean:
#Bean(name = "KmMigration", initMethod = "migrate")
#Primary
public Flyway flyway() throws SQLException {
logger.info(("================= start km db migration ================="));
Flyway flyway = new Flyway();
flyway.setDataSource(dataSourceKm());
flyway.setBaselineOnMigrate(true);
if (flyway.getDataSource().getConnection().getMetaData().getDatabaseProductName().toString().contains("H2")) {
logger.info(("================= using h2 database to start this app ================="));
flyway.setLocations("/db/migration/ut/km");
} else {
flyway.setLocations("/db/migration/km");
}
// different developer environment. might get different checksum,
// add the statement to skip the checksum error
// TODO: Maintain checksum across all developers environment.
flyway.setValidateOnMigrate(false);
MigrationVersion baselineVersion = flyway.getBaselineVersion();
logger.info(baselineVersion.getVersion());
return flyway;
}

There was a bug in my code:
@Insert(" insert into " + TABLE + " set "
        + " id = #{id}, " // this line
        + " host_id = #{hostId}, "
        + " follower_id = #{followerId}, "
        + " create_time = now(), "
        + " last_update_time = now()")
public int insert(UserFollow bean);
This insert method uses the value of id from the bean, which is 0, so the row never gets a valid id. When I deleted that line, the unit test passed.
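For reference, the corrected mapping simply omits the id column so the database can assign the auto-increment value. A minimal sketch; the @Options line is an assumption showing how MyBatis can copy the generated key back into the bean, and was not part of the original fix:

@Insert(" insert into " + TABLE + " set "
        + " host_id = #{hostId}, "
        + " follower_id = #{followerId}, "
        + " create_time = now(), "
        + " last_update_time = now()")
@Options(useGeneratedKeys = true, keyProperty = "id") // assumed: write the generated id back into the bean
public int insert(UserFollow bean);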

Related

Spring, No "Access-Control-Allow-Origin" after repackaging

I did a simple repackaging and that caused an Access-Control-Allow-Origin issue with my S3 cloud.
I have a local S3-compatible server to store videos; using Spring, I am streaming the videos directly from my local cloud.
Everything was working as expected until I tried to repackage my classes.
I had one package com.example.video with the following classes:
S3Config.java: contains the AmazonS3Client
User.java: a model class
VideoController.java: a simple controller
VideoStreamingServiceApplication.java: the application class
When I created a new package com.example.s3 and moved both User.java and S3Config.java into it, I had an autowiring issue, and that was fixed by using component scan as this answer suggested (see the sketch below).
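Roughly, that component-scan fix looks like the following sketch; the annotation values are assumed from the package names above:

@SpringBootApplication
@ComponentScan(basePackages = {"com.example.video", "com.example.s3"})
public class VideoStreamingServiceApplication {
    public static void main(String[] args) {
        SpringApplication.run(VideoStreamingServiceApplication.class, args);
    }
}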
Even after the autowiring issue was fixed, I am getting an error when I try to stream:
Access to XMLHttpRequest at "http://localhost/9999/recordins/a.m3u8" from origin 'null' has been blocked by CORS policy: No 'Access-Control-Allow-Origin' header is present on the requested resource.
Although I do have the header set in my response, here is my VideoController.java:
@RestController
@RequestMapping("/cloud")
@ConfigurationProperties(prefix = "amazon.credentials")
public class VideoCotroller {

    @Autowired
    private S3Config s3Client;
    private String bucketName = "recordings";
    Logger log = LoggerFactory.getLogger(VideoCotroller.class);

    @Autowired
    User userData;

    @GetMapping(value = "/recordings/{fileName}", produces = { MediaType.APPLICATION_OCTET_STREAM_VALUE })
    public ResponseEntity<StreamingResponseBody> streamVideo(HttpServletRequest request, @PathVariable String fileName) {
        try {
            long rangeStart = 0;
            long rangeEnd;
            AmazonS3 s3client = s3Client.getAmazonS3Client();
            String uri = request.getRequestURI();
            System.out.println("Fetching " + uri);
            S3Object object = s3client.getObject("recordings", fileName);
            long size = object.getObjectMetadata().getContentLength();
            S3ObjectInputStream finalObject = object.getObjectContent();
            final StreamingResponseBody body = outputStream -> {
                int numberOfBytesToWrite = 0;
                byte[] data = new byte[(int) size];
                while ((numberOfBytesToWrite = finalObject.read(data, 0, data.length)) != -1) {
                    outputStream.write(data, 0, numberOfBytesToWrite);
                }
                finalObject.close();
            };
            rangeEnd = size - 1;
            return ResponseEntity.status(HttpStatus.OK)
                    .header("Content-Type", "application/vnd.apple.mpegurl")
                    .header("Accept-Ranges", "bytes")
                    // HERE IS THE ACCESS CONTROL ALLOW ORIGIN
                    .header("Access-Control-Allow-Origin", "*")
                    .header("Content-Length", String.valueOf(size))
                    .header("display", "staticcontent_sol, staticcontent_sol")
                    .header("Content-Range", "bytes" + " " + rangeStart + "-" + rangeEnd + "/" + size)
                    .body(body);
            //return new ResponseEntity<StreamingResponseBody>(body, HttpStatus.OK);
        } catch (Exception e) {
            System.err.println("Error " + e.getMessage());
            return new ResponseEntity<StreamingResponseBody>(HttpStatus.BAD_REQUEST);
        }
    }
}
If I restore the packages to one, as it was before, everything works fine.
MY QUESTION: Why did repackaging cause this issue, and any idea how to fix it?

Spring Aspect to log exceptions [duplicate]

This question already has an answer here:
Exception handling and after advice
(1 answer)
Closed 1 year ago.
I added an aspect like the one below to my spring-boot REST API to log calls to all methods in the package "com.learning.spring", like so:
@Aspect
@Component
public class LogAdvice {

    Logger logger = LoggerFactory.getLogger(LogAdvice.class);

    @Pointcut(value = "execution(* com.learning.spring.*.*.*(..))")
    public void pointCut() {
    }

    @Around("pointCut()")
    public Object appLogger(ProceedingJoinPoint jp) throws Throwable {
        ObjectMapper mapper = new ObjectMapper();
        String methodName = jp.getSignature().getName();
        String className = jp.getTarget().getClass().toString();
        Object[] args = jp.getArgs();
        logger.info("Start call: " + className + ":" + methodName + "()" + " with arguments: " + mapper.writeValueAsString(args));
        Object object = jp.proceed();
        logger.info("End call: " + className + ":" + methodName + "()" + " returned: " + mapper.writeValueAsString(object));
        return object;
    }
}
This is working fine, but I would also like it to be able to log any exceptions that could occur when a method is called.
How do I do that?
I suppose you could add another @AfterThrowing advice using the same pointcut, or wrap jp.proceed(); inside a try-catch block.
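For example, a minimal sketch of that @AfterThrowing variant, added to the same LogAdvice aspect and reusing the pointCut() defined above (the method name is illustrative):

// Needs org.aspectj.lang.JoinPoint and org.aspectj.lang.annotation.AfterThrowing.
// Runs only when a matched method throws; the exception is bound to "ex".
@AfterThrowing(pointcut = "pointCut()", throwing = "ex")
public void logException(JoinPoint jp, Throwable ex) {
    String className = jp.getTarget().getClass().toString();
    String methodName = jp.getSignature().getName();
    logger.error("Exception in " + className + ":" + methodName + "()", ex);
}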

@Transactional annotation with spring and getting current connection

I have a method which has an UPDATE query and a SELECT query. I annotated the method with @Transactional for the below use case:
For concurrent executions, if two users are updating the same table, I need the update and select query to run as a unit.
When not using @Transactional, I use JdbcTemplate and try to get the current connection, set auto-commit to false, and commit at the end of the method.
Issue 1:
The update is getting committed immediately after the statement is executed.
Issue 2:
With JdbcTemplate, I am unable to get the current connection used for the transaction.
I have tried the below two ways to get the current connection, but each seems to be a new connection from the pool:
1. Connection conn = DataSourceUtils.getConnection(template.getDataSource());
2. Connection con = template.getDataSource().getConnection();
The application is deployed in WebLogic Server using Java configuration; I created beans for the JdbcTemplate, datasource, and transaction manager, and used autowiring.
@Transactional
public Integer getNextId(String tablename) {
    Integer nextId = null;
    int swId = template.update("update " + tablename + " set swId = swId + 1");
    //int swId1 = template.update("update " + tablename + " set swId = swId + 1");
    if (swId == 1) {
        nextId = template.queryForObject("select swId from " + tablename,
                int.class);
    }
    return nextId;
}
#Scope("prototype")
#Bean
public DataSource dataSource() throws NamingException {
javax.naming.InitialContext ctx = new javax.naming.InitialContext();
DataSource dataSource = (javax.sql.DataSource) ctx.lookup(dsnName);
return dataSource;
}
#Scope("prototype")
#Bean
public DataSourceTransactionManager dataSourceTransactionManager(DataSource dataSource) {
DataSourceTransactionManager dataSourceTransactionManager = new DataSourceTransactionManager();
dataSourceTransactionManager.setDataSource(dataSource);
return dataSourceTransactionManager;
}
#Scope("prototype")
#Bean
public JdbcTemplate template(DataSource dataSource) {
JdbcTemplate template = new JdbcTemplate(dataSource);
return template;
}
Expected results:
Commit should happen after all the statements in the method are executed.
The JdbcTemplate needs to get the active connection used for the current transaction.
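For comparison, a minimal sketch of the programmatic alternative with TransactionTemplate, assuming the DataSourceTransactionManager bean above is injected (note also that @Transactional only takes effect when the method is invoked through the Spring proxy from outside the bean). Inside the callback, both statements run on the single connection that DataSourceTransactionManager binds to the current thread, and the commit happens when the callback returns:

// Needs org.springframework.transaction.support.TransactionTemplate.
@Autowired
private DataSourceTransactionManager txManager;

public Integer getNextIdInOneTx(String tablename) {
    TransactionTemplate txTemplate = new TransactionTemplate(txManager);
    return txTemplate.execute(status -> {
        // update and select share the transaction-bound connection
        int updated = template.update("update " + tablename + " set swId = swId + 1");
        return updated == 1
                ? template.queryForObject("select swId from " + tablename, Integer.class)
                : null;
    });
}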

data is not inserted into db when using spring jpa and spring test

@RunWith(SpringJUnit4ClassRunner.class)
@Transactional
@ContextConfiguration(classes = SimpleConfiguration.class)
public class CustomerSyncRepositoryTest {

    @Autowired
    CustomerSyncRepository customerSyncRepository;
    @Autowired
    CustomerRepository customerRepository;
    @Autowired
    MemberShipRepository memberShipRepository;

    @Test
    public void collectDataTest() {
        CustomerSync customerSyncOne = new CustomerSync();
        customerSyncOne.setMembershipNumber("1");
        customerSyncRepository.save(customerSyncOne);

        Membership membership = new Membership();
        membership.setName("钻石");
        membership = memberShipRepository.save(membership);

        Customer customerOne = new Customer();
        customerOne.setMembershipNumber("1");
        customerOne.setMembership(membership);
        customerRepository.save(customerOne);

        Customer customerTwo = new Customer();
        customerTwo.setMembershipNumber("2");
        customerTwo.setMembership(membership);
        customerRepository.save(customerTwo);

        customerSyncRepository.collectMissingSyncData();
        assertEquals(2, customerSyncRepository.findAll().size());
    }
}
There should be 2 records in the db, but it seems the new records are never inserted. When I test it by starting up the application, there are 2 records in the db. Is there any configuration I missed in the unit test? Here is what customerSyncRepository.collectMissingSyncData() does:
public interface CustomerSyncRepository extends JpaRepository<CustomerSync, String> {

    @Modifying
    @Query(nativeQuery = true, value = "insert into customer_sync(membershipNumber, membership, cnName, enName, phoneNum, password, email, firstname, lastname, active, otherType, otherNum) SELECT " +
            "a.membershipNumber, " +
            "b.name, " +
            "a.cnName, " +
            "a.enName, " +
            "a.phoneNum, " +
            "a.password, " +
            "a.email, " +
            "a.firstname, " +
            "a.lastname, " +
            "a.active, " +
            "a.otherType, " +
            "a.otherNum " +
            "FROM " +
            "customer a " +
            "JOIN membership b ON (a.membership_id = b.id) " +
            "WHERE " +
            "a.membershipNumber NOT IN (select membershipNumber from customer_sync) ")
    void collectMissingSyncData();

    List<CustomerSync> findBySync(boolean isSync);
}
The issue is that your saved entities are being stored in the 1st level cache. In other words, they are not yet in the database.
To solve this, just flush the state of the 1st level cache into the database.
One way is to inject the EntityManager into your test class like this...
@PersistenceContext
EntityManager entityManager;
... and then invoke entityManager.flush() after each call to customerRepository.save(..) (or at least once before the call to customerSyncRepository.findAll()).
The other option is simply to invoke the flush() method on the repository itself, for example if you're using Spring Data JPA repositories.
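Concretely, the test would change roughly like this (a sketch reusing the entities built above; only the flush before the native query is new):

@PersistenceContext
EntityManager entityManager;

@Test
public void collectDataTest() {
    // ... save customerSyncOne, membership, customerOne and customerTwo as above ...
    entityManager.flush(); // push the pending inserts out of the 1st level cache
    customerSyncRepository.collectMissingSyncData();
    assertEquals(2, customerSyncRepository.findAll().size());
}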

Deserializing an interface using Gson?

I'm trying to use Gson with an interface:
public interface Photo {
    public int getWidth();
}

public class DinosaurPhoto implements Photo {
    ...
}

public class Wrapper {
    private Photo mPhoto; // <- problematic
}

...

Wrapper wrapper = new Wrapper();
wrapper.setPhoto(new DinosaurPhoto());
Gson gson = new Gson();
String raw = gson.toJson(wrapper);
// Throws an error since "Photo" can't be deserialized as expected.
Wrapper deserialized = gson.fromJson(raw, Wrapper.class);
Since the Wrapper class has a member variable that is of type Photo, how do I go about deserializing it using Gson?
Thanks
Custom deserialization is necessary.
Depending on the larger problem to be solved, either a "type adapter" or a "type hierarchy adapter" should be used. The type hierarchy adapter "is to cover the case when you want the same representation for all subtypes of a type".
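For illustration, a minimal sketch of such an adapter for the Photo hierarchy (the "type"/"data" wrapper field names are assumptions made for this sketch, not a Gson convention):

public class PhotoAdapter implements JsonSerializer<Photo>, JsonDeserializer<Photo> {
    @Override
    public JsonElement serialize(Photo src, Type typeOfSrc, JsonSerializationContext context) {
        JsonObject wrapper = new JsonObject();
        wrapper.addProperty("type", src.getClass().getName()); // record the concrete class
        wrapper.add("data", context.serialize(src));
        return wrapper;
    }

    @Override
    public Photo deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException {
        JsonObject wrapper = json.getAsJsonObject();
        try {
            Class<?> concrete = Class.forName(wrapper.get("type").getAsString());
            return context.deserialize(wrapper.get("data"), concrete);
        } catch (ClassNotFoundException e) {
            throw new JsonParseException(e);
        }
    }
}

// Registered for the whole hierarchy:
Gson gson = new GsonBuilder()
        .registerTypeHierarchyAdapter(Photo.class, new PhotoAdapter())
        .create();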
Simply put, you can't do that with GSON.
I was troubled by the same problem when I stumbled upon Jackson.
With it, it is very easy:
ObjectMapper mapper = new ObjectMapper();
mapper.enableDefaultTyping();
And then you can go about de/serializing your Java objects and interfaces without having to write additional custom de/serializers or annotations, and really with no added code whatsoever.
This was not a part of the question, but may prove useful if you decide to port from Gson to Jackson.
Gson supports private fields by default, but for Jackson you have to include this in your code:
mapper.setVisibilityChecker(mapper.getVisibilityChecker().with(Visibility.ANY));
Sample implementation for your code in main:
ObjectMapper mapper = new ObjectMapper();
mapper.enableDefaultTyping();
mapper.setVisibilityChecker(mapper.getVisibilityChecker().with(Visibility.ANY));
Wrapper wrapper = new Wrapper();
wrapper.setPhoto(new DinosaurPhoto());
String wrapper_json = mapper.writeValueAsString(wrapper);
Wrapper wrapper_from_json = mapper.readValue(wrapper_json,Wrapper.class);
Gson promised they will work on this problem in future versions, but they haven't solved it so far.
If this is very important for your application, I would suggest that you port to Jackson.
I have built a primitive interface shim generator by way of compiling a groovy properties class to interoperate with a GWT Autobeans model. This is a really rough method to sidestep the ASM/cglib learning curve for now. Background on this: with Autobeans you may only use interfaces, and the sun.* proxies are incapable of gson interop for all the access attempts I have experimented with. BUT, when the groovy classloader is local to the GsonBuilder, things get a tiny bit easier. Note that this fails unless the gsonBuilder registration is actually called from within the groovy itself.
To access the shim factory, create one as a singleton named JSON_SHIM and call
JSON_SHIM.getShim("{}", MyInterface.class)
to register it if needed and create a [blank] instance. If you have interfaces within your interfaces, you must pre-register those too ahead of use. This is just enough magic to use flat Autobeans with gson, not a whole framework.
There is no groovy code in this generator, so someone with javassist-foo can repeat the experiment.
import com.google.gson.GsonBuilder;
import com.google.gson.InstanceCreator;
import com.google.gson.internal.bind.ReflectiveTypeAdapterFactory;
import groovy.lang.GroovyClassLoader;
import org.apache.commons.beanutils.PropertyUtils;

import java.beans.PropertyDescriptor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Type;
import java.util.LinkedHashMap;
import java.util.Map;

public class GroovyGsonShimFactory {

    private Map<Class, Method> shimMethods = new LinkedHashMap<>();

    private void generateGroovyProxy(Class ifaceClass) {
        String shimClassName = ifaceClass.getSimpleName() + "$Proxy";
        String ifaceClassCanonicalName = ifaceClass.getCanonicalName();
        String s = "import com.google.gson.*;\n" +
                "import org.apache.commons.beanutils.BeanUtils;\n" +
                "import java.lang.reflect.*;\n" +
                "import java.util.*;\n\n" +
                "public class " + shimClassName + " implements " + ifaceClassCanonicalName + " {\n";
        {
            PropertyDescriptor[] propertyDescriptors = PropertyUtils.getPropertyDescriptors(ifaceClass);
            for (PropertyDescriptor p : propertyDescriptors) {
                String name = p.getName();
                String tname = p.getPropertyType().getCanonicalName();
                s += "public " + tname + " " + name + ";\n";
                s += " " + p.getReadMethod().toGenericString().replace("abstract", "").replace(ifaceClassCanonicalName + ".", "") + "{return " + name + ";};\n";
                Method writeMethod = p.getWriteMethod();
                if (writeMethod != null)
                    s += " " + writeMethod.toGenericString().replace("abstract", "").replace(ifaceClassCanonicalName + ".", "").replace(")", " v){" + name + "=v;};") + "\n\n";
            }
        }
        s += " public static " + ifaceClassCanonicalName + " fromJson(String s) {\n" +
                " return (" + ifaceClassCanonicalName +
                ")cydesign.strombolian.server.ddl.DefaultDriver.gson().fromJson(s, " + shimClassName + ".class);\n" +
                " }\n" +
                " static public interface foo extends InstanceCreator<" + ifaceClassCanonicalName + ">, JsonSerializer<" + ifaceClassCanonicalName + ">, JsonDeserializer<" + ifaceClassCanonicalName + "> {}\n" +
                " static {\n" +
                " cydesign.strombolian.server.ddl.DefaultDriver.builder().registerTypeAdapter(" + ifaceClassCanonicalName + ".class, new foo() {\n" +
                " public " + ifaceClassCanonicalName + " deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException {\n" +
                " return context.deserialize(json, " + shimClassName + ".class);\n" +
                " }\n" +
                "\n" +
                " public " + ifaceClassCanonicalName + " createInstance(java.lang.reflect.Type type) {\n" +
                " try {\n" +
                " return new " + shimClassName + "();\n" +
                " } catch (Exception e) {\n" +
                " e.printStackTrace();\n" +
                " }\n" +
                " return null;\n" +
                " }\n" +
                "\n" +
                " @Override\n" +
                " public JsonElement serialize(" + ifaceClassCanonicalName + " src, Type typeOfSrc, JsonSerializationContext context) {\n" +
                " LinkedHashMap linkedHashMap = new LinkedHashMap();\n" +
                " try {\n" +
                " BeanUtils.populate(src, linkedHashMap);\n" +
                " return context.serialize(linkedHashMap);\n" +
                " } catch (Exception e) {\n" +
                " e.printStackTrace();\n" +
                " }\n" +
                "\n" +
                " return null;\n" +
                " }\n" +
                " });\n" +
                " }\n\n" +
                "};";
        System.err.println("" + s);
        ClassLoader parent = DefaultDriver.class.getClassLoader();
        GroovyClassLoader loader = new GroovyClassLoader(parent);
        final Class gClass = loader.parseClass(s);
        try {
            Method shimMethod = gClass.getMethod("fromJson", String.class);
            shimMethods.put(ifaceClass, shimMethod);
        } catch (NoSuchMethodException e) {
            e.printStackTrace();
        }
    }

    public <T> T getShim(String json, Class<T> ifaceClass) {
        if (!shimMethods.containsKey(ifaceClass))
            generateGroovyProxy(ifaceClass);
        T shim = null; //= gson().shimMethods(json, CowSchema.class);
        try {
            shim = (T) shimMethods.get(ifaceClass).invoke(null, json);
        } catch (IllegalAccessException | InvocationTargetException e) {
            e.printStackTrace();
        }
        return shim;
    }
}
