Model:
@NamedStoredProcedureQueries({
    @NamedStoredProcedureQuery(name = ApiUrlCheckModel.LogApi, procedureName = ApiUrlCheckModel.LogApi,
        resultClasses = { ApiUrlCheckModel.class },
        parameters = {
            @StoredProcedureParameter(name = "p_nguoidung_id", type = String.class, mode = ParameterMode.IN),
            @StoredProcedureParameter(name = "p_identify_code", type = String.class, mode = ParameterMode.IN),
            @StoredProcedureParameter(name = "p_menu_id", type = String.class, mode = ParameterMode.IN),
            @StoredProcedureParameter(name = "p_api_call_ip", type = String.class, mode = ParameterMode.IN),
            @StoredProcedureParameter(name = "p_dest_uri", type = String.class, mode = ParameterMode.IN),
            @StoredProcedureParameter(name = "p_gatewaycontext", type = String.class, mode = ParameterMode.IN),
            @StoredProcedureParameter(name = "rsout", type = void.class, mode = ParameterMode.REF_CURSOR)
        }) })
@Entity
public class ApiUrlCheckModel implements Serializable {

    private static final long serialVersionUID = 1L;

    public static final String LogApi = "admin.log_user.verified_api";

    @Id
    @Column(name = "IS_GRANTED")
    private String is_granted;

    public String getIs_granted() {
        return is_granted;
    }
}
Repository:
@Repository
@Transactional(readOnly = true, propagation = Propagation.SUPPORTS)
public interface ApiUrlCheckRepository extends JpaRepository<ApiUrlCheckModel, Long> {

    @Procedure(name = ApiUrlCheckModel.LogApi, outputParameterName = "rsout")
    List<ApiUrlCheckModel> logApi(@Param("p_nguoidung_id") String p_nguoidung_id,
            @Param("p_identify_code") String p_identify_code,
            @Param("p_menu_id") String p_menu_id,
            @Param("p_api_call_ip") String p_api_call_ip,
            @Param("p_dest_uri") String p_dest_uri,
            @Param("p_gatewaycontext") String p_gatewaycontext);
}
The Oracle stored procedure returns a cursor with a single column, IS_GRANTED.
But when I call the repository method I get this error:
oracle.jdbc.driver.ForwardOnlyResultSet cannot be cast to vn.vnpt.api.certificate.model.ApiUrlCheckModel
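For reference, a minimal sketch of invoking the same named query directly through the EntityManager, which applies the resultClasses mapping to the REF_CURSOR rows. The DAO class, the injected EntityManager and the parameter names of the method are assumptions for illustration only, not part of the original code:
// Sketch only: call the named stored procedure via the JPA API and let the
// resultClasses mapping turn each cursor row into an ApiUrlCheckModel.
// The surrounding class and method are hypothetical.
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.StoredProcedureQuery;

public class ApiUrlCheckDao {

    private final EntityManager entityManager;

    public ApiUrlCheckDao(EntityManager entityManager) {
        this.entityManager = entityManager;
    }

    @SuppressWarnings("unchecked")
    public List<ApiUrlCheckModel> logApi(String userId, String identifyCode, String menuId,
            String apiCallIp, String destUri, String gatewayContext) {
        StoredProcedureQuery query = entityManager.createNamedStoredProcedureQuery(ApiUrlCheckModel.LogApi);
        query.setParameter("p_nguoidung_id", userId);
        query.setParameter("p_identify_code", identifyCode);
        query.setParameter("p_menu_id", menuId);
        query.setParameter("p_api_call_ip", apiCallIp);
        query.setParameter("p_dest_uri", destUri);
        query.setParameter("p_gatewaycontext", gatewayContext);
        // getResultList() executes the procedure and maps the REF_CURSOR rows
        return query.getResultList();
    }
}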
I have a Spring Boot project with Apache Camel (using the Maven dependencies camel-spring-boot-starter, camel-jpa-starter and camel-endpointdsl).
There are the following three entities:
@Entity
@Table(name = RawDataDelivery.TABLE_NAME)
@BatchSize(size = 10)
public class RawDataDelivery extends PersistentObjectWithCreationDate {

    protected static final String TABLE_NAME = "raw_data_delivery";
    private static final String COLUMN_CONFIGURATION_ID = "configuration_id";
    private static final String COLUMN_SCOPED_CALCULATED = "scopes_calculated";

    @Column(nullable = false, name = COLUMN_SCOPED_CALCULATED)
    private boolean scopesCalculated;

    @OneToMany(mappedBy = "rawDataDelivery", fetch = FetchType.LAZY)
    private Set<RawDataFile> files = new HashSet<>();

    @CollectionTable(name = "processed_scopes_per_delivery")
    @ElementCollection(targetClass = String.class)
    private Set<String> processedScopes = new HashSet<>();

    // Getter/Setter
}
@Entity
@Table(name = RawDataFile.TABLE_NAME)
@BatchSize(size = 100)
public class RawDataFile extends PersistentObjectWithCreationDate {

    protected static final String TABLE_NAME = "raw_data_files";
    private static final String COLUMN_CONFIGURATION_ID = "configuration_id";
    private static final String COLUMN_RAW_DATA_DELIVERY_ID = "raw_data_delivery_id";
    private static final String COLUMN_PARENT_ID = "parent_file_id";
    private static final String COLUMN_IDENTIFIER = "identifier";
    private static final String COLUMN_CONTENT = "content";
    private static final String COLUMN_FILE_SIZE_IN_BYTES = "file_size_in_bytes";

    @ManyToOne(optional = true, fetch = FetchType.LAZY)
    @JoinColumn(name = COLUMN_RAW_DATA_DELIVERY_ID)
    private RawDataDelivery rawDataDelivery;

    @Column(name = COLUMN_IDENTIFIER, nullable = false)
    private String identifier;

    @Lob
    @Column(name = COLUMN_CONTENT, nullable = true)
    private Blob content;

    @Column(name = COLUMN_FILE_SIZE_IN_BYTES, nullable = false)
    private long fileSizeInBytes;

    // Getter/Setter
}
@Entity
@TypeDef(name = "jsonb", typeClass = JsonBinaryType.class)
@Table(name = RawDataRecord.TABLE_NAME, uniqueConstraints = ...)
public class RawDataRecord extends PersistentObjectWithCreationDate {

    public static final String TABLE_NAME = "raw_data_records";
    static final String COLUMN_RAW_DATA_FILE_ID = "raw_data_file_id";
    static final String COLUMN_INDEX = "index";
    static final String COLUMN_CONTENT = "content";
    static final String COLUMN_HASHCODE = "hashcode";
    static final String COLUMN_SCOPE = "scope";

    @ManyToOne(optional = false)
    @JoinColumn(name = COLUMN_RAW_DATA_FILE_ID)
    private RawDataFile rawDataFile;

    @Column(name = COLUMN_INDEX, nullable = false)
    private long index;

    @Lob
    @Type(type = "jsonb")
    @Column(name = COLUMN_CONTENT, nullable = false, columnDefinition = "jsonb")
    private String content;

    @Column(name = COLUMN_HASHCODE, nullable = false)
    private String hashCode;

    @Column(name = COLUMN_SCOPE, nullable = true)
    private String scope;
}
What I'm trying to do is build a route with Apache Camel that selects all deliveries with the flag scopesCalculated == false and calculates/updates the scope field of all records attached to the files of these deliveries. This should happen in one database transaction. Once all scopes are updated, I want to set the scopesCalculated flag to true and commit the changes to the database (in my case PostgreSQL).
What I have so far is this:
String r3RouteId = ...;
var dataSource3 = jpa(RawDataDelivery.class.getName())
.lockModeType(LockModeType.NONE)
.delay(60).timeUnit(TimeUnit.SECONDS)
.consumeDelete(false)
.query("select rdd from RawDataDelivery rdd where rdd.scopesCalculated is false and rdd.configuration.id = " + configuration.getId())
;
from(dataSource3)
.routeId(r3RouteId)
.routeDescription(configuration.getName())
.messageHistory()
.transacted()
.process(exchange -> {
RawDataDelivery rawDataDelivery = exchange.getIn().getBody(RawDataDelivery.class);
rawDataDelivery.setScopesCalculated(true);
})
.transform(new Expression() {
    @Override
    public <T> T evaluate(Exchange exchange, Class<T> type) {
        RawDataDelivery rawDataDelivery = exchange.getIn().getBody(RawDataDelivery.class);
        return (T) rawDataDelivery.getFiles();
    }
})
.split(bodyAs(Iterator.class)).streaming()
.transform(new Expression() {
    @Override
    public <T> T evaluate(Exchange exchange, Class<T> type) {
        RawDataFile rawDataFile = exchange.getIn().getBody(RawDataFile.class);
        // rawDataRecordJpaRepository is an interface autowired by Spring with the following method:
        // @Lock(value = LockModeType.NONE)
        // Stream<RawDataRecord> findByRawDataFile(RawDataFile rawDataFile);
        // we may have many records per file (100k and more), so we don't want to keep them all in memory.
        // instead we try to stream the resultset and aggregate them in partitions of 500 for processing
        return (T) rawDataRecordJpaRepository.findByRawDataFile(rawDataFile);
    }
})
.split(bodyAs(Iterator.class)).streaming()
.aggregate(constant("all"), new GroupedBodyAggregationStrategy())
.completionSize(500)
.completionTimeout(TimeUnit.SECONDS.toMillis(5))
.process(exchange -> {
List<RawDataRecord> rawDataRecords = exchange.getIn().getBody(List.class);
for (RawDataRecord rawDataRecord : rawDataRecords) {
rawDataRecord.setScope("abc");
}
})
;
Basically this is working, but I have the problem that the records of the last partition will not be updated. In my example I have 43782 records but only 43500 are updated. 282 remain with scope == null.
I really don't understand Camel's JPA transaction and session management, and I can't find any examples of how to update JPA/Hibernate entities with Camel (without using the SQL component).
I already tried some solutions, but none of them work. Most attempts end with "EntityManager/Session closed", "no transaction is in progress" or "Batch update failed. Expected result 1 but was 0", ...
I tried the following:
- setting jpa(...).joinTransaction(false).advanced().sharedEntityManager(true)
- using .enrich(jpa(RawDataRecord.class.getName()).query("select rec from RawDataRecord rec where rawDataFile = ${body}")) instead of .transform(...) with the JPA repository for the records
- using the Hibernate session from the Camel headers to update/save/flush the entities: "Session session = exchange.getIn().getHeader(JpaConstants.ENTITY_MANAGER, Session.class);"
- trying to update via a new jpa component at the end of the route:
.split(bodyAs(Iterator.class)).streaming()
.to(jpa(RawDataRecord.class.getName()).usePersist(false).flushOnSend(false))
Do you have any other ideas / recommendations?
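For illustration only, a minimal sketch of what an explicit per-batch save inside the aggregation step could look like. The injected TransactionTemplate and the repository's saveAll call are assumptions, not part of the original route:
// Sketch only (not from the original route): the final processor saves each
// aggregated batch in its own transaction, so a batch that completes via
// completionTimeout after the JPA consumer's transaction has ended still gets
// flushed. transactionTemplate and rawDataRecordJpaRepository are assumed to
// be injected Spring beans.
.process(exchange -> {
    List<RawDataRecord> rawDataRecords = exchange.getIn().getBody(List.class);
    transactionTemplate.execute(status -> {
        for (RawDataRecord rawDataRecord : rawDataRecords) {
            rawDataRecord.setScope("abc");
        }
        // re-attach the (possibly detached) entities and flush the updates
        return rawDataRecordJpaRepository.saveAll(rawDataRecords);
    });
})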
Hello, I have the following MapStruct mapping description.
@Mapper(componentModel = "spring", uses = {
        GroupResolver.class }, unmappedTargetPolicy = ReportingPolicy.IGNORE, unmappedSourcePolicy = ReportingPolicy.IGNORE)
public abstract class GroupMapper {

    @Autowired
    private UserMapper userMapper;

    @Autowired
    private ChainMapper chainMapper;

    @Mapping(target = "id", source = "id")
    @Mapping(target = "name", source = "name")
    @Mapping(target = "chains", expression = "java(mapChain(group))")
    @Mapping(target = "users", expression = "java(mapUsers(group))")
    public abstract GroupResp toModel(final Group group);

    public Set<ChainResp> mapChain(final Group group) {
        return chainMapper.toModelSet(group.getChains());
    }

    public Set<UserResp> mapUsers(final Group group) {
        return userMapper.toModelSet(group.getUsers());
    }

    @Mapping(target = "id", source = "id")
    @Mapping(target = "name", source = "name")
    @Mapping(target = "chains", ignore = true)
    @Mapping(target = "users", ignore = true)
    @Mapping(target = "users.chainUnixPaths", ignore = true)
    public abstract Group toEntity(final GroupResp groupeResp);

    public Set<Chain> mapChains(final GroupResp groupeResp) {
        return chainMapper.toEntitySet(groupeResp.getChains());
    }

    public Set<User> mapUsers(final GroupResp groupeResp) {
        return userMapper.toEntitySet(groupeResp.getUsers());
    }
}
I'm facing a compile-time error stating:
No target bean properties found: can't map Collection element "UnixPathResp users[].chainUnixPaths" to "UnixPath users[].chainUnixPaths". Consider to declare/implement a mapping method: "UnixPath map(UnixPathResp value)".
AND
No target bean properties found: can't map property "ServerResp users[].server" to "Server users[].server". Consider to declare/implement a mapping method: "Server map(ServerResp value)".
I assume that my ignores are not working, or my resolver is somehow fuzzy.
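For illustration, this is roughly what the compiler messages are asking for: element mapping methods for the nested types, declared on the mapper. The signatures are taken from the error messages; whether you want these methods, or instead want to declare the ignores on the User element mapping (e.g. in UserMapper), depends on the intended result:
// Hypothetical sketch, not from the original mapper: abstract element mapping
// methods that MapStruct can implement and then use when mapping users[].
// The type names come from the compiler messages above.
public abstract UnixPath map(UnixPathResp value);

public abstract Server map(ServerResp value);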
Here are all the related classes.
@Entity
@Table(name = "groups")
public class Group implements Serializable {

    private static final long serialVersionUID = 6980925916410978160L;

    @Id
    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "SEQ_GROUPS")
    @SequenceGenerator(name = "SEQ_GROUPS", sequenceName = "SEQ_GROUPS")
    @Column(name = "GROUP_ID")
    private Long id;

    @Column(name = "GROUP_NAME", length = 256, nullable = false)
    private String name;

    @ManyToMany(fetch = FetchType.LAZY)
    @JoinTable(name = "CHAIN_GROUP", joinColumns = @JoinColumn(name = "GROUP_ID"), inverseJoinColumns = @JoinColumn(name = "CHAIN_ID"))
    private Set<Chain> chains = new HashSet<>();

    @ManyToMany(fetch = FetchType.LAZY)
    @JoinTable(name = "USER_GROUP", joinColumns = @JoinColumn(name = "GROUP_ID"), inverseJoinColumns = @JoinColumn(name = "USER_ID"))
    private Set<User> users = new HashSet<>();
AND
public class GroupResp implements Serializable {

    private static final long serialVersionUID = 7184268214689299357L;

    @JsonProperty("GROUP_ID")
    private Long id;

    @NotBlank
    @JsonProperty("GROUP_NAME")
    private String name;

    @JsonProperty("CHAIN_GROUP")
    private Set<ChainResp> chains = new HashSet<>();

    @JsonProperty("USER_GROUP")
    private Set<UserResp> users = new HashSet<>();
Then my generic resolver:
@Component
public class GroupResolver extends GenericPFResolveContract<GroupResp, Group, Long> {

    @Override
    public Long getIdof(GroupResp s) {
        return s.getId();
    }

    @Override
    @ObjectFactory
    public Group resolve(GroupResp s, @TargetType Class<Group> ts) throws InstantiationException, IllegalAccessException {
        return super.resolve(s, ts);
    }
}
With
@Component
public abstract class GenericPFResolveContract<S, T, K> implements PFResolveContract<T, S> {

    @Autowired
    protected JpaRepository<T, K> sourceRepository;

    @Override
    @ObjectFactory
    @Transactional
    public T resolve(S s, @TargetType Class<T> ts) throws InstantiationException, IllegalAccessException {
        if (s == null || getIdof(s) == null) {
            return ts.newInstance();
        }
        return sourceRepository.findById(getIdof(s)).orElseGet(() -> {
            T newInstance = null;
            try {
                newInstance = ts.newInstance();
            } catch (InstantiationException | IllegalAccessException e) {
                e.printStackTrace();
            }
            return newInstance;
        });
    }

    public abstract K getIdof(S s);
}
I am trying to map only certain fields to the entity object using a native SQL query:
@NamedNativeQuery(name = "CustomerEntity.findOnlyNameAndPhoneFromCustomer", query = "select customer_name, customer_email from customer",
        resultSetMapping = "nativeMapping")
@SqlResultSetMappings(value = {
        @SqlResultSetMapping(name = "nativeMapping",
                entities = {
                        @EntityResult(
                                entityClass = CustomerEntity.class,
                                fields = {
                                        @FieldResult(name = "name", column = "customer_name"),
                                        @FieldResult(name = "email", column = "customer_email")
                                }
                        )})})
@Entity
class CustomerEntity {

    // getters and setters omitted

    @Column(name = "customer_name")
    private String name;

    @Column(name = "customer_email")
    private String email;

    @Column(name = "address")
    private String address;

    @Id
    @GeneratedValue(generator = "uuid2")
    @GenericGenerator(name = "uuid2", strategy = "uuid2")
    @Access(AccessType.PROPERTY)
    @Column(columnDefinition = "VARCHAR(36)", name = "customer_guid")
    @Type(type = "uuid-char")
    private UUID guid;

    @Embedded
    private AuditFields audit;
}
Repository:
@Query(nativeQuery = true)
List<CustomerEntity> findOnlyNameAndPhoneFromCustomer();
I am not trying to map all the fields that are present in the customer table to CustomerEntity, I am only projecting certain fields.
This is giving me errors like:
17:44:37.841 [ERROR] o.h.e.j.s.SqlExceptionHelper - The column name address2_6_0_ is not valid.
There is no column called address2_6_0_ in my table, but there is a column called address. Why is the address column being aliased and referenced here?
I am only referencing customer_name and customer_email.
What is going on?
Thanks.
The entities attribute is for mapping to entities.
From the documentation of @EntityResult:
"If this annotation is used, the SQL statement should select all of the columns that are mapped to the entity object."
Therefore, you should use classes and @ConstructorResult for mapping to DTOs.
@NamedNativeQuery(name = "CustomerEntity.findOnlyNameAndPhoneFromCustomer",
        query = "select customer_name, customer_email from customer",
        resultSetMapping = "nativeMapping")
@SqlResultSetMappings(value = {
        @SqlResultSetMapping(name = "nativeMapping",
                classes = @ConstructorResult(columns = { @ColumnResult(name = "customer_name"), @ColumnResult(name = "customer_email") },
                        targetClass = CustomerEntity.class)) })
@Entity
public class CustomerEntity {

    public CustomerEntity() {
    }

    public CustomerEntity(String name, String email) {
        this.name = name;
        this.email = email;
    }

    ...
}
I am studying Spring Boot and Spring Data using the SWAPI API. I have done almost everything, but now I don't know how to map the relationship between the two lists; below you can see my code and entities.
Entity Film
@Data
@Entity
public class Film extends Persistent<Long> {

    private String title;

    @JsonProperty(value = "episode_id")
    private int episodeId;

    @JsonProperty(value = "opening_crawl")
    @Column(columnDefinition = "CLOB")
    private String openingCrawl;

    private String director;
    private String producer;

    @JsonDeserialize(converter = StringToLocalDateConverter.class)
    @JsonProperty(value = "release_date")
    private LocalDate releaseDate;

    @JsonDeserialize(converter = ApiURLToEntitiesConverter.class)
    @ManyToMany(mappedBy = "films")
    private List<Person> characters;

    @JsonDeserialize(converter = StringToLocalDateTimeConverter.class)
    private LocalDateTime created;

    @JsonDeserialize(converter = StringToLocalDateTimeConverter.class)
    private LocalDateTime edited;

    private String url;
}
Entity Person
@Data
@Entity
public class Person extends Persistent<Long> {

    private String name;
    private String height;
    private String mass;

    @JsonProperty(value = "hair_color")
    private String hairColor;

    @JsonProperty(value = "skin_color")
    private String skinColor;

    @JsonProperty(value = "eye_color")
    private String eyeColor;

    @JsonProperty(value = "birth_year")
    private String birthYear;

    private String gender;

    @JsonDeserialize(converter = ApiURLToEntityConverter.class)
    @JoinColumn(name = "planet_id", foreignKey = @javax.persistence.ForeignKey(name = "none"))
    @OneToOne(optional = true)
    private Planet homeworld;

    @JsonDeserialize(converter = ApiURLToEntitiesConverter.class)
    @ManyToMany
    @JoinTable(
            name = "film_person",
            joinColumns = @JoinColumn(name = "film_fk", referencedColumnName = "id", nullable = true),
            inverseJoinColumns = @JoinColumn(name = "person_fk", referencedColumnName = "id", nullable = true))
    private List<Film> films;

    @JsonDeserialize(converter = StringToLocalDateTimeConverter.class)
    private LocalDateTime created;

    @JsonDeserialize(converter = StringToLocalDateTimeConverter.class)
    private LocalDateTime edited;

    private String url;
}
I am trying to use the Spring Data JPA saveAll method:
@Override
public List<T> insertAll(List<T> entities) {
    for (Persistent entity : entities) {
        Set<ConstraintViolation<Persistent>> violations = validator.validate(entity);
        if (violations != null && !violations.isEmpty()) {
            throw new ConstraintViolationException(violations);
        }
    }
    return repository.saveAll(entities);
}
Converter Method
@Override
public List convert(List<String> s) {
    if (s == null || s.isEmpty()) {
        return null;
    }
    List objetos = new LinkedList();
    for (String url : s) {
        if (url.contains("people")) {
            objetos.add(Util.getPerson(url));
        }
        if (url.contains("planets")) {
            objetos.add(Util.getPlanet(url));
        }
        if (url.contains("starships")) {
            objetos.add(Util.getStarship(url));
        }
        if (url.contains("vehicles")) {
            objetos.add(Util.getVehicle(url));
        }
        if (url.contains("species")) {
            objetos.add(Util.getSpecie(url));
        }
    }
    return objetos;
}
Util method
public static Person getPerson(String characterApiUrl) {
if (characterApiUrl == null || characterApiUrl.isEmpty()) {
return null;
}
Person person = new Person();
person.setId(StringUtil.getIdEntity(characterApiUrl, "people/"));
return person;
}
The relationship table is being created but not populated.
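For reference, a note on where the join table gets written from: with mappedBy = "films" on Film.characters, the owning side of the relationship is Person.films, so JPA only populates film_person from the links held on the Person side. A minimal sketch of keeping both sides in sync before calling saveAll; the helper method below is hypothetical and not part of the original code:
// Hypothetical helper, for illustration only: mirror the Film -> Person link on
// the owning side (Person.films), which is the side JPA uses to fill film_person.
public static void linkCharacter(Film film, Person person) {
    if (film.getCharacters() == null) {
        film.setCharacters(new LinkedList<>());
    }
    if (person.getFilms() == null) {
        person.setFilms(new LinkedList<>());
    }
    film.getCharacters().add(person); // inverse side, kept consistent in memory
    person.getFilms().add(film);      // owning side, this is what persists the join row
}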
I'm using Spring Data JPA Specifications for filtering data.
But when I hit this URL: http://localhost:9091/api/student/all?salary_like=1500
I get empty content. If I filter by name I also get empty content.
But in the Eclipse console it generates the correct query:
Hibernate: select student0_.id as id1_0_, student0_.address as address2_0_, student0_.age as age3_0_, student0_.name as name4_0_, student0_.salary as salary5_0_ from student_data_with_projection student0_ where (student0_.name like ?) and (student0_.age like ?) and (student0_.address like ?) and student0_.salary=1500.0 limit ?
And my code is:
POJO
@Data
@Component
@NoArgsConstructor
@AllArgsConstructor
@Entity
@Setter
@Getter
@Table(name = "student_data_with_projection")
public class Student {

    @Id
    @Column(name = "id")
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Long id;

    @Column(name = "name")
    private String name;

    @Column(name = "age")
    private int age;

    @Column(name = "salary")
    private Float salary;

    @Column(name = "address")
    private String address;
}
Controller
@RestController
@RequestMapping(path = "api/student/") // This is the base URL of our controller.
public class StudentController {

    @Autowired
    StudentRepository studentRepository;

    @GetMapping(path = "all")
    public @ResponseBody Iterable<Student> getAllStudentWIthProjection(
            @RequestParam(required = false, defaultValue = "") String name_like,
            @RequestParam(required = false, defaultValue = "") int age_like,
            @RequestParam(required = false) Float salary_like,
            @RequestParam(required = false, defaultValue = "") String address_like,
            @RequestParam(required = false, defaultValue = "0") int pageNum,
            @RequestParam(required = false, defaultValue = "20") int pageSize) {

        StudentSpecification spec1 =
                new StudentSpecification(new SearchCriteria("name", ":", name_like));
        StudentSpecification spec2 =
                new StudentSpecification(new SearchCriteria("age", ":", age_like));
        StudentSpecification spec3 =
                new StudentSpecification(new SearchCriteria("address", ":", address_like));

        Specification<Student> specGroup = Specification.where(spec1).and(spec2).and(spec3);
        if (salary_like != null) {
            StudentSpecification spec4 =
                    new StudentSpecification(new SearchCriteria("salary", ":", salary_like));
            specGroup = specGroup.and(spec4);
        }

        Page<Student> findAll = studentRepository.findAll(specGroup, PageRequest.of(pageNum, pageSize));
        return findAll;
    }
}
Repository
public interface StudentRepository extends PagingAndSortingRepository<Student, Long>, JpaSpecificationExecutor<Student> {
}
Specification
@AllArgsConstructor
public class StudentSpecification implements Specification<Student> {

    private SearchCriteria criteria;

    public StudentSpecification(SearchCriteria searchCriteria) {
        super();
        this.criteria = searchCriteria;
    }

    public SearchCriteria getCriteria() {
        return criteria;
    }

    @Override
    public Predicate toPredicate(Root<Student> root, CriteriaQuery<?> query, CriteriaBuilder builder) {
        if (criteria.getOperation().equalsIgnoreCase(">")) {
            if (root.get(criteria.getKey()).getJavaType() == Date.class) {
                return builder.greaterThanOrEqualTo(root.<Date>get(criteria.getKey()), (Date) criteria.getValue());
            } else {
                return builder.greaterThanOrEqualTo(root.<String>get(criteria.getKey()), criteria.getValue().toString());
            }
        } else if (criteria.getOperation().equalsIgnoreCase("<")) {
            if (root.get(criteria.getKey()).getJavaType() == Date.class) {
                return builder.lessThanOrEqualTo(root.<Date>get(criteria.getKey()), (Date) criteria.getValue());
            } else {
                return builder.lessThanOrEqualTo(root.<String>get(criteria.getKey()), criteria.getValue().toString());
            }
        } else if (criteria.getOperation().equalsIgnoreCase(":")) {
            if (root.get(criteria.getKey()).getJavaType() == String.class) {
                return builder.like(root.<String>get(criteria.getKey()), "%" + criteria.getValue() + "%");
            } else {
                return builder.equal(root.get(criteria.getKey()), criteria.getValue());
            }
        }
        return null;
    }
}
Criteria
@Data
@AllArgsConstructor
public class SearchCriteria {

    private String key;
    private String operation;
    private Object value;

    public SearchCriteria(String key, String operation, Object value) {
        super();
        this.key = key;
        this.operation = operation;
        this.value = value;
    }

    /* Getter and Setter */
}
I have uploaded the code in GitHub: https://github.com/avinashm294/Filters.git
How can I fix this so that my data gets filtered?
I had used Lombok's @Getter and @Setter annotations in the POJO, which were not working. After adding the getters and setters explicitly, it now works.
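A minimal sketch of what that looks like, assuming the Lombok annotations are dropped (or simply not picked up); field names are taken from the Student entity above:
// Explicit accessors on Student instead of relying on Lombok, e.g.
public String getName() {
    return name;
}

public void setName(String name) {
    this.name = name;
}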
The like in the query works as =; you need to append % before and after the name.
Example:
StudentSpecification spec1 =
new StudentSpecification(new SearchCriteria("name", ":", "%"+name_like+"%"));