How to handle a vast number of HTTP GET requests in Spring Boot - spring-boot

I am trying to send 1676 HTTP GET requests from Spring Boot.
One HTTP GET request takes about 6 seconds, so sending all 1676 sequentially would take about 3 hours.
How can I reduce the total request time?
The code below sends a single HTTP GET request.
I want to call the API once for each entry in a List that holds the 1676 items of information.
package com.wook.controller;
import java.time.Duration;
import org.mybatis.spring.annotation.MapperScan;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.http.HttpStatus;
import org.springframework.http.client.reactive.ReactorClientHttpConnector;
import org.springframework.web.reactive.function.client.WebClient;
import org.springframework.web.util.DefaultUriBuilderFactory;
import com.wook.model.dto.ApiKey;
import com.wook.model.dto.Item;
import com.wook.model.dto.Items;
import com.wook.model.dto.SweatherRootRes;
import com.wook.model.dto.Temperature;
import com.wook.service.GeoService;
import reactor.core.publisher.Mono;
import reactor.netty.http.client.HttpClient;
@SpringBootApplication(scanBasePackages= {"com.wook.model","com.wook.weather","com.wook.service"})
@MapperScan("com.wook.model.dao")
public class Application implements CommandLineRunner{
private final static String BASE_URL = "http://apis.data.go.kr/1360000/VilageFcstInfoService_2.0";
private final ApiKey APK;
private SweatherRootRes result;
private Temperature temp;
private GeoService gs; //To get GeoService
private Logger logger = LoggerFactory.getLogger(Application.class); //class used for logging
@Autowired
public Application(ApiKey apk, SweatherRootRes result,Temperature temp, GeoService gs) {
this.APK = apk;
this.result = result;
this.temp = temp;
this.gs = gs;
}
public static void main(String[] args) {
SpringApplication.run(Application.class, args);
}
@Override
public void run( String... args ) throws Exception {
String serviceKey = APK.getApiKey();
String pageNo = "1";
String numOfRows = "290";
String dataType = "JSON";
String base_date = "20211110";
String base_time = "2300";
String nx = "55";
String ny = "127";
DefaultUriBuilderFactory factory = new DefaultUriBuilderFactory(BASE_URL); //create a DefaultUriBuilderFactory instance that controls how UriBuilders are created
factory.setEncodingMode(DefaultUriBuilderFactory.EncodingMode.VALUES_ONLY); //set the encoding mode
HttpClient client = HttpClient.create()
.responseTimeout(Duration.ofSeconds(60)); //set the HTTP response timeout to 60 seconds
WebClient wc = WebClient.builder()
.uriBuilderFactory(factory) //apply the URI encoding settings created above
.baseUrl(BASE_URL) //set the base URL
.clientConnector(new ReactorClientHttpConnector(client)) //apply the timeout settings created above
.build(); //and build
// This part iterates over the List<GeoInfo>
// for(GeoInfo gi : gs.getGeoXY()) {
// logger.info(gi.toString());
// }
Mono<SweatherRootRes> response = wc.get()
.uri(uriBuilder -> uriBuilder.path("/getVilageFcst")
.queryParam("serviceKey", serviceKey)
.queryParam("numOfRows", numOfRows)
.queryParam("pageNo", pageNo)
.queryParam("dataType", dataType)
.queryParam("base_date", base_date)
.queryParam("base_time", base_time)
.queryParam("nx", nx) //지역정보
.queryParam("ny", ny) //지역정보
.build()) //위 쿼리들로 uri 빌드를 하고
.retrieve() //http 요청하고
.onStatus(HttpStatus::is4xxClientError,
error -> Mono.error(new RuntimeException("API not found")))
.onStatus(HttpStatus::is5xxServerError,
error -> Mono.error(new RuntimeException("Server is not responding")))
.bodyToMono(SweatherRootRes.class);//receive the body as a Mono
//Asynchronous; this acts roughly like a callback, so the block below runs once the API call has succeeded.
response.subscribe(res -> {
result.setResponse(res.getResponse());
if(result.getResponse().getBody()!= null) {
logger.info("http request success");
getTemp(result.getResponse().getBody().getItems());
logger.info(temp.toString());
}else {
logger.error("http reqeust has failed");
}
});
}
public void getTemp(Items items) {
int count = 0;
for(Item item : items.getItem()) {
if(count >= 2 )
break;
if(item.getCategory().equals("TMX")) {
temp.setBestMax(item.getFcstValue());
temp.setNx(item.getNx());
temp.setNy(item.getNy());
temp.setDate(item.getBaseDate());
count++;
}
if(item.getCategory().equals("TMN")){
temp.setBestMin(item.getFcstValue());
count++;
}
}
}
}
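Since WebClient is non-blocking, the biggest win is to stop waiting for each response before sending the next request, e.g. by firing the calls concurrently with bounded parallelism via Flux.flatMap. A minimal sketch, not the asker's final code: it assumes a hypothetical GeoInfo with getNx()/getNy() accessors, which may differ from the real DTO.
import reactor.core.publisher.Flux;
// Issue all requests concurrently, with at most 50 in flight at a time.
Flux.fromIterable(gs.getGeoXY())
    .flatMap(gi -> wc.get()
            .uri(uriBuilder -> uriBuilder.path("/getVilageFcst")
                    .queryParam("serviceKey", serviceKey)
                    .queryParam("nx", gi.getNx()) // assumed accessor
                    .queryParam("ny", gi.getNy()) // assumed accessor
                    // ...remaining query params as in the single-request code...
                    .build())
            .retrieve()
            .bodyToMono(SweatherRootRes.class),
        50) // concurrency limit; tune to what the API tolerates
    .doOnNext(res -> logger.info("response received"))
    .blockLast(); // block only because this runs in a CommandLineRunner
With 50 requests in flight, the same 1676 calls should take on the order of minutes instead of hours, assuming the API tolerates that request rate.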

Related

Index data to Elasticsearch Cloud from AWS DynamoDB using Java

I want to index data from DynamoDB into an Elasticsearch Cloud SaaS cluster. My Elasticsearch endpoint URL looks like:
https://d9bc7cbca5ec49ea96a6ea683f70caca.eastus2.azure.elastic-cloud.com:1234
Whenever a CRUD operation happens in AWS DynamoDB, it should immediately be reflected in the Elasticsearch index.
May I know the best way to achieve this?
Hi all, thanks for all your support. I have attached the sample code that does the magic. I am able to index data from DynamoDB into Elasticsearch both inside and outside AWS, but you have to set proper permissions on the Lambda function, as suggested in the comments above.
package com.Firstlambda;
import com.amazonaws.services.lambda.runtime.Context;
import com.amazonaws.services.lambda.runtime.RequestHandler;
import com.amazonaws.auth.AWS4Signer;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;
import com.amazonaws.services.dynamodbv2.document.Item;
import com.amazonaws.services.dynamodbv2.document.ItemUtils;
import com.amazonaws.services.dynamodbv2.model.AttributeValue;
import com.amazonaws.services.lambda.runtime.events.DynamodbEvent;
import com.amazonaws.services.lambda.runtime.events.DynamodbEvent.DynamodbStreamRecord;
import org.apache.http.HttpHost;
import org.apache.http.HttpRequestInterceptor;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.xcontent.XContentType;
import org.json.JSONObject;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
public class HelloWorld implements RequestHandler<DynamodbEvent, String> {
private static String serviceName = "es";
private static String region = "us-east-1";
private static String aesEndpoint = "";
private static String index = "";
private static String type = "_doc";
static final AWSCredentialsProvider credentialsProvider = new DefaultAWSCredentialsProviderChain();
public String handleRequest(DynamodbEvent ddbEvent, Context context) {
for (DynamodbStreamRecord record : ddbEvent.getRecords()) {
System.out.println("EventName : " + record.getEventName());
System.out.println("EventName : " + record.getDynamodb());
//AWS outside
RestHighLevelClient esClient = esClient();
//AWS outside
//AWS Inside
//RestHighLevelClient esClient = esClient(serviceName, region);
//AWS Inside
if (record.getEventName().toLowerCase().equals("insert")) {
String JsonString = getJsonstring(record.getDynamodb().getNewImage());
String JsonUniqueId = GetIdfromJsonString(JsonString);
IndexRequest indexRequest = new IndexRequest(index, type, JsonUniqueId);
indexRequest.source(JsonString, XContentType.JSON);
try {
IndexResponse indexResponse = esClient.index(indexRequest, RequestOptions.DEFAULT);
System.out.println(indexResponse.toString());
return "Successfully processed " + ddbEvent.getRecords().size() + " records.";
} catch (IOException e) {
System.out.println(e.getMessage());
}
} else if (record.getEventName().toLowerCase().equals("modify")) {
String JsonString = getJsonstring(record.getDynamodb().getNewImage());
String JsonUniqueId = GetIdfromJsonString(JsonString);
UpdateRequest request = new UpdateRequest(index, type, JsonUniqueId);
String jsonString = JsonString;
request.doc(jsonString, XContentType.JSON);
try {
UpdateResponse updateResponse = esClient.update(
request, RequestOptions.DEFAULT);
System.out.println(updateResponse.toString());
return "Successfully processed " + ddbEvent.getRecords().size() + " records.";
} catch (IOException e) {
System.out.println(e.getMessage());
}
} else {
System.out.println("remove");
System.out.println("KEYID : " + record.getDynamodb().getKeys().get("ID").getN());
String deletedId = record.getDynamodb().getKeys().get("ID").getN();
DeleteRequest request = new DeleteRequest(index, type, deletedId);
try {
DeleteResponse deleteResponse = esClient.delete(
request, RequestOptions.DEFAULT);
} catch (IOException e) {
System.out.println(e.getMessage());
}
}
}
return "Successfullyprocessed";
}
public String getJsonstring(Map<String, AttributeValue> newIma) {
String json = null;
Map<String, AttributeValue> newImage = newIma;
List<Map<String, AttributeValue>> listOfMaps = new ArrayList<Map<String, AttributeValue>>();
listOfMaps.add(newImage);
List<Item> itemList = ItemUtils.toItemList(listOfMaps);
for (Item item : itemList) {
json = item.toJSON();
}
return json;
}
public String GetIdfromJsonString(String Json) {
JSONObject jsonObj = new JSONObject(Json);
return String.valueOf(jsonObj.getInt("ID"));
}
// Adds the interceptor to the ES REST client
// public static RestHighLevelClient esClient(String serviceName, String region) {
// AWS4Signer signer = new AWS4Signer();
// signer.setServiceName(serviceName);
// signer.setRegionName(region);
// HttpRequestInterceptor interceptor = new AWSRequestSigningApacheInterceptor(serviceName, signer, credentialsProvider);
// return new RestHighLevelClient(RestClient.builder(HttpHost.create(aesEndpoint)).setHttpClientConfigCallback(hacb -> hacb.addInterceptorLast(interceptor)));
// }
public static RestHighLevelClient esClient() {
String host = "d9bc7cbca5ec49ea9gfde6a6ea683f70caca.eastus2.azure.elastic-cloud.com";
int port = 9200;
String userName = "elastic";
String password = "L4Nfnle3wxsfgjgLmV95lPiUb46hp";
String protocol = "https";
final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
credentialsProvider.setCredentials(AuthScope.ANY,
new UsernamePasswordCredentials(userName, password));
RestClientBuilder builder = RestClient.builder(new HttpHost(host, port, protocol))
.setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider));
RestHighLevelClient client = new RestHighLevelClient(builder);
return client;
}
}

Confluent Kafka Avro deserializer for spring boot kafka listener

Has somebody implemented a Confluent Kafka message deserializer to consume Kafka messages with Spring @KafkaListener methods?
Here is my answer, which I've implemented based on io.confluent.kafka.serializers.AbstractKafkaAvroDeserializer:
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Map;
import javax.xml.bind.DatatypeConverter;
import org.apache.avro.Schema;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.avro.specific.SpecificRecordBase;
import org.apache.kafka.common.errors.SerializationException;
import org.apache.kafka.common.serialization.Deserializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class AvroConfluentDeserializer<T extends SpecificRecordBase> implements Deserializer<T> {
private static final Logger LOG = LoggerFactory.getLogger(AvroConfluentDeserializer.class);
protected static final byte MAGIC_BYTE = 0x0;
protected static final int idSize = 4;
private final DecoderFactory decoderFactory = DecoderFactory.get();
protected final Class<T> targetType;
public AvroConfluentDeserializer(Class<T> targetType) {
this.targetType = targetType;
}
@Override
public void close() {
// No-op
}
@Override
public void configure(Map<String, ?> arg0, boolean arg1) {
// No-op
}
@Override
public T deserialize(String topic, byte[] data) {
try {
T result = null;
if (data != null) {
LOG.info("data='{}'", DatatypeConverter.printHexBinary(data));
result = (T) deserializePayload(data, targetType.newInstance().getSchema());
LOG.info("deserialized data='{}'", result);
}
return result;
} catch (Exception ex) {
throw new SerializationException(
"Can't deserialize data '" + Arrays.toString(data) + "' from topic '" + topic + "'", ex);
}
}
protected T deserializePayload(byte[] payload, Schema schema) throws SerializationException {
int id = -1;
try {
ByteBuffer buffer = getByteBuffer(payload);
id = buffer.getInt();
int length = buffer.limit() - 1 - idSize;
int start = buffer.position() + buffer.arrayOffset();
DatumReader<T> reader = new SpecificDatumReader<T>(schema);
return reader.read(null, decoderFactory.binaryDecoder(buffer.array(), start, length, null));
} catch (IOException | RuntimeException e) {
throw new SerializationException("Error deserializing Avro message for id " + id, e);
}
}
private ByteBuffer getByteBuffer(byte[] payload) {
ByteBuffer buffer = ByteBuffer.wrap(payload);
if (buffer.get() != MAGIC_BYTE) {
throw new SerializationException("Unknown magic byte!");
}
return buffer;
}
}
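For reference, here is a minimal wiring sketch (assumed, not part of the original answer) that plugs the deserializer into a Spring Kafka consumer factory; MyRecord stands in for any generated Avro SpecificRecord class, and the broker address and group id are placeholders.
import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;

Map<String, Object> props = new HashMap<>();
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // placeholder broker
props.put(ConsumerConfig.GROUP_ID_CONFIG, "my-group");                // placeholder group id
// The key stays a String; the value is decoded by the custom deserializer above.
ConsumerFactory<String, MyRecord> consumerFactory = new DefaultKafkaConsumerFactory<>(
        props,
        new StringDeserializer(),
        new AvroConfluentDeserializer<>(MyRecord.class));
A KafkaListenerContainerFactory built on this ConsumerFactory then lets @KafkaListener methods receive MyRecord instances directly.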

Is this ClassCastException an HtmlUnit bug?

I'm new to HtmlUnit (2.23) and I can't get this test to work.
I'm getting this ClassCastException thrown out of HtmlUnit, and I don't know if it is a bug or if I am doing something wrong.
java.lang.ClassCastException: com.gargoylesoftware.htmlunit.TextPage cannot be cast to com.gargoylesoftware.htmlunit.html.HtmlPage
at com.gargoylesoftware.htmlunit.WebClient.makeWebResponseForJavaScriptUrl(WebClient.java:1241)
at com.gargoylesoftware.htmlunit.WebClient.getPage(WebClient.java:375)
at com.gargoylesoftware.htmlunit.WebClient.getPage(WebClient.java:304)
at com.gargoylesoftware.htmlunit.WebClient.getPage(WebClient.java:451)
at com.gargoylesoftware.htmlunit.WebClient.getPage(WebClient.java:436)
at org.wyttenbach.dale.mlec.OutageTest.test(OutageTest.java:46)
...
The code
import java.awt.Desktop;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.junit.Assert;
import org.junit.Test;
import com.gargoylesoftware.htmlunit.FailingHttpStatusCodeException;
import com.gargoylesoftware.htmlunit.JavaScriptPage;
import com.gargoylesoftware.htmlunit.NicelyResynchronizingAjaxController;
import com.gargoylesoftware.htmlunit.Page;
import com.gargoylesoftware.htmlunit.TextPage;
import com.gargoylesoftware.htmlunit.WebClient;
import com.gargoylesoftware.htmlunit.html.DomElement;
import com.gargoylesoftware.htmlunit.html.HtmlPage;
public class OutageTest {
private static final String SITE_URL = "https://ebill.mlecmn.net/woViewer/";
private static final String OUTAGE_MAP_URL = SITE_URL + "mapviewer.html?config=Outage+Web+Map";
@Test
public void test() throws FailingHttpStatusCodeException, MalformedURLException, IOException {
try (final WebClient webClient = new WebClient()) {
webClient.waitForBackgroundJavaScript(20000);
webClient.setAjaxController(new NicelyResynchronizingAjaxController());
webClient.getOptions().setUseInsecureSSL(true);
Map<String, Page> urls = new HashMap<String, Page>();
LinkedList<String> urlsToVisit = new LinkedList<String>();
urlsToVisit.add(OUTAGE_MAP_URL);
while (!urlsToVisit.isEmpty()) {
String url = urlsToVisit.remove();
if (urls.containsKey(url)) {
continue;
}
Page page = webClient.getPage(url);
urls.put(url, page);
if (page instanceof HtmlPage) {
HtmlPage page2 = (HtmlPage) page;
System.err.println("================================================================");
System.err.println(page2.asXml());
System.err.println("================================================================");
Assert.assertFalse("Outage in Nordland township: " + url, page2.asText().contains("Nordland"));
urlsToVisit.addAll(extractLinks(page2));
} else if (page instanceof JavaScriptPage) {
JavaScriptPage page2 = (JavaScriptPage) page;
Assert.assertFalse("Outage in Nordland township: " + url, page2.getContent().contains("Nordland"));
} else if (page instanceof TextPage) {
TextPage page2 = (TextPage) page;
Assert.assertFalse("Outage in Nordland township: " + url, page2.getContent().contains("Nordland"));
} else {
System.err.println(String.format("%s => %s", url, page.getClass().getName()));
}
}
} catch (AssertionError e) {
reportOutage();
throw e;
}
}
private Collection<String> extractLinks(HtmlPage page) {
List<String> links = new ArrayList<String>();
for (DomElement x : page.getElementsByTagName("script")) {
String src = x.getAttribute("src");
if (!src.contains(":")) {
src = SITE_URL + src;
System.err.println("script src="+src);
}
links.add(src);
}
for (DomElement x : page.getElementsByTagName("link")) {
String href = x.getAttribute("href");
if (!href.contains(":")) {
href = SITE_URL + href;
System.err.println("link href="+href);
}
links.add(href);
}
// Causes ClassCastException com.gargoylesoftware.htmlunit.TextPage cannot be cast to com.gargoylesoftware.htmlunit.html.HtmlPage
//at com.gargoylesoftware.htmlunit.WebClient.makeWebResponseForJavaScriptUrl(WebClient.java:1241)
for (DomElement x : page.getElementsByTagName("iframe")) {
String src = x.getAttribute("src");
if (!src.contains(":")) {
src = SITE_URL + src;
System.err.println("iframe src="+src);
}
links.add(src);
}
return links;
}
private void reportOutage() {
try {
Desktop.getDesktop().browse(new URI(OUTAGE_MAP_URL));
} catch (Exception e) {
e.printStackTrace();
}
}
}
More or less, yes - but I have to do a deeper analysis.
But there is some hope for you ;-)
Your code tries to extract URLs from a given web page. During the process you add the URL 'javascript:""' to your list of URLs to be processed. This URL results in the class cast exception. If you do not add this URL to the list, the test works (at least for me).
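A minimal sketch of that workaround (assumed placement, inside the asker's extractLinks loop): skip javascript: pseudo-URLs before they are queued.
// Skip javascript: pseudo-URLs; fetching them is what triggers the
// TextPage -> HtmlPage ClassCastException in HtmlUnit 2.23.
for (DomElement x : page.getElementsByTagName("iframe")) {
    String src = x.getAttribute("src");
    if (src.startsWith("javascript:")) {
        continue;
    }
    if (!src.contains(":")) {
        src = SITE_URL + src;
    }
    links.add(src);
}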

HBase coprocessor failing to load from shell

I am trying to add a coprocessor to an HBase table, and it is failing with this error:
2016-03-15 14:40:14,130 INFO org.apache.hadoop.hbase.regionserver.RSRpcServices: Open PRODUCT_DETAILS,,1457953190424.f687dd250bfd1f18ffbb8075fd625145.
2016-03-15 14:40:14,173 ERROR org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost: Failed to load coprocessor com.optymyze.coprocessors.ProductObserver
java.io.IOException: Failed on local exception: com.google.protobuf.InvalidProtocolBufferException: Protocol message end-group tag did not match expected tag.; Host Details : local host is: "mylocalhost/mylocalhostip"; destination host is: "mydestinationhost":9000;
at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:772)
To add the coprocessor I did the following:
hbase> disable 'PRODUCT_DETAILS'
hbase> alter 'PRODUCT_DETAILS', METHOD => 'table_att', 'coprocessor'=>'hdfs://mydestinationhost:9000/hbase-coprocessors-0.0.3-SNAPSHOT.jar|com.optymyze.coprocessors.ProductObserver|1001|arg1=1,arg2=2'
Now enable 'PRODUCT_DETAILS' won't work.
The coprocessor code is as follows:
package com.optymyze.coprocessors;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.util.Bytes;
import org.slf4j.Logger;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.slf4j.LoggerFactory.*;
/**
*
* Created by adnan on 14-03-2016.
*/
public class ProductObserver extends BaseRegionObserver {
private static final Logger LOGGER = getLogger(ProductObserver.class);
private static final String PRODUCT_DETAILS_TABLE = "PRODUCT_DETAILS";
public static final String COLUMN_FAMILY = "CF";
@Override
public void postPut(ObserverContext<RegionCoprocessorEnvironment> e, Put put, WALEdit edit, boolean writeToWAL) throws IOException {
List<KeyValue> kvs = put.getFamilyMap().get(Bytes.toBytes(COLUMN_FAMILY));
LOGGER.info("key values {}", kvs);
Map<String, Integer> qualifierVsValue = getMapForQualifierVsValuesForRequiredOnes(kvs);
LOGGER.info("qualifier values {}", qualifierVsValue);
List<Put> puts = createPuts(kvs, qualifierVsValue);
LOGGER.info("puts values {}", puts);
updateProductTable(e, puts);
LOGGER.info("puts done");
}
private void updateProductTable(ObserverContext<RegionCoprocessorEnvironment> e, List<Put> puts) throws IOException {
HTableInterface productTable = e.getEnvironment().getTable(Bytes.toBytes(PRODUCT_DETAILS_TABLE));
try {
productTable.put(puts);
}finally {
productTable.close();
}
}
private List<Put> createPuts(List<KeyValue> kvs, Map<String, Integer> qualifierVsValue) {
int salePrice, baseline = 0, finalPrice = 0;
List<Put> puts = new ArrayList<Put>(kvs.size());
for (KeyValue kv : kvs) {
if (kv.matchingQualifier(Bytes.toBytes("BASELINE"))) {
baseline = convertToZeroIfNull(qualifierVsValue, "PRICE")
- convertToZeroIfNull(qualifierVsValue, "PRICE")
* convertToZeroIfNull(qualifierVsValue, "DISCOUNT") / 100;
puts.add(newPut(kv, baseline));
}
if (kv.matchingQualifier(Bytes.toBytes("FINALPRICE"))) {
finalPrice = baseline + baseline * convertToZeroIfNull(qualifierVsValue, "UPLIFT") / 100;
puts.add(newPut(kv, finalPrice));
}
if (kv.matchingQualifier(Bytes.toBytes("SALEPRICE"))) {
salePrice = finalPrice * convertToZeroIfNull(qualifierVsValue, "VOLUME");
puts.add(newPut(kv, salePrice));
}
}
return puts;
}
private Map<String, Integer> getMapForQualifierVsValuesForRequiredOnes(List<KeyValue> kvs) {
Map<String, Integer> qualifierVsValue = new HashMap<String, Integer>();
for (KeyValue kv : kvs) {
getValueFromQualifier(kv, "PRICE", qualifierVsValue);
getValueFromQualifier(kv, "DISCOUNT", qualifierVsValue);
getValueFromQualifier(kv, "UPLIFT", qualifierVsValue);
getValueFromQualifier(kv, "VOLUME", qualifierVsValue);
}
return qualifierVsValue;
}
private Integer convertToZeroIfNull(Map<String, Integer> qualifierVsValue, String qualifier) {
Integer v = qualifierVsValue.get(qualifier);
return v == null ? 0 : v;
}
private void getValueFromQualifier(KeyValue kv, String qualifier, Map<String, Integer> qualifierVsValue) {
if (kv.matchingQualifier(Bytes.toBytes(qualifier))) {
qualifierVsValue.put(qualifier, Bytes.toInt(convertToByteZeroIfNull(kv)));
}
}
private Put newPut(KeyValue kv, int newVal) {
Put put = new Put(kv.getValue(), kv.getTimestamp());
put.add(kv.getFamily(), kv.getQualifier(), Bytes.toBytes(newVal));
return put;
}
private byte[] convertToByteZeroIfNull(KeyValue kv) {
return kv.getValue() == null ? Bytes.toBytes(0) : kv.getValue();
}
}
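For comparison, here is a minimal sketch (assumed HBase 1.x admin API, not from the question) that attaches the same observer programmatically instead of through the shell's table_att method:
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

Configuration conf = HBaseConfiguration.create();
try (Connection conn = ConnectionFactory.createConnection(conf);
     Admin admin = conn.getAdmin()) {
    TableName table = TableName.valueOf("PRODUCT_DETAILS");
    admin.disableTable(table);
    HTableDescriptor desc = admin.getTableDescriptor(table);
    // Same jar path and priority as the shell alter command above.
    desc.addCoprocessor("com.optymyze.coprocessors.ProductObserver",
            new Path("hdfs://mydestinationhost:9000/hbase-coprocessors-0.0.3-SNAPSHOT.jar"),
            Coprocessor.PRIORITY_USER, null);
    admin.modifyTable(table, desc);
    admin.enableTable(table);
}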

could not initialize proxy - no Session

[ERROR] could not initialize proxy - no Session
org.hibernate.LazyInitializationException: could not initialize proxy - no Session
at org.hibernate.proxy.AbstractLazyInitializer.initialize(AbstractLazyInitializer.java:57)
at org.hibernate.proxy.AbstractLazyInitializer.getImplementation(AbstractLazyInitializer.java:111)
at org.hibernate.proxy.pojo.cglib.CGLIBLazyInitializer.invoke(CGLIBLazyInitializer.java:150)
at com.model.dto.Distination$$EnhancerByCGLIB$$f4a8517c.getLibileDis(<generated>)
at com.business.impl.MeteobussinesImpl.afficherMeteo1(MeteobussinesImpl.java:30)
at com.test.Tester.main(Tester.java:32)
[ERROR] could not initialize proxy - no Session
org.hibernate.LazyInitializationException: could not initialize proxy - no Session
at org.hibernate.proxy.AbstractLazyInitializer.initialize(AbstractLazyInitializer.java:57)
at org.hibernate.proxy.AbstractLazyInitializer.getImplementation(AbstractLazyInitializer.java:111)
at org.hibernate.proxy.pojo.cglib.CGLIBLazyInitializer.invoke(CGLIBLazyInitializer.java:150)
at com.model.dto.Distination$$EnhancerByCGLIB$$f4a8517c.toString(<generated>)
at com.model.dto.Distination$$EnhancerByCGLIB$$f4a8517c.getLibileDis(<generated>)
at com.business.impl.MeteobussinesImpl.afficherMeteo1(MeteobussinesImpl.java:30)
at com.test.Tester.main(Tester.java:32)
1. The main class
package com.test;
import java.util.Iterator;
import java.util.List;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import com.business.Meteobussines;
import com.business.impl.MeteobussinesImpl;
import com.model.vo.Meteo;
public class Tester {
static public void displayList(List list) {
Iterator iter = list.iterator();
if (!iter.hasNext()) {
System.out.println("The list is empty");
return;
}
while (iter.hasNext()) {
Meteo ct = (Meteo) iter.next();
System.out.println("tempsMax :" + ct.getTempMax() + " pays :" + ct.getLibilePays() + " distination :" + ct.getLibileDistination());
}
}
public static void main(String[] args) {
// TODO Auto-generated method stub
String[] configArray = new String[] { "/com/resource/spring/ApplicationContext.xml",
"/com/resource/spring/ApplicationContextDao.xml","/com/resource/spring/ApplicationContextBusiness.xml"};
ApplicationContext ctx = new ClassPathXmlApplicationContext(configArray);
Meteobussines mete = (Meteobussines) ctx.getBean("MeteoBuss");
List<Meteo> m = mete.afficherMeteo1("tounes");
displayList(m);
}
}
2. The MeteobussinesImpl class
package com.business.impl;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import com.business.Meteobussines;
import com.dao.HistoriqueDao;
import com.dao.impl.HistoriqueDaoImpl;
import com.model.dto.Historique;
import com.model.vo.Meteo;
public class MeteobussinesImpl implements Meteobussines {
HistoriqueDao historiqueDao ;
@SuppressWarnings("null")
@Override
public List<Meteo> afficherMeteo1(String pays) {
List<Historique> hiss= historiqueDao.rechercher(pays);
List<Meteo> m = new ArrayList<Meteo>(); ;
Iterator iter = hiss.iterator();
if (!iter.hasNext())
{ System.out.println("La lsite est vide"); }
while (iter.hasNext()) {
Historique ct = (Historique) iter.next();
Meteo me =new Meteo();
me.setDateHis(ct.getDateHis());
me.setLibileDistination(ct.getDistination().getLibileDis());
me.setLibilePays(ct.getPays().getLibilePays());
me.setLibileVille(ct.getVille().getLibileVille());
me.setTempMax(ct.getTempMax());
me.setTempMin(ct.getTempMin());
m.add(me);
}
return m;
// TODO Auto-generated method stub
}
@Override
public List<Meteo> afficherMeteo2(String pays, String ville) {
// TODO Auto-generated method stub
return null;
}
@Override
public List<Meteo> afficherMeteo3(String pays, String ville,
String distination) {
// TODO Auto-generated method stub
return null;
}
public HistoriqueDao getHistoriqueDao() {
return historiqueDao;
}
public void setHistoriqueDao(HistoriqueDao historiqueDao) {
this.historiqueDao = historiqueDao;
}
}
3. The HistoriqueDaoImpl class
package com.dao.impl;
import java.util.List;
import org.hibernate.Criteria;
import org.hibernate.FetchMode;
import org.hibernate.criterion.Expression;
import com.dao.HistoriqueDao;
import com.model.dto.Historique;
@SuppressWarnings("unchecked")
public class HistoriqueDaoImpl extends GenericDaoImpl implements HistoriqueDao {
@Override
public List<Historique> rechercher(String critere1, String critere2,
String critere3) {
Criteria crit = getSession().createCriteria(Historique.class);
crit.setFetchMode("pays", FetchMode.JOIN);
crit.createAlias("pays", "p");
crit.add(Expression.eq("p.libilePays", critere1));
crit.setFetchMode("ville", FetchMode.JOIN);
crit.createAlias("ville", "b");
crit.add(Expression.eq("b.libileVille", critere2));
crit.setFetchMode("distination", FetchMode.JOIN);
crit.createAlias("distination", "d");
crit.add(Expression.eq("d.libileDis", critere3));
List<Historique> his = crit.list();
return his;
}
@Override
public List<Historique> rechercher(String critere1, String critere2) {
// TODO Auto-generated method stub
Criteria crit = getSession().createCriteria(Historique.class);
crit.setFetchMode("pays", FetchMode.JOIN);
crit.createAlias("pays", "p");
crit.add(Expression.eq("p.libilePays", critere1));
crit.setFetchMode("ville", FetchMode.JOIN);
crit.createAlias("ville", "b");
crit.add(Expression.eq("b.libileVille", critere2));
List<Historique> his = crit.list();
return his;
}
@Override
public List<Historique> rechercher(String critere1) {
// TODO Auto-generated method stub
Criteria crit = getSession().createCriteria(Historique.class);
crit.setFetchMode("pays", FetchMode.JOIN);
crit.createAlias("pays", "p");
crit.add(Expression.eq("p.libilePays", critere1));
List<Historique> his = crit.list();
return his;
}
}
4. I use DaoSupport to initialize this in Spring.
The stack trace points to this line:
me.setLibileDistination(ct.getDistination().getLibileDis());
ct is a Historique proxy, i.e. a placeholder object whose fields (including the one you obtain through getDistination()) are not yet initialized.
Please initialize the fields you are interested in before committing the Hibernate transaction:
Hibernate.initialize(ct.getDistination());
crit.setFetchMode("pays", FetchMode.JOIN);
crit.setFetchMode("ville", FetchMode.JOIN);
crit.setFetchMode("distination", FetchMode.JOIN);
This was the solution to the problem, because in every case I have to set FetchMode.JOIN.
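For placement, a minimal sketch (assumed, not from the original answer): force-load the lazy associations inside the DAO method, while the Session is still open.
import org.hibernate.Hibernate;

List<Historique> his = crit.list();
for (Historique h : his) {
    // Force-load the lazy proxies before the Session closes.
    Hibernate.initialize(h.getDistination());
    Hibernate.initialize(h.getPays());
    Hibernate.initialize(h.getVille());
}
return his;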
