Not getting a response for a query in Flink queryable state [version 1.7.2] - Java 8

I am querying the proxy server of a Flink cluster, which is at 127.0.1.1:9069, but I am not getting a response to the query. I am calculating the sum of all input numbers by creating a socket server on port 9000, and I am storing the sum in a ValueState.
Flink Job:
private transient ValueState<Tuple2<String, Long>> sum;
@Override
public void flatMap(Tuple2<Long, Long> input, Collector<Tuple2<String,Long>> out) throws Exception {
if (input.f1==-1){
sum.clear();
return;
}
Tuple2<String, Long> currentSum = sum.value();
currentSum.f1 += input.f1;
sum.update(currentSum);
System.out.println("Current Sum: "+(sum.value().f1)+"\nCurrent Count: "+(sum.value().f0));
out.collect(new Tuple2<>("sum", sum.value().f1));
}
@Override
public void open(Configuration config) {
ValueStateDescriptor<Tuple2<String, Long>> descriptor =
new ValueStateDescriptor<>(
"sum", // the state name
TypeInformation.of(new TypeHint<Tuple2<String, Long>>() {}),
Tuple2.of("sum", 0L)); // default value of the state, if nothing was set
sum = getRuntimeContext().getState(descriptor);
}
inp.flatMap(new FlatMapFunction<String, Tuple2<Long, Long>>() {
@Override
public void flatMap(String inpstr, Collector<Tuple2<Long, Long>> out) throws Exception{
for (String word : inpstr.split("\\s")) {
try {
if(word.equals("quit")){
throw new QuitValueState( "Stoppping!!!",hostname,port);
}
if(word.equals("clear")){
word="-1";
}
out.collect(Tuple2.of(1L, Long.valueOf(word)));
}
catch ( NumberFormatException e) {
System.out.println("Enter valid number: "+e.getMessage());
}catch (QuitValueState ex){
System.out.println("Quitting!!!");
}
}
}
}).keyBy(0).flatMap(new StreamingJob())
.keyBy(0).asQueryableState("query-name");
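For reference, an alternative to the .asQueryableState("query-name") sink used above is to mark the operator's own keyed state as queryable inside open(). A minimal sketch against the descriptor defined above (the name passed to setQueryable is the name the client has to query):

@Override
public void open(Configuration config) {
    ValueStateDescriptor<Tuple2<String, Long>> descriptor =
            new ValueStateDescriptor<>(
                    "sum",
                    TypeInformation.of(new TypeHint<Tuple2<String, Long>>() {}));
    // Expose this keyed state to the queryable state proxy under the name
    // the client passes to getKvState ("query-name" in the client code below).
    descriptor.setQueryable("query-name");
    sum = getRuntimeContext().getState(descriptor);
}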
On the Flink cluster I am able to see the proxy server at 127.0.1.1:9069.
Client side:
public static void main(String[] args) throws IOException, InterruptedException, Exception {
QueryableStateClient client = new QueryableStateClient("127.0.1.1", 9069);
System.out.println("Querying on "+args[0]);
JobID jobId = JobID.fromHexString(args[0]);
ValueStateDescriptor<Tuple2<String, Long>> descriptor =
new ValueStateDescriptor<>(
"sum",
TypeInformation.of(new TypeHint<Tuple2<String, Long>>() {
}));
CompletableFuture<ValueState<Tuple2<String, Long>>> resultFuture =
client.getKvState(jobId, "query-name", "sum", BasicTypeInfo.STRING_TYPE_INFO, descriptor);
System.out.println(resultFuture);
resultFuture.thenAccept(response -> {
try {
Tuple2<String, Long> res = response.value();
System.out.println("Queried sum value: " + res);
} catch (Exception e) {
e.printStackTrace();
}
System.out.println("Exiting future ...");
});
}
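Note that main() only registers the thenAccept callback and then falls through, so the result may never be printed if the process ends before the asynchronous query completes. A minimal blocking variant of the same query, assuming the jobId, descriptor and key type used above (java.util.concurrent.TimeUnit is needed for the timeout):

CompletableFuture<ValueState<Tuple2<String, Long>>> resultFuture =
        client.getKvState(jobId, "query-name", "sum",
                BasicTypeInfo.STRING_TYPE_INFO, descriptor);
// Block until the proxy answers or the query fails; a server-side problem
// (state not registered yet, key/type mismatch, proxy unreachable) then
// surfaces here as an exception instead of being lost silently.
Tuple2<String, Long> res = resultFuture.get(30, TimeUnit.SECONDS).value();
System.out.println("Queried sum value: " + res);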

Related

Spring JDBC insert into DB will not work

For some reason my JdbcTemplate is not picking up the info in my application.properties file. I can only insert into my DB when I make my own connection, exposing my username and password in the file. Why isn't Spring Boot picking this up?
I am calling the addTaglocation method while I am still reading tags from multiple RFID readers. Could this be causing the issue? If so, how do I solve it?
I start reading tags from the multiple RFID readers from a controller.
Full class where I call the create method (via the addTaglocation method):
public class TagInventory extends AlienClass1Reader implements
MessageListener, TagTableListener{
private final int MAX_THREAD = 50;
private Thread[] m_run_process = new Thread[MAX_THREAD];
private AlienReader[] m_inventory = new AlienReader[MAX_THREAD];
public boolean stopInventory = false;
ReaderProfileService rps;
VehicleService vs;
private boolean ThreadStop = true;
private int lastThreadId = -1;
private MessageListenerService service;
private TagTable tagTable = new TagTable();
private TagTableListener tagTableListener;
private static final Logger log =LogManager.getLogger(TagInventory.class);
@Autowired
TaglocationDAOJdbc taglocationDAOJdbc;
public TagInventory() throws AlienReaderException, IOException{
Start();
}
public void stopTag() throws AlienReaderException{
Stop();
}
private void Stop() throws AlienReaderException{
ThreadStop = true;
for (lastThreadId=0; lastThreadId < Reader.ipAddress.length;
lastThreadId++){
if(m_inventory[lastThreadId] != null){
m_inventory[lastThreadId].stopInventory = true;
service.stopService();
try{
Thread.sleep(200);
}catch(Exception e){
e.getMessage();
}
//m_inventory[lastThreadId].close();
m_inventory[lastThreadId].open();
m_inventory[lastThreadId].autoModeReset();
m_inventory[lastThreadId].setAutoMode(AlienClass1Reader.OFF);
m_inventory[lastThreadId].setNotifyMode(AlienClass1Reader.OFF);
m_inventory[lastThreadId].close();
}
}
}
private void Start() throws AlienReaderException, IOException{
ThreadStop = false;
service= new MessageListenerService(3900);
service.setMessageListener(this);
service.startService();
for (lastThreadId = 0; lastThreadId < Reader.ipAddress.length; lastThreadId++)
{
m_inventory[lastThreadId] = new AlienReader(Reader.ipAddress[lastThreadId], Reader.port, Reader.username[lastThreadId], Reader.password[lastThreadId]);
log.info("taginventory reader: "+ Reader.ipAddress[lastThreadId]+"Thread: "+lastThreadId);
m_run_process[lastThreadId] = new Thread(new StartInventoryThread(Reader.ipAddress[lastThreadId], Reader.port, Reader.username[lastThreadId], Reader.password[lastThreadId], m_inventory[lastThreadId]));
m_run_process[lastThreadId].start();
}
--lastThreadId;
try
{
Thread.sleep(1000);
}
catch (Exception ex)
{
ex.getMessage();
}
}
class StartInventoryThread implements Runnable{
private String ip;
private int port;
private String user;
private String pwd;
private AlienReader ar;
StartInventoryThread(String ip, int port, String user, String pwd, AlienReader ar){
this.ip=ip;
this.port=port;
this.user=user;
this.pwd=pwd;
this.ar=ar;
}
@Override
public void run() {
try {
while(!stopInventory){
startRead(ip,port,user,pwd);
}
} catch (AlienReaderException | InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (UnknownHostException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
@Override
public void tagAdded(Tag tag) {
log.info("NEW TAG: " + tag.getTagID()+ " LAST SEEN DATE: "+ tag.getRenewTime());
}
@Override
public void tagRemoved(Tag tag) {
// TODO Auto-generated method stub
}
@Override
public void tagRenewed(Tag tag) {
// TODO Auto-generated method stub
}
@Override
public synchronized void messageReceived(Message msg) {
if(msg instanceof ErrorMessage){
// log.info("Notify error from " + msg.getReaderIPAddress());
}else if (msg.getTagCount() == 0){
log.info("No tags!");
}else{
//log.info("Message received from: "+msg.getReaderIPAddress());
Tag[] tagL=msg.getTagList();
//String[] tagLString=new String[tagL.length];
for (int i=0;i<msg.getTagCount(); i++){
Tag tag = msg.getTag(i);
//System.out.println("Tag ID: "+tag.getTagID()+ " Last Seen: "+tag.getRenewTime());
this.tagTable.addTag(tag);
// log.info("Tag ID: "+tag.getTagID()+ " Last Seen: "+tag.getRenewTime()+ " Receive Antenna: "+tag.getReceiveAntenna());
//System.out.println("Tag: "+tag+ " Last Seen: "+tag.getRenewTime());
}
}
//check readerprofile
try {
updateLocation(Reader.ipAddress[this.lastThreadId]);
this.tagTable.removeOldTags();
} catch (NoReaderInfoException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (NoVehicleException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
public void addTaglocation(Taglocation tl){
Taglocation newtagloca= new Taglocation(tl.getReadername(),tl.getRfidtag(),tl.getZone(), tl.getTaggingsource(), tl.getVineight(), tl.getZonedate());
taglocationDAOJdbc.addtaglocation(newtagloca);
log.info("ADDED TAGG INFO!");
}
public void updateLocation(String read) throws NoReaderInfoException, NoVehicleException{
//update location of vehicle when tag read by readers
log.info("IN UPDATE LOCATION with reader: "+read);
Tag[] temp;
temp=this.tagTable.getTagList();
for (int i = 0; i < temp.length; i++) {
log.info("get taglist");
Tag tag = temp[i];
String rfid=tag.getTagID();
Timestamp newDate=new Timestamp(Calendar.getInstance().getTime().getTime());
Taglocation tl=new Taglocation(read, rfid, read, "", "vineight", newDate);
addTaglocation(tl);
}
}
public void startRead(String ip, int port, String user, String password) throws AlienReaderException, InterruptedException, UnknownHostException{
String myIP=InetAddress.getLocalHost().getHostAddress();
System.out.println("ip"+ ip);
AlienReader ar= new AlienReader(ip, port, user, password);
ar.open();
log.info("Reader" + ar.getIPAddress());
ar.setNotifyAddress(myIP, 3900);
ar.setNotifyFormat(AlienClass1Reader.TEXT_FORMAT);
ar.setNotifyTrigger("TrueFalse");
ar.setNotifyMode(AlienClass1Reader.ON);
ar.autoModeReset();
ar.setAutoStopTimer(5000); // Read for 5 seconds
ar.setAutoMode(AlienClass1Reader.ON);
tagTable.setTagTableListener(tagTableListener);
tagTable.setPersistTime(5000);
//tagTable.setPersistTime(1800000);
ar.close();
long runTime = 10000; // milliseconds
long startTime = System.currentTimeMillis();
do {
Thread.sleep(1000);
} while(service.isRunning()
&& (System.currentTimeMillis()-startTime) < runTime);
// Reconnect to the reader and turn off AutoMode and TagStreamMode.
log.info("\nResetting Reader");
ar.open();
ar.autoModeReset();
ar.setNotifyMode(AlienClass1Reader.OFF);
ar.close();
}
}
(UPDATED)
Application.properties file:
spring.datasource.url=jdbc:mysql://localhost:3306/DB
spring.datasource.username=
spring.datasource.password=
spring.datasource.driver-class-name=com.mysql.jdbc.Driver
(UPDATED)
taglocationJDBCTemplate file
@Repository
public class TaglocationJDBCTemplate implements TaglocationDAO {
@Autowired
JdbcTemplate jdbcTemplate;
public Integer create(Taglocation tl){
KeyHolder keyHolder = new GeneratedKeyHolder();
jdbcTemplate.update(new PreparedStatementCreator(){
public PreparedStatement createPreparedStatement(final Connection connection) throws SQLException {
PreparedStatement ps = connection.prepareStatement("INSERT INTO TAGLOCATION (READERNAME, RFIDTAG, TAGGINGSOURCE, ZONE, VINEIGHT, ZONEDATE) VALUES (?,?,?,?,?,?)",
Statement.RETURN_GENERATED_KEYS);
ps.setString(1, tl.getReadername());
ps.setString(2,tl.getRfidtag());
ps.setString(3, tl.getTaggingsource());
ps.setString(4, tl.getZone());
ps.setString(5, tl.getVineight());
ps.setTimestamp(6, new Timestamp(System.currentTimeMillis()));
return ps;
}
}, keyHolder);
return (Integer) keyHolder.getKey();
}
}
controller file:
@RestController
public class TagInventoryController {
@Autowired
TagService tagService;
@RequestMapping("/runTasks")
public String tagRead(){
tagService.startTagRead();
return "Readers are activated.";
}
}
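One thing that matters for the @Autowired TaglocationDAOJdbc field: Spring only injects it into objects that Spring itself creates. If TagInventory (or whatever object ends up holding the DAO) is constructed with new inside the reader code, the field stays null and nothing is written, even when the spring.datasource.* properties are correct. A minimal sketch of keeping the DAO in a Spring-managed bean and handing it down explicitly; the TagService body here is hypothetical, since its real implementation is not shown in the question:

@Service
public class TagService {

    private final TaglocationDAOJdbc taglocationDAOJdbc;

    // Constructor injection works because Spring instantiates this bean;
    // an @Autowired field stays null on objects built with 'new'.
    public TagService(TaglocationDAOJdbc taglocationDAOJdbc) {
        this.taglocationDAOJdbc = taglocationDAOJdbc;
    }

    public void startTagRead() {
        // Pass the already-injected DAO (or this service) into the reader
        // classes instead of relying on @Autowired inside objects they create
        // themselves, e.g. new TagInventory(taglocationDAOJdbc) with a
        // hypothetical constructor that accepts the DAO.
    }
}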

How to use Netty's channel pool map as a ConnectorProvider for a JAX-RS client

I have wasted several hours trying to solve an issue with the use of Netty's channel pool map and a JAX-RS client.
I used Jersey's own Netty connector as inspiration, but exchanged Netty's channel with Netty's channel pool map.
https://jersey.github.io/apidocs/2.27/jersey/org/glassfish/jersey/netty/connector/NettyConnectorProvider.html
My problem is that I have references that I need inside my custom SimpleChannelInboundHandler. However, because of the way Netty creates a channel pool map, I cannot pass the references through my custom ChannelPoolHandler: as soon as the pool map has created a pool, the constructor of the channel pool handler never runs again.
This is the method that acquires a pool and checks out a channel to make an HTTP request.
@Override
public Future<?> apply(ClientRequest request, AsyncConnectorCallback callback) {
final CompletableFuture<Object> completableFuture = new CompletableFuture<>();
try{
HttpRequest httpRequest = buildHttpRequest(request);
// guard against prematurely closed channel
final GenericFutureListener<io.netty.util.concurrent.Future<? super Void>> closeListener =
future -> {
if (!completableFuture.isDone()) {
completableFuture.completeExceptionally(new IOException("Channel closed."));
}
};
try {
ClientRequestDTO clientRequestDTO = new ClientRequestDTO(NettyChannelPoolConnector.this, request, completableFuture, callback);
dtoMap.putIfAbsent(request.getUri(), clientRequestDTO);
// Retrieves a channel pool for the given host
FixedChannelPool pool = this.poolMap.get(clientRequestDTO);
// Acquire a new channel from the pool
io.netty.util.concurrent.Future<Channel> f = pool.acquire();
f.addListener((FutureListener<Channel>) futureWrite -> {
//Succeeded with acquiring a channel
if (futureWrite.isSuccess()) {
Channel channel = futureWrite.getNow();
channel.closeFuture().addListener(closeListener);
try {
if(request.hasEntity()) {
channel.writeAndFlush(httpRequest);
final JerseyChunkedInput jerseyChunkedInput = new JerseyChunkedInput(channel);
request.setStreamProvider(contentLength -> jerseyChunkedInput);
if(HttpUtil.isTransferEncodingChunked(httpRequest)) {
channel.write(jerseyChunkedInput);
} else {
channel.write(jerseyChunkedInput);
}
executorService.execute(() -> {
channel.closeFuture().removeListener(closeListener);
try {
request.writeEntity();
} catch (IOException ex) {
callback.failure(ex);
completableFuture.completeExceptionally(ex);
}
});
channel.flush();
} else {
channel.closeFuture().removeListener(closeListener);
channel.writeAndFlush(httpRequest);
}
} catch (Exception ex) {
System.err.println("Failed to sync and flush http request" + ex.getLocalizedMessage());
}
pool.release(channel);
}
});
} catch (NullPointerException ex) {
System.err.println("Failed to acquire socket from pool " + ex.getLocalizedMessage());
}
} catch (Exception ex) {
completableFuture.completeExceptionally(ex);
return completableFuture;
}
return completableFuture;
}
This is my ChannelPoolHandler
public class SimpleChannelPoolHandler implements ChannelPoolHandler {
private ClientRequestDTO clientRequestDTO;
private boolean ssl;
private URI uri;
private int port;
SimpleChannelPoolHandler(URI uri) {
this.uri = uri;
if(uri != null) {
this.port = uri.getPort() != -1 ? uri.getPort() : "https".equals(uri.getScheme()) ? 443 : 80;
ssl = "https".equalsIgnoreCase(uri.getScheme());
}
}
@Override
public void channelReleased(Channel ch) throws Exception {
System.out.println("Channel released: " + ch.toString());
}
@Override
public void channelAcquired(Channel ch) throws Exception {
System.out.println("Channel acquired: " + ch.toString());
}
@Override
public void channelCreated(Channel ch) throws Exception {
System.out.println("Channel created: " + ch.toString());
int readTimeout = Integer.parseInt(ApplicationEnvironment.getInstance().get("READ_TIMEOUT"));
SocketChannelConfig channelConfig = (SocketChannelConfig) ch.config();
channelConfig.setConnectTimeoutMillis(2000);
ChannelPipeline channelPipeline = ch.pipeline();
if(ssl) {
SslContext sslContext = SslContextBuilder.forClient().trustManager(InsecureTrustManagerFactory.INSTANCE).build();
channelPipeline.addLast("ssl", sslContext.newHandler(ch.alloc(), uri.getHost(), this.port));
}
channelPipeline.addLast("client codec", new HttpClientCodec());
channelPipeline.addLast("chunked content writer",new ChunkedWriteHandler());
channelPipeline.addLast("content decompressor", new HttpContentDecompressor());
channelPipeline.addLast("read timeout", new ReadTimeoutHandler(readTimeout, TimeUnit.MILLISECONDS));
channelPipeline.addLast("business logic", new JerseyNettyClientHandler(this.uri));
}
}
And this is my SimpleChannelInboundHandler:
public class JerseyNettyClientHandler extends SimpleChannelInboundHandler<HttpObject> {
private final NettyChannelPoolConnector nettyChannelPoolConnector;
private final LinkedBlockingDeque<InputStream> isList = new LinkedBlockingDeque<>();
private final AsyncConnectorCallback asyncConnectorCallback;
private final ClientRequest jerseyRequest;
private final CompletableFuture future;
public JerseyNettyClientHandler(ClientRequestDTO clientRequestDTO) {
this.nettyChannelPoolConnector = clientRequestDTO.getNettyChannelPoolConnector();
ClientRequestDTO cdto = clientRequestDTO.getNettyChannelPoolConnector().getDtoMap().get(clientRequestDTO.getClientRequest());
this.asyncConnectorCallback = cdto.getCallback();
this.jerseyRequest = cdto.getClientRequest();
this.future = cdto.getFuture();
}
@Override
protected void channelRead0(ChannelHandlerContext ctx, HttpObject msg) throws Exception {
if(msg instanceof HttpResponse) {
final HttpResponse httpResponse = (HttpResponse) msg;
final ClientResponse response = new ClientResponse(new Response.StatusType() {
@Override
public int getStatusCode() {
return httpResponse.status().code();
}
@Override
public Response.Status.Family getFamily() {
return Response.Status.Family.familyOf(httpResponse.status().code());
}
@Override
public String getReasonPhrase() {
return httpResponse.status().reasonPhrase();
}
}, jerseyRequest);
for (Map.Entry<String, String> entry : httpResponse.headers().entries()) {
response.getHeaders().add(entry.getKey(), entry.getValue());
}
if((httpResponse.headers().contains(HttpHeaderNames.CONTENT_LENGTH) && HttpUtil.getContentLength(httpResponse) > 0) || HttpUtil.isTransferEncodingChunked(httpResponse)) {
ctx.channel().closeFuture().addListener(future -> isList.add(NettyInputStream.END_OF_INPUT_ERROR));
response.setEntityStream(new NettyInputStream(isList));
} else {
response.setEntityStream(new InputStream() {
@Override
public int read() {
return -1;
}
});
}
if(asyncConnectorCallback != null) {
nettyChannelPoolConnector.executorService.execute(() -> {
asyncConnectorCallback.response(response);
future.complete(response);
});
}
}
if(msg instanceof HttpContent) {
HttpContent content = (HttpContent) msg;
ByteBuf byteContent = content.content();
if(byteContent.isReadable()) {
byte[] bytes = new byte[byteContent.readableBytes()];
byteContent.getBytes(byteContent.readerIndex(), bytes);
isList.add(new ByteArrayInputStream(bytes));
}
}
if(msg instanceof LastHttpContent) {
isList.add(NettyInputStream.END_OF_INPUT);
}
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
if(asyncConnectorCallback != null) {
nettyChannelPoolConnector.executorService.execute(() -> asyncConnectorCallback.failure(cause));
}
future.completeExceptionally(cause);
isList.add(NettyInputStream.END_OF_INPUT_ERROR);
}
}
The references that need to be passed to the SimpleChannelInboundHandler are what is packed into the ClientRequestDTO, as seen in the first code block.
I am not sure, since this is untested code, but it could be achieved with something like the following:
SimpleChannelPool sPool = poolMap.get(Req.getAddress());
Future<Channel> f = sPool.acquire();
f.get().pipeline().addLast("inbound", new NettyClientInBoundHandler(Req, jbContext, ReportData));
f.addListener(new NettyClientFutureListener(this.Req, sPool));
where Req, jbContext, and ReportData could be the input data for the InboundHandler.
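Another option worth considering, if the goal is only to make per-request references visible inside the handler added in channelCreated, is to attach them to the acquired channel with Netty's io.netty.util.AttributeKey API and read them back in the handler; the ChannelPoolHandler constructor then never needs them at all. An untested sketch, reusing the ClientRequestDTO from the question and assuming a single in-flight request per pooled channel at a time:

// Hypothetical attribute key used to carry the per-request DTO on the channel.
static final AttributeKey<ClientRequestDTO> REQUEST_DTO = AttributeKey.valueOf("requestDto");

// After acquiring a channel from the pool, attach the DTO before writing:
io.netty.util.concurrent.Future<Channel> f = pool.acquire();
f.addListener((FutureListener<Channel>) acquired -> {
    if (acquired.isSuccess()) {
        Channel channel = acquired.getNow();
        channel.attr(REQUEST_DTO).set(clientRequestDTO);
        channel.writeAndFlush(httpRequest);
    }
});

// Inside the SimpleChannelInboundHandler, read it back per event:
@Override
protected void channelRead0(ChannelHandlerContext ctx, HttpObject msg) {
    ClientRequestDTO dto = ctx.channel().attr(REQUEST_DTO).get();
    // dto.getCallback(), dto.getClientRequest() and dto.getFuture() are available here.
}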

Spark-Streaming CustomReceiver Unknown Host Exception

I am new to Spark Streaming. I want to stream a URL in order to retrieve info from it, so I used the JavaCustomReceiver to stream the URL.
This is the code I'm using (source)
public class JavaCustomReceiver extends Receiver<String> {
private static final Pattern SPACE = Pattern.compile(" ");
public static void main(String[] args) throws Exception {
SparkConf sparkConf = new SparkConf().setAppName("JavaCustomReceiver");
JavaStreamingContext ssc = new JavaStreamingContext(sparkConf, new Duration(1000));
JavaReceiverInputDStream<String> lines = ssc.receiverStream(
new JavaCustomReceiver("http://stream.meetup.com/2/rsvps", 80));
JavaDStream<String> words = lines.flatMap(new
FlatMapFunction<String, String>() {
@Override
public Iterator<String> call(String x) {
return Arrays.asList(SPACE.split(x)).iterator();
}
});
JavaPairDStream<String, Integer> wordCounts = words.mapToPair(
new PairFunction<String, String, Integer>() {
@Override
public Tuple2<String, Integer> call(String s) {
return new Tuple2<>(s, 1);
}
}).reduceByKey(new Function2<Integer, Integer, Integer>() {
@Override
public Integer call(Integer i1, Integer i2) {
return i1 + i2;
}
});
wordCounts.print();
ssc.start();
ssc.awaitTermination();
}
String host = null;
int port = -1;
public JavaCustomReceiver(String host_, int port_) {
super(StorageLevel.MEMORY_AND_DISK_2());
host = host_;
port = port_;
}
public void onStart() {
new Thread() {
@Override
public void run() {
receive();
}
}.start();
}
public void onStop() {
}
private void receive() {
try {
Socket socket = null;
BufferedReader reader = null;
String userInput = null;
try {
// connect to the server
socket = new Socket(host, port);
reader = new BufferedReader(
new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8));
// Until stopped or connection broken continue reading
while (!isStopped() && (userInput = reader.readLine()) != null) {
System.out.println("Received data '" + userInput + "'");
store(userInput);
}
} finally {
Closeables.close(reader, /* swallowIOException = */ true);
Closeables.close(socket, /* swallowIOException = */ true);
}
restart("Trying to connect again");
} catch (ConnectException ce) {
// restart if could not connect to server
restart("Could not connect", ce);
} catch (Throwable t) {
restart("Error receiving data", t);
}
}
}
However, I keep getting a java.net.UnknownHostException.
How can I fix this? What is wrong with the code that I'm using?
After reading the code of the custom receiver referenced, it is clear that it is a TCP receiver that connects to a host:port and not an HTTP receiver that could take a URL. You'll have to change the code to read from an HTTP endpoint.
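A rough sketch of what an HTTP-based receive() could look like, using plain java.net.HttpURLConnection to read the long-lived stream line by line (untested; the URL is the one from the question, and store(), isStopped() and restart() come from the Receiver base class):

private void receive() {
    try {
        // Talk HTTP to the endpoint instead of opening a raw TCP socket to port 80.
        URL url = new URL("http://stream.meetup.com/2/rsvps");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setReadTimeout(0); // the stream is long-lived
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
            String line;
            while (!isStopped() && (line = reader.readLine()) != null) {
                store(line);
            }
        }
        restart("Stream ended, trying to connect again");
    } catch (Throwable t) {
        restart("Error receiving data", t);
    }
}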

Catch Elasticsearch bulk errors when using bulkProcessor

I use a BulkProcessor to insert/update bulks in Elasticsearch.
I would like to catch
EsRejectedExecutionException
VersionConflictEngineException
DocumentAlreadyExistsException
but it doesn't throw anything.
It only sets a message on the response item.
How can I handle this properly, e.g. with an applicative retry if a request was rejected?
public BulkResponse bulkUpdate(.....) {
BulkResponse bulkWriteResult = null;
long startTime = System.currentTimeMillis();
AtomicInteger amountOfRequests = new AtomicInteger();
long esTime;
ElasticBulkProcessorListener listener = new ElasticBulkProcessorListener(updateOperations);
BulkProcessor bulkProcessor = BulkProcessor.builder(client, listener)
.setBulkActions(MAX_BULK_ACTIONS)
.setBulkSize(new ByteSizeValue(maxBulkSize, ByteSizeUnit.MB))
.setConcurrentRequests(5)
.build();
updateOperations.forEach(updateRequest -> {
bulkProcessor.add(updateRequest);
amountOfRequests.getAndIncrement();
});
try {
boolean isFinished = bulkProcessor.awaitClose(bulkTimeout, TimeUnit.SECONDS);
if (isFinished) {
if (listener.getBulkWriteResult() != null) {
bulkWriteResult = listener.getBulkWriteResult();
} else {
throw new Exception("Bulk updating failed, results are empty");
}
} else {
throw new Exception("Bulk updating failed, received timeout");
}
} catch (InterruptedException e) {
e.printStackTrace();
}
return bulkWriteResult;
}
public class ElasticBulkProcessorListener implements BulkProcessor.Listener {
private long esTime = 0;
private List<Throwable> errors;
private BulkResponse response;
public long getEsTime() {
return esTime;
}
@Override
public void beforeBulk(long executionId, BulkRequest request) {
String description = "";
if (!request.requests().isEmpty()) {
ActionRequest request1 = request.requests().get(0);
description = ((UpdateRequest) request1).type();
}
log.info("Bulk executionID: {}, estimated size is: {}MB, number of actions: {}, request type: {}",
executionId, (request.estimatedSizeInBytes() / 1000000), request.numberOfActions(), description);
}
@Override
public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
log.info("Bulk executionID: {}, took : {} Millis, bulk size: {}", executionId, response.getTookInMillis(), response.getItems().length);
esTime = response.getTookInMillis();
response = createBulkUpdateResult(response);
}
@Override
public void afterBulk(long executionId, BulkRequest request, Throwable failure) {
log.error("Bulk , failed! error: ", executionId, failure);
throw new DataFWCoreException(String.format("Bulk executionID: %d, update operation failed", executionId), failure);
}
}
The failure handler will be called only when a network failure occurs;
any other case goes to the success handler.
The only way to handle the exceptions mentioned above is to parse each response item and figure out what happened.
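For reference, a minimal sketch of that per-item inspection in the success callback, assuming the transport-client-era API used in the question (BulkItemResponse and org.elasticsearch.rest.RestStatus; exact names can differ slightly between versions):

@Override
public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
    if (response.hasFailures()) {
        for (BulkItemResponse item : response.getItems()) {
            if (item.isFailed()) {
                BulkItemResponse.Failure failure = item.getFailure();
                if (failure.getStatus() == RestStatus.TOO_MANY_REQUESTS) {
                    // Rejected executions come back as HTTP 429: collect
                    // request.requests().get(item.getItemId()) for an applicative retry.
                } else if (failure.getStatus() == RestStatus.CONFLICT) {
                    // Version conflicts / document-already-exists come back as HTTP 409.
                } else {
                    // Other per-item failures: log failure.getMessage() and decide case by case.
                }
            }
        }
    }
}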

Android Volley JsonArrayRequest returns nothing

In the code below, my arrayList is empty after the JsonArrayRequest block.
I set a breakpoint at this line: "int size = arrayList.size();"
Everything is OK until the "while" loop finishes; after that arrayList is empty.
JsonArrayRequest jsonArrayRequest = new JsonArrayRequest(Request.Method.GET, json_url,(String) null,
new Response.Listener<JSONArray>() {
@Override
public void onResponse(JSONArray response) {
int count=0;
int responseLength = response.length();
responseLength--;
while (count<responseLength)
{
try {
JSONObject jsonObject = response.getJSONObject(count);
Contact contact = new Contact(jsonObject.getString("title"),
jsonObject.getString("email"),
jsonObject.getString("description"),
jsonObject.getString("date"),
jsonObject.getBoolean("status"));
arrayList.add(contact);
int size = arrayList.size();
count++;
} catch (JSONException e) {
e.printStackTrace();
}
}
int size = arrayList.size();
}
},
new Response.ErrorListener() {
@Override
public void onErrorResponse(VolleyError error) {
Toast.makeText(context,"Error....",Toast.LENGTH_SHORT).show();
error.printStackTrace();
}
}
);
int size = arrayList.size();
VolleySingleton.getmInstance(context).addToRequestQueue(jsonArrayRequest);
return arrayList;
Here is what I did using a callback interface.
In the onCreate() method:
recyclerView = (RecyclerView) findViewById(R.id.recyclerview);
recyclerView.setHasFixedSize(true);
LinearLayoutManager linearLayoutManager = new LinearLayoutManager(this);
recyclerView.setLayoutManager(linearLayoutManager);
BackgroundTask backgroundTask = new BackgroundTask(this);
backgroundTask.getContacts(new BackgroundTask.arrayListCallBack() {
@Override
public void onSuccess(ArrayList<Contact> contacts) {
RecyclerView.Adapter adapter = new RecyclerAdapter(MainActivity.this, contacts);
recyclerView.setAdapter(adapter);
}
@Override
public void onFail(String error) {
Toast.makeText(MainActivity.this, error, Toast.LENGTH_LONG).show();
}
});
and in the BackgroundTask class:
JsonArrayRequest jsonArrayRequest = new JsonArrayRequest(Request.Method.POST, server_url, new Response.Listener<JSONArray>() {
@Override
public void onResponse(JSONArray response) {
int count = 0;
while (count < response.length()) {
try {
JSONObject jsonObject = response.getJSONObject(count);
Contact contact = new Contact(jsonObject.getString("name"), jsonObject.getString("section"));
contacts.add(contact);
Log.d("process request", "....."+jsonObject.getString("name"));
count++;
} catch (JSONException e) {
e.printStackTrace();
Toast.makeText(context, e.getMessage()+"\nError in Response", Toast.LENGTH_LONG).show();
}
callBack.onSuccess(contacts);
}
}
}, new Response.ErrorListener() {
@Override
public void onErrorResponse(VolleyError error) {
error.printStackTrace();
Toast.makeText(context, error.getMessage()+"\nError in Connection", Toast.LENGTH_LONG).show();
callBack.onFail("There's error ...");
}
});
MySingleton.getInstance(context).addToRequestQueue(jsonArrayRequest);
}
public interface arrayListCallBack {
void onSuccess(ArrayList<Contact> contacts);
void onFail(String error);
}
