could not initialize proxy - no Session - spring

[ERROR] could not initialize proxy - no Session
org.hibernate.LazyInitializationException: could not initialize proxy - no Session
at org.hibernate.proxy.AbstractLazyInitializer.initialize(AbstractLazyInitializer.java:57)
at org.hibernate.proxy.AbstractLazyInitializer.getImplementation(AbstractLazyInitializer.java:111)
at org.hibernate.proxy.pojo.cglib.CGLIBLazyInitializer.invoke(CGLIBLazyInitializer.java:150)
at com.model.dto.Distination$$EnhancerByCGLIB$$f4a8517c.getLibileDis(<generated>)
at com.business.impl.MeteobussinesImpl.afficherMeteo1(MeteobussinesImpl.java:30)
at com.test.Tester.main(Tester.java:32)
[ERROR] could not initialize proxy - no Session
org.hibernate.LazyInitializationException: could not initialize proxy - no Session
at org.hibernate.proxy.AbstractLazyInitializer.initialize(AbstractLazyInitializer.java:57)
at org.hibernate.proxy.AbstractLazyInitializer.getImplementation(AbstractLazyInitializer.java:111)
at org.hibernate.proxy.pojo.cglib.CGLIBLazyInitializer.invoke(CGLIBLazyInitializer.java:150)
at com.model.dto.Distination$$EnhancerByCGLIB$$f4a8517c.toString(<generated>)
at com.model.dto.Distination$$EnhancerByCGLIB$$f4a8517c.getLibileDis(<generated>)
at com.business.impl.MeteobussinesImpl.afficherMeteo1(MeteobussinesImpl.java:30)
at com.test.Tester.main(Tester.java:32)
1. The main class (Tester)
package com.test;
import java.util.Iterator;
import java.util.List;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import com.business.Meteobussines;
import com.business.impl.MeteobussinesImpl;
import com.model.vo.Meteo;
public class Tester {
public static void displayList(List<Meteo> list) {
Iterator<Meteo> iter = list.iterator();
if (!iter.hasNext()) {
System.out.println("La liste est vide");
return;
}
while (iter.hasNext()) {
Meteo ct = iter.next();
System.out.println("tempsMax :" + ct.getTempMax() + " pays :" + ct.getLibilePays() + " distination :" + ct.getLibileDistination());
}
}
public static void main(String[] args) {
// TODO Auto-generated method stub
String[] configArray = new String[] { "/com/resource/spring/ApplicationContext.xml",
"/com/resource/spring/ApplicationContextDao.xml","/com/resource/spring/ApplicationContextBusiness.xml"};
ApplicationContext ctx = new ClassPathXmlApplicationContext(configArray);
Meteobussines mete = (Meteobussines) ctx.getBean("MeteoBuss");
List<Meteo> m = mete.afficherMeteo1("tounes");
displayList(m);
}
}
2. MeteobussinesImpl
package com.business.impl;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import com.business.Meteobussines;
import com.dao.HistoriqueDao;
import com.dao.impl.HistoriqueDaoImpl;
import com.model.dto.Historique;
import com.model.vo.Meteo;
public class MeteobussinesImpl implements Meteobussines {
HistoriqueDao historiqueDao ;
@SuppressWarnings("null")
@Override
public List<Meteo> afficherMeteo1(String pays) {
List<Historique> hiss = historiqueDao.rechercher(pays);
List<Meteo> m = new ArrayList<Meteo>();
Iterator<Historique> iter = hiss.iterator();
if (!iter.hasNext()) {
System.out.println("La liste est vide");
}
while (iter.hasNext()) {
Historique ct = iter.next();
Meteo me =new Meteo();
me.setDateHis(ct.getDateHis());
me.setLibileDistination(ct.getDistination().getLibileDis());
me.setLibilePays(ct.getPays().getLibilePays());
me.setLibileVille(ct.getVille().getLibileVille());
me.setTempMax(ct.getTempMax());
me.setTempMin(ct.getTempMin());
m.add(me);
}
return m;
// TODO Auto-generated method stub
}
@Override
public List<Meteo> afficherMeteo2(String pays, String ville) {
// TODO Auto-generated method stub
return null;
}
@Override
public List<Meteo> afficherMeteo3(String pays, String ville,
String distination) {
// TODO Auto-generated method stub
return null;
}
public HistoriqueDao getHistoriqueDao() {
return historiqueDao;
}
public void setHistoriqueDao(HistoriqueDao historiqueDao) {
this.historiqueDao = historiqueDao;
}
}
3. HistoriqueDaoImpl
package com.dao.impl;
import java.util.List;
import org.hibernate.Criteria;
import org.hibernate.FetchMode;
import org.hibernate.criterion.Expression;
import com.dao.HistoriqueDao;
import com.model.dto.Historique;
@SuppressWarnings("unchecked")
public class HistoriqueDaoImpl extends GenericDaoImpl implements HistoriqueDao {
@Override
public List<Historique> rechercher(String critere1, String critere2,
String critere3) {
Criteria crit = getSession().createCriteria(Historique.class);
crit.setFetchMode("pays", FetchMode.JOIN);
crit.createAlias("pays", "p");
crit.add(Expression.eq("p.libilePays", critere1));
crit.setFetchMode("ville", FetchMode.JOIN);
crit.createAlias("ville", "b");
crit.add(Expression.eq("b.libileVille", critere2));
crit.setFetchMode("distination", FetchMode.JOIN);
crit.createAlias("distination", "d");
crit.add(Expression.eq("d.libileDis", critere3));
List<Historique> his = crit.list();
return his;
}
@Override
public List<Historique> rechercher(String critere1, String critere2) {
// TODO Auto-generated method stub
Criteria crit = getSession().createCriteria(Historique.class);
crit.setFetchMode("pays", FetchMode.JOIN);
crit.createAlias("pays", "p");
crit.add(Expression.eq("p.libilePays", critere1));
crit.setFetchMode("ville", FetchMode.JOIN);
crit.createAlias("ville", "b");
crit.add(Expression.eq("b.libileVille", critere2));
List<Historique> his = crit.list();
return his;
}
@Override
public List<Historique> rechercher(String critere1) {
// TODO Auto-generated method stub
Criteria crit = getSession().createCriteria(Historique.class);
crit.setFetchMode("pays", FetchMode.JOIN);
crit.createAlias("pays", "p");
crit.add(Expression.eq("p.libilePays", critere1));
List<Historique> his = crit.list();
return his;
}
}
4. I use Spring's DaoSupport; the DAO session is initialized by Spring.

The stack trace points to this line:
me.setLibileDistination(ct.getDistination().getLibileDis());
ct is a Historique proxy, i.e. a placeholder object whose associations (including the one you obtain through getDistination()) have not been initialized yet.
Initialize the associations you are interested in before the Hibernate transaction is committed (i.e. while the session is still open), for example:
Hibernate.initialize(ct.getDistination());

crit.setFetchMode("pays", FetchMode.JOIN);
crit.setFetchMode("ville", FetchMode.JOIN);
crit.setFetchMode("distination", FetchMode.JOIN);
This is the solution to the problem: in every query I have to enable FetchMode.JOIN for each association that is read after the session is closed.
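For reference, a minimal sketch of the single-criterion rechercher with all three associations fetched eagerly (same entity and property names as above), so the pays, ville and distination proxies are already populated when the business layer reads them:
@Override
public List<Historique> rechercher(String critere1) {
    Criteria crit = getSession().createCriteria(Historique.class);
    // Join-fetch every association that is accessed after the session is closed.
    crit.setFetchMode("pays", FetchMode.JOIN);
    crit.setFetchMode("ville", FetchMode.JOIN);
    crit.setFetchMode("distination", FetchMode.JOIN);
    crit.createAlias("pays", "p");
    crit.add(Expression.eq("p.libilePays", critere1));
    return crit.list();
}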

Related

NiFI "unable to find flowfile content"

I am using NiFi 1.6 and get the following errors when trying to modify a clone of an incoming flowFile:
[1]"unable to find content for FlowFile: ... MissingFlowFileException
...
Caused by ContentNotFoundException: Could not find content for StandardClaim
...
Caused by java.io.EOFException: null"
[2]"FlowFileHandlingException: StandardFlowFileRecord... is not known in this session"
The first error occurs when trying to access the contents of the flow file, the second when removing the flow file from the session (within a catch block for the first). This process is known to have worked under NiFi 0.7.
The basic process is:
Clone the incoming flow file
Write to the clone
Write to the clone again (some additional formatting)
Repeat 1-3
The error occurs on step 3 of the second iteration.
An interesting point is that if immediately after the clone is performed, a session.read of the clone is done everything works fine. The read seems to reset some pointer.
I have created unit tests for this processor, but they do not fail in either case.
Below is code simplified from the actual version in use that demonstrates the issue. (The development system is not connected, so I had to retype the code. Please forgive any typos; it should be close. This is also why a full stack trace is not provided.) The processor doing the work has a property that determines whether an immediate read is done or not, so both scenarios can be exercised easily. To set it up, all that is needed is a GetFile processor to supply the input and terminators for the output from the SampleCloningProcessor. A sample input file is included as well. The meat of the code is in the onTrigger and manipulate methods. The manipulation in this simplified version doesn't do anything but copy the input to the output.
Any insights into why this is happening and suggestions for corrections will be appreciated - thanks.
SampleCloningProcessor.java
package sample.processors.cloning;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.Reader;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Scanner;
import java.util.Set;
import org.apache.commons.compress.utils.IOUtils;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.io.InputStreamCallback;
import org.apache.nifi.processor.io.OutputStreamCallback;
import org.apache.nifi.processor.io.StreamCallback;
import org.apache.nifi.processor.util.StandardValidators;
import com.google.gson.Gson;
@Tags({"example", "clone"})
@CapabilityDescription("Demonstrates cloning of flowfile failure.")
public class SampleCloningProcessor extends AbstractProcessor {
/* Determines if an immediate read is performed after cloning of the incoming flowfile. */
public static final PropertyDescriptor IMMEDIATE_READ = new PropertyDescriptor.Builder()
.name("immediateRead")
.description("Determines if processor runs successfully. If a read is done immediatly "
+ "after the clone of the incoming flowFile, then the processor should run successfully.")
.required(true)
.allowableValues("true", "false")
.defaultValue("true")
.addValidator(StandardValidators.BOOLEAN_VALIDATOR)
.build();
public static final Relationship SUCCESS = new Relationship.Builder().name("success").
description("No unexpected errors.").build();
public static final Relationship FAILURE = new Relationship.Builder().name("failure").
description("Errors were thrown.").build();
private Set<Relationship> relationships;
private List<PropertyDescriptor> properties;
@Override
public void init(final ProcessorInitializationContext context) {
relationships = new HashSet<>(Arrays.asList(SUCCESS, FAILURE));
properties = Arrays.asList(IMMEDIATE_READ);
}
@Override
public Set<Relationship> getRelationships() {
return this.relationships;
}
@Override
public List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return this.properties;
}
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
FlowFile incomingFlowFile = session.get();
if (incomingFlowFile == null) {
return;
}
try {
final InfileReader inFileReader = new InfileReader();
session.read(incomingFlowFile, inFileReader);
Product product = inFileReader.getProduct();
boolean transfer = false;
getLogger().info("\tSession :\n" + session);
getLogger().info("\toriginal :\n" + incomingFlowFile);
for(int i = 0; i < 2; i++) {
transfer = manipulate(context, session, incomingFlowFile, product);
}
} catch (Exception e) {
getLogger().error(e.getMessage(), e);
session.rollback(true);
}
}
private boolean manipulate(final ProcessContext context, final ProcessSession session,
final FlowFile incomingFlowFile, final Product product) {
boolean transfer = false;
FlowFile outgoingFlowFile = null;
boolean immediateRead = context.getProperty(IMMEDIATE_READ).asBoolean();
try {
//Clone incoming flowFile
outgoingFlowFile = session.clone(incomingFlowFile);
getLogger().info("\tclone outgoing :\n" + outgoingFlowFile);
if(immediateRead) {
readFlowFile(session, outgoingFlowFile);
}
//First write into clone
StageOneWriter stage1Write = new StageOneWriter(product);
outgoingFlowFile = session.write(outgoingFlowFile, stage1Write);
getLogger().info("\twrite outgoing :\n" + outgoingFlowFile);
// Format the cloned file with another write
outgoingFlowFile = formatFlowFile(session, outgoingFlowFile);
getLogger().info("\tformat outgoing :\n" + outgoingFlowFile);
session.transfer(outgoingFlowFile, SUCCESS);
transfer = true;
} catch (Exception e) {
getLogger().error(e.getMessage(), e);
if (outgoingFlowFile != null) {
session.remove(outgoingFlowFile);
}
}
return transfer;
}
private void readFlowFile(final ProcessSession session, final FlowFile flowFile) {
session.read(flowFile, new InputStreamCallback() {
@Override
public void process(final InputStream in) throws IOException {
try (Scanner scanner = new Scanner(in)) {
scanner.useDelimiter("\\A").next();
}
}
});
}
private FlowFile formatFlowFile(final ProcessSession session, FlowFile flowFile) {
OutputFormatWriter formatWriter = new OutputFormatWriter();
flowFile = session.write(flowFile, formatWriter);
return flowFile;
}
private static class OutputFormatWriter implements StreamCallback {
@Override
public void process(final InputStream in, final OutputStream out) throws IOException {
try {
IOUtils.copy(in, out);
out.flush();
} finally {
IOUtils.closeQuietly(in);
IOUtils.closeQuietly(out);
}
}
}
private static class StageOneWriter implements OutputStreamCallback {
private Product product = null;
public StageOneWriter(Product product) {
this.product = product;
}
@Override
public void process(final OutputStream out) throws IOException {
final Gson gson = new Gson();
final String json = gson.toJson(product);
out.write(json.getBytes());
}
}
private static class InfileReader implements InputStreamCallback {
private Product product = null;
@Override
public void process(final InputStream in) throws IOException {
product = null;
final Gson gson = new Gson();
Reader inReader = new InputStreamReader(in, "UTF-8");
product = gson.fromJson(inReader, Product.class);
}
public Product getProduct() {
return product;
}
}
}
SampleCloningProcessorTest.java
package sample.processors.cloning;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.Before;
import org.junit.Test;
public class SampleCloningProcessorTest {
static final String flowFileContent = "{"
+ "\"cost\": \"cost 1\","
+ "\"description\": \"description 1\","
+ "\"markup\": 1.2,"
+ "\"name\": \"name 1\","
+ "\"supplier\": \"supplier 1\""
+ "}";
private TestRunner testRunner;
@Before
public void init() {
testRunner = TestRunners.newTestRunner(SampleCloningProcessor.class);
testRunner.enqueue(flowFileContent);
}
@Test
public void testProcessorImmediateRead() {
testRunner.setProperty(SampleCloningProcessor.IMMEDIATE_READ, "true");
testRunner.run();
testRunner.assertTransferCount("success", 2);
}
@Test
public void testProcessorImmediateRead_false() {
testRunner.setProperty(SampleCloningProcessor.IMMEDIATE_READ, "false");
testRunner.run();
testRunner.assertTransferCount("success", 2);
}
}
Product.java
package sample.processors.cloning;
public class Product {
private String name;
private String description;
private String supplier;
private String cost;
private float markup;
public String getName() {
return name;
}
public void setName(final String name) {
this.name = name;
}
public String getDescription() {
return description;
}
public void setDescription(final String description) {
this.description = description;
}
public String getSupplier() {
return supplier;
}
public void setSupplier(final String supplier) {
this.supplier = supplier;
}
public String getCost() {
return cost;
}
public void setCost(final String cost) {
this.cost = cost;
}
public float getMarkup() {
return markup;
}
public void setMarkup(final float markup) {
this.markup = markup;
}
}
product.json A sample input file.
{
"const" : "cost 1",
"description" : "description 1",
"markup" : 1.2,
"name" : "name 1",
"supplier" : "supplier 1"
}
Reported as a bug in NiFi; it is being addressed by https://issues.apache.org/jira/browse/NIFI-5879
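Until that fix lands, a compact form of the workaround described in the question (a sketch, not a confirmed fix): read the clone once immediately after cloning it, before any writes are chained onto it. Here stage1Write stands for the same OutputStreamCallback used above.
    FlowFile clone = session.clone(incomingFlowFile);
    session.read(clone, new InputStreamCallback() {
        @Override
        public void process(final InputStream in) throws IOException {
            // Drain the stream; the content itself is not needed here.
            byte[] buffer = new byte[8192];
            while (in.read(buffer) != -1) {
                // discard
            }
        }
    });
    clone = session.write(clone, stage1Write);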

Confluent Kafka Avro deserializer for spring boot kafka listener

Has anyone implemented a Confluent Kafka message deserializer to consume Kafka messages with Spring @KafkaListener consumers?
Here is my answer, which I have implemented based on io.confluent.kafka.serializers.AbstractKafkaAvroDeserializer:
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Map;
import javax.xml.bind.DatatypeConverter;
import org.apache.avro.Schema;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.avro.specific.SpecificRecordBase;
import org.apache.kafka.common.errors.SerializationException;
import org.apache.kafka.common.serialization.Deserializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class AvroConfluentDeserializer<T extends SpecificRecordBase> implements Deserializer<T> {
private static final Logger LOG = LoggerFactory.getLogger(AvroConfluentDeserializer.class);
protected static final byte MAGIC_BYTE = 0x0;
protected static final int idSize = 4;
private final DecoderFactory decoderFactory = DecoderFactory.get();
protected final Class<T> targetType;
public AvroConfluentDeserializer(Class<T> targetType) {
this.targetType = targetType;
}
@Override
public void close() {
// No-op
}
@Override
public void configure(Map<String, ?> arg0, boolean arg1) {
// No-op
}
@Override
public T deserialize(String topic, byte[] data) {
try {
T result = null;
if (data != null) {
LOG.info("data='{}'", DatatypeConverter.printHexBinary(data));
result = (T) deserializePayload(data, targetType.newInstance().getSchema());
LOG.info("deserialized data='{}'", result);
}
return result;
} catch (Exception ex) {
throw new SerializationException(
"Can't deserialize data '" + Arrays.toString(data) + "' from topic '" + topic + "'", ex);
}
}
protected T deserializePayload(byte[] payload, Schema schema) throws SerializationException {
int id = -1;
try {
ByteBuffer buffer = getByteBuffer(payload);
id = buffer.getInt();
int length = buffer.limit() - 1 - idSize;
int start = buffer.position() + buffer.arrayOffset();
DatumReader<T> reader = new SpecificDatumReader<T>(schema);
return reader.read(null, decoderFactory.binaryDecoder(buffer.array(), start, length, null));
} catch (IOException | RuntimeException e) {
throw new SerializationException("Error deserializing Avro message for id " + id, e);
}
}
private ByteBuffer getByteBuffer(byte[] payload) {
ByteBuffer buffer = ByteBuffer.wrap(payload);
if (buffer.get() != MAGIC_BYTE) {
throw new SerializationException("Unknown magic byte!");
}
return buffer;
}
}
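For completeness, a minimal wiring sketch (class and bean names are illustrative assumptions, and MyAvroRecord stands in for a generated SpecificRecordBase class) showing how such a deserializer can be plugged into a Spring Kafka listener container:
    import java.util.HashMap;
    import java.util.Map;
    import org.apache.kafka.clients.consumer.ConsumerConfig;
    import org.apache.kafka.common.serialization.StringDeserializer;
    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;
    import org.springframework.kafka.annotation.EnableKafka;
    import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
    import org.springframework.kafka.core.ConsumerFactory;
    import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
    @Configuration
    @EnableKafka
    public class KafkaConsumerConfig {
        @Bean
        public ConsumerFactory<String, MyAvroRecord> consumerFactory() {
            Map<String, Object> props = new HashMap<>();
            props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
            props.put(ConsumerConfig.GROUP_ID_CONFIG, "my-group");
            // Key and value deserializers are supplied as instances below.
            return new DefaultKafkaConsumerFactory<>(props,
                    new StringDeserializer(),
                    new AvroConfluentDeserializer<>(MyAvroRecord.class));
        }
        @Bean
        public ConcurrentKafkaListenerContainerFactory<String, MyAvroRecord> kafkaListenerContainerFactory() {
            ConcurrentKafkaListenerContainerFactory<String, MyAvroRecord> factory =
                    new ConcurrentKafkaListenerContainerFactory<>();
            factory.setConsumerFactory(consumerFactory());
            return factory;
        }
    }
A listener method annotated with @KafkaListener(topics = "my-topic") can then take a MyAvroRecord parameter directly.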

RecyclerView filter not working: it's not searching the elements

When I filter the RecyclerView it shows "Not Found". My SearchView is not working: when I run the code the result is always "Not Found", and I think the problem is in onQueryTextChange.
My filter function also does not work.
@Override
public boolean onQueryTextSubmit(String query) {
Toast.makeText(SecondActivity1.this, "Name is : " + query, Toast.LENGTH_SHORT).show();
return false;
}
@Override
public boolean onQueryTextChange(String newText) {
final List<DatabaseModel> filteredModelList = filter(dbList, newText);
if (filteredModelList.size() > 0) {
// Toast.makeText(SecondActivity1.this, "Found", Toast.LENGTH_SHORT).show();
recyclerAdapter.setFilter(filteredModelList);
return true;
} else {
Toast.makeText(SecondActivity1.this, "Not Found", Toast.LENGTH_SHORT).show();
return false;
}
}
private List<DatabaseModel> filter(List<DatabaseModel> models, String query) {
query = query.toLowerCase();
recyclerAdapter.notifyDataSetChanged();
final List<DatabaseModel> filteredModelList = new ArrayList<>();
// mRecyclerView.setLayoutManager(new LinearLayoutManager(SecondActivity1.this));
// mRecyclerView.setAdapter(RecyclerAdapter);
for (DatabaseModel model : models) {
final String text = model.getName().toLowerCase();
if (text.contains(query)) {
filteredModelList.add(model);
}
}
return filteredModelList;
//
}
Here is the filter method, which receives the parameters (dbList, newText). When I use a Toast it shows that newText is received, but the list is not filtered. I checked many sites and my code matches what they show. When I enter a name, the Toast shows the name I entered, but it still does not filter.
RecyclerAdapter.java
package com.example.prabhu.databasedemo;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Filter;
import android.widget.Filterable;
import android.widget.TextView;
import android.widget.Toast;
import java.util.ArrayList;
import java.util.List;
/**
* Created by user_adnig on 11/14/15.
*/
public class RecyclerAdapter extends RecyclerView.Adapter<RecyclerAdapter.ViewHolder> {
List<DatabaseModel> dbList;
static Context context;
RecyclerAdapter(Context context, List<DatabaseModel> dbList ){
this.dbList = new ArrayList<>();
this.context = context;
this.dbList = (ArrayList<DatabaseModel>) dbList;
}
@Override
public RecyclerAdapter.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
View itemLayoutView = LayoutInflater.from(parent.getContext()).inflate(
R.layout.item_row, null);
// create ViewHolder
ViewHolder viewHolder = new ViewHolder(itemLayoutView);
return viewHolder;
}
@Override
public void onBindViewHolder(RecyclerAdapter.ViewHolder holder, int position) {
holder.name.setText(dbList.get(position).getName());
holder.email.setText(dbList.get(position).getEmail());
}
@Override
public int getItemCount() {
return dbList.size();
}
public void setFilter(List<DatabaseModel> countryModels) {
// Toast.makeText(RecyclerAdapter.this,"Method is called", Toast.LENGTH_SHORT).show();
dbList = new ArrayList<>();
dbList.addAll(countryModels);
notifyDataSetChanged();
}
public static class ViewHolder extends RecyclerView.ViewHolder implements View.OnClickListener {
public TextView name,email;
public ViewHolder(View itemLayoutView) {
super(itemLayoutView);
name = (TextView) itemLayoutView
.findViewById(R.id.rvname);
email = (TextView)itemLayoutView.findViewById(R.id.rvemail);
itemLayoutView.setOnClickListener(this);
}
@Override
public void onClick(View v) {
Intent intent = new Intent(context,DetailsActivity.class);
Bundle extras = new Bundle();
extras.putInt("position",getAdapterPosition());
intent.putExtras(extras);
/*
int i=getAdapterPosition();
intent.putExtra("position", getAdapterPosition());*/
context.startActivity(intent);
Toast.makeText(RecyclerAdapter.context, "you have clicked Row " + getAdapterPosition(), Toast.LENGTH_LONG).show();
}
}
}
This is my RecyclerAdapter code. I also used the recyclerAdapter.setFilter(filteredModelList) method, but it did not work for me; I think there is an error in my setFilter method that I have not solved yet. Also, when I clear the search widget I don't get the full list back; instead I get an empty RecyclerView.
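One way to get the full list back on an empty query (a sketch, not a drop-in fix; "originalList" is an assumed field that keeps an unfiltered copy of the data):
    @Override
    public boolean onQueryTextChange(String newText) {
        if (newText == null || newText.trim().isEmpty()) {
            // Empty query: restore the complete, unfiltered list.
            recyclerAdapter.setFilter(originalList);
            return true;
        }
        final List<DatabaseModel> filteredModelList = filter(originalList, newText);
        if (filteredModelList.isEmpty()) {
            Toast.makeText(SecondActivity1.this, "Not Found", Toast.LENGTH_SHORT).show();
        }
        recyclerAdapter.setFilter(filteredModelList);
        return true;
    }
Filtering from originalList rather than from the adapter's current (already filtered) list also prevents the results from shrinking with every keystroke.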

HBase coprocessor failing to load from shell

I am trying to add a coprocessor to an HBase table and it is failing with this error:
2016-03-15 14:40:14,130 INFO org.apache.hadoop.hbase.regionserver.RSRpcServices: Open PRODUCT_DETAILS,,1457953190424.f687dd250bfd1f18ffbb8075fd625145.
2016-03-15 14:40:14,173 ERROR org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost: Failed to load coprocessor com.optymyze.coprocessors.ProductObserver
java.io.IOException: Failed on local exception: com.google.protobuf.InvalidProtocolBufferException: Protocol message end-group tag did not match expected tag.; Host Details : local host is: "mylocalhost/mylocalhostip"; destination host is: "mydestinationhost":9000;
at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:772)
To add the coprocessor I did the following:
hbase> disable 'PRODUCT_DETAILS'
hbase> alter 'PRODUCT_DETAILS', METHOD => 'table_att', 'coprocessor'=>'hdfs://mydestinationhost:9000/hbase-coprocessors-0.0.3-SNAPSHOT.jar|com.optymyze.coprocessors.ProductObserver|1001|arg1=1,arg2=2'
Now enable 'PRODUCT_DETAILS' won't work.
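As a side note, if the table stays stuck in the disabled state because of the failing coprocessor, unsetting the table attribute usually allows it to be enabled again (a sketch; the attribute name coprocessor$1 assumes this was the first coprocessor attached to the table):
hbase> alter 'PRODUCT_DETAILS', METHOD => 'table_att_unset', NAME => 'coprocessor$1'
hbase> enable 'PRODUCT_DETAILS'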
The coprocessor code is as follows:
package com.optymyze.coprocessors;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.util.Bytes;
import org.slf4j.Logger;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.slf4j.LoggerFactory.*;
/**
*
* Created by adnan on 14-03-2016.
*/
public class ProductObserver extends BaseRegionObserver {
private static final Logger LOGGER = getLogger(ProductObserver.class);
private static final String PRODUCT_DETAILS_TABLE = "PRODUCT_DETAILS";
public static final String COLUMN_FAMILY = "CF";
@Override
public void postPut(ObserverContext<RegionCoprocessorEnvironment> e, Put put, WALEdit edit, boolean writeToWAL) throws IOException {
List<KeyValue> kvs = put.getFamilyMap().get(Bytes.toBytes(COLUMN_FAMILY));
LOGGER.info("key values {}", kvs);
Map<String, Integer> qualifierVsValue = getMapForQualifierVsValuesForRequiredOnes(kvs);
LOGGER.info("qualifier values {}", qualifierVsValue);
List<Put> puts = createPuts(kvs, qualifierVsValue);
LOGGER.info("puts values {}", puts);
updateProductTable(e, puts);
LOGGER.info("puts done");
}
private void updateProductTable(ObserverContext<RegionCoprocessorEnvironment> e, List<Put> puts) throws IOException {
HTableInterface productTable = e.getEnvironment().getTable(Bytes.toBytes(PRODUCT_DETAILS_TABLE));
try {
productTable.put(puts);
}finally {
productTable.close();
}
}
private List<Put> createPuts(List<KeyValue> kvs, Map<String, Integer> qualifierVsValue) {
int salePrice, baseline = 0, finalPrice = 0;
List<Put> puts = new ArrayList<Put>(kvs.size());
for (KeyValue kv : kvs) {
if (kv.matchingQualifier(Bytes.toBytes("BASELINE"))) {
baseline = convertToZeroIfNull(qualifierVsValue, "PRICE")
- convertToZeroIfNull(qualifierVsValue, "PRICE")
* convertToZeroIfNull(qualifierVsValue, "DISCOUNT") / 100;
puts.add(newPut(kv, baseline));
}
if (kv.matchingQualifier(Bytes.toBytes("FINALPRICE"))) {
finalPrice = baseline + baseline * convertToZeroIfNull(qualifierVsValue, "UPLIFT") / 100;
puts.add(newPut(kv, finalPrice));
}
if (kv.matchingQualifier(Bytes.toBytes("SALEPRICE"))) {
salePrice = finalPrice * convertToZeroIfNull(qualifierVsValue, "VOLUME");
puts.add(newPut(kv, salePrice));
}
}
return puts;
}
private Map<String, Integer> getMapForQualifierVsValuesForRequiredOnes(List<KeyValue> kvs) {
Map<String, Integer> qualifierVsValue = new HashMap<String, Integer>();
for (KeyValue kv : kvs) {
getValueFromQualifier(kv, "PRICE", qualifierVsValue);
getValueFromQualifier(kv, "DISCOUNT", qualifierVsValue);
getValueFromQualifier(kv, "UPLIFT", qualifierVsValue);
getValueFromQualifier(kv, "VOLUME", qualifierVsValue);
}
return qualifierVsValue;
}
private Integer convertToZeroIfNull(Map<String, Integer> qualifierVsValue, String qualifier) {
Integer v = qualifierVsValue.get(qualifier);
return v == null ? 0 : v;
}
private void getValueFromQualifier(KeyValue kv, String qualifier, Map<String, Integer> qualifierVsValue) {
if (kv.matchingQualifier(Bytes.toBytes(qualifier))) {
qualifierVsValue.put(qualifier, Bytes.toInt(convertToByteZeroIfNull(kv)));
}
}
private Put newPut(KeyValue kv, int newVal) {
Put put = new Put(kv.getValue(), kv.getTimestamp());
put.add(kv.getFamily(), kv.getQualifier(), Bytes.toBytes(newVal));
return put;
}
private byte[] convertToByteZeroIfNull(KeyValue kv) {
return kv.getValue() == null ? Bytes.toBytes(0) : kv.getValue();
}
}

SearchView does not perform its function, how to filter this custom adapter?

I have created a custom adapter that extends ArrayAdapter and backs a ListView with two TextViews in a single row. I have a SearchView on the ActionBar. In onQueryTextChange(String s) I have tried some methods that I found; they run, but the result is not correct: in the end I have only the first row of the ListView. How can I make the SearchView work correctly?
Everything else works fine. This app is for minSdkVersion="9" and above. Any suggestion is welcome. Regards.
import android.app.SearchManager;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.res.Resources;
import android.net.Uri;
import android.os.Bundle;
import android.support.v4.view.MenuItemCompat;
import android.support.v7.app.ActionBar;
import android.support.v7.app.ActionBarActivity;
import android.support.v7.widget.SearchView;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.Filter;
import android.widget.ListView;
import android.widget.TextView;
public class A extends ActionBarActivity{
ListView list;
String[] titl;
String[] opis;
SearchView searchView;
VjuAdapter adapter;
@Override
protected void onCreate(Bundle savedInstanceState) {
// TODO Auto-generated method stub
super.onCreate(savedInstanceState);
setContentView(R.layout.activity);
ActionBar aB = getSupportActionBar();
aB.setDisplayHomeAsUpEnabled(true);
Resources res=getResources();
titl=res.getStringArray(R.array.naslov);
opis=res.getStringArray(R.array.podnaslov);
list=(ListView) findViewById(R.id.listView);
VjuAdapter adapter=new VjuAdapter(this, titl, opis);
adapter.getFilter().filter(null);
list.setAdapter(adapter);
list.setTextFilterEnabled(true);
}
@Override
protected void onPause() {
// TODO Auto-generated method stub
super.onPause();
}
@Override
protected void onDestroy() {
// TODO Auto-generated method stub
super.onDestroy();
finish();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// TODO Auto-generated method stub
MenuInflater men = getMenuInflater();
men.inflate(R.menu.main, menu);
SearchManager searchManager = (SearchManager) getSystemService(Context.SEARCH_SERVICE);
MenuItem searchItem = menu.findItem(R.id.trazi);
ComponentName cn = new ComponentName(this, A.class);
searchView = (SearchView) MenuItemCompat.getActionView(searchItem);
searchView.setSearchableInfo(searchManager.getSearchableInfo(cn));
//searchView.setOnQueryTextListener(this);
searchView.setOnQueryTextListener(new SearchView.OnQueryTextListener() {
@Override
public boolean onQueryTextSubmit(String arg0) {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean onQueryTextChange(String s) {
// Here i have tried some methods which i have found
// this works but result is not correct, in the end i have
// only first row from ListView
/*if (TextUtils.isEmpty(s)) {
((LayoutInflater) list.getAdapter()).getFilter().filter(s);
} else {
adapter.getFilter().filter(s.toString());
}
return true;
}
});*/
//s.toLowerCase(Locale.getDefault());
VjuAdapter vd = (VjuAdapter)list.getAdapter();
Ll.m("POCETAK VD "+ vd);
Filter filter = vd.getFilter();
Ll.m("SRED VD.GET "+ vd.getFilter());
filter.filter(s);
Ll.m("KRAJ FILTER "+ filter);
/*if (TextUtils.isEmpty(s)) {
list.clearTextFilter();
} else {
//adapter.setSelectionAfterHeaderView();
adapter.getFilter().filter(s.toString());
}*/
return true;
}
});
return super.onCreateOptionsMenu(menu);
}
public boolean onOptionsItemSelected(MenuItem item) {
// TODO Auto-generated method stub
int id = item.getItemId();
/*if (id == R.id.trazi){
onSearchRequested();
}*/
if (id == android.R.id.home){
onBackPressed();
}
if (id == R.id.about){
Intent i = new Intent("com.kanna.sanjarica.ABOUT");
startActivity(i);
}
if (id == R.id.oceni_apl){
Uri uri = Uri.parse("market://details?id=" + getApplicationContext().getPackageName());
Intent goToMarket = new Intent(Intent.ACTION_VIEW, uri);
startActivity(goToMarket);
}
if (id == R.id.kontakt){
String mailTo="kanjah77#gmail.com";
Intent email_intent = new Intent(Intent.ACTION_SENDTO, Uri.fromParts("mailto",mailTo, null));
email_intent.putExtra(android.content.Intent.EXTRA_SUBJECT, "");
email_intent.putExtra(android.content.Intent.EXTRA_TEXT,"");
startActivity(Intent.createChooser(email_intent, "Pošalji email..."));
}
return true;
}
}
class VjuAdapter extends ArrayAdapter<String>{
Context context;
String [] titlArray;
String [] opisArray;
public VjuAdapter(Context c, String[] naslov, String[] podnaslov ) {
super(c,R.layout.single_row,R.id.textView1,naslov);
this.context=c;
this.titlArray=naslov;
this.opisArray=podnaslov;
}
class MyViewHolder{
TextView textVel;
TextView textMal;
MyViewHolder(View v){
textVel=(TextView) v.findViewById(R.id.textView1);
textMal=(TextView) v.findViewById(R.id.textView2);
}
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
View row=convertView;
MyViewHolder holder=null;
if(row==null){
LayoutInflater inf=(LayoutInflater)context.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
row=inf.inflate(R.layout.single_row, parent, false);
holder=new MyViewHolder(row);
row.setTag(holder);
}else{
holder=(MyViewHolder) row.getTag();
}
holder.textVel.setText(titlArray[position]);
holder.textMal.setText(opisArray[position]);
return row;
}
}
This is from http://developer.android.com/:
A concrete BaseAdapter that is backed by an array of arbitrary
objects. By default this class expects that the provided resource id
references a single TextView. If you want to use a more complex
layout, use the constructors that also takes a field id. That field id
should reference a TextView in the larger layout resource.
You have two TextViews, but the constructor being used is:
ArrayAdapter(Context context, int resource, int textViewResourceId)
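So the built-in filter only knows about the single array passed to super() (the titles) and loses the pairing with the descriptions. One simple way around that (a sketch, not the original code; filteredTitl and filteredOpis are illustrative names) is to filter the title/description pairs yourself in onQueryTextChange and hand the adapter the filtered arrays:
    @Override
    public boolean onQueryTextChange(String s) {
        // Requires java.util.ArrayList, java.util.List and java.util.Locale imports.
        List<String> filteredTitl = new ArrayList<String>();
        List<String> filteredOpis = new ArrayList<String>();
        String q = (s == null) ? "" : s.toLowerCase(Locale.getDefault());
        for (int i = 0; i < titl.length; i++) {
            if (q.length() == 0 || titl[i].toLowerCase(Locale.getDefault()).contains(q)) {
                filteredTitl.add(titl[i]);
                filteredOpis.add(opis[i]);
            }
        }
        list.setAdapter(new VjuAdapter(A.this,
                filteredTitl.toArray(new String[0]),
                filteredOpis.toArray(new String[0])));
        return true;
    }
Recreating the adapter on every keystroke is not the most efficient approach (it also resets the scroll position), but it keeps both columns in sync without writing a custom Filter.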
