Confluent Kafka Avro deserializer for Spring Boot Kafka listener

Has anybody implemented a Confluent Kafka message deserializer to consume Kafka messages with Spring @KafkaListeners?

Here is my answer, which I've implemented based on io.confluent.kafka.serializers.AbstractKafkaAvroDeserializer:
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Map;
import javax.xml.bind.DatatypeConverter;
import org.apache.avro.Schema;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.avro.specific.SpecificRecordBase;
import org.apache.kafka.common.errors.SerializationException;
import org.apache.kafka.common.serialization.Deserializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class AvroConfluentDeserializer<T extends SpecificRecordBase> implements Deserializer<T> {
private static final Logger LOG = LoggerFactory.getLogger(AvroConfluentDeserializer.class);
protected static final byte MAGIC_BYTE = 0x0;
protected static final int idSize = 4;
private final DecoderFactory decoderFactory = DecoderFactory.get();
protected final Class<T> targetType;
public AvroConfluentDeserializer(Class<T> targetType) {
this.targetType = targetType;
}
@Override
public void close() {
// No-op
}
@Override
public void configure(Map<String, ?> arg0, boolean arg1) {
// No-op
}
@Override
public T deserialize(String topic, byte[] data) {
try {
T result = null;
if (data != null) {
LOG.info("data='{}'", DatatypeConverter.printHexBinary(data));
result = (T) deserializePayload(data, targetType.newInstance().getSchema());
LOG.info("deserialized data='{}'", result);
}
return result;
} catch (Exception ex) {
throw new SerializationException(
"Can't deserialize data '" + Arrays.toString(data) + "' from topic '" + topic + "'", ex);
}
}
protected T deserializePayload(byte[] payload, Schema schema) throws SerializationException {
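// Confluent wire format: 1 magic byte (0x0), a 4-byte big-endian schema registry id,
// then the Avro-encoded payload. The id is read but not used for a registry lookup here,
// because the writer schema is taken from the generated SpecificRecord class instead.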
int id = -1;
try {
ByteBuffer buffer = getByteBuffer(payload);
id = buffer.getInt();
int length = buffer.limit() - 1 - idSize;
int start = buffer.position() + buffer.arrayOffset();
DatumReader<T> reader = new SpecificDatumReader<T>(schema);
return reader.read(null, decoderFactory.binaryDecoder(buffer.array(), start, length, null));
} catch (IOException | RuntimeException e) {
throw new SerializationException("Error deserializing Avro message for id " + id, e);
}
}
private ByteBuffer getByteBuffer(byte[] payload) {
ByteBuffer buffer = ByteBuffer.wrap(payload);
if (buffer.get() != MAGIC_BYTE) {
throw new SerializationException("Unknown magic byte!");
}
return buffer;
}
}
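To hook this deserializer into a Spring @KafkaListener, an instance of it can be handed to the consumer factory. A minimal sketch, assuming a generated Avro SpecificRecord class named MyRecord; the broker address, group id, and topic name are placeholders:

import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;

@Configuration
@EnableKafka
public class KafkaConsumerConfig {
    @Bean
    public ConsumerFactory<String, MyRecord> consumerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // placeholder
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "my-group");                // placeholder
        // Deserializer instances are passed directly, so no class names in the props.
        return new DefaultKafkaConsumerFactory<>(props,
                new StringDeserializer(),
                new AvroConfluentDeserializer<>(MyRecord.class));
    }
    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, MyRecord> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, MyRecord> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        return factory;
    }
}

A listener is then simply @KafkaListener(topics = "my-topic") public void listen(MyRecord record) { ... }.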

Related

How to implement a worker thread that will process Kinesis records and update a GUI in JavaFX?

I'm working on a micro-services monitoring app.
My app is supposed to update a GUI whenever a newly consumed record arrives, meaning:
When I receive a new record:
1) I check whether the request it represents is part of a legal flow, and whether that flow is already represented in the GUI.
By representation, I mean a set of circles that represent the full flow.
For example, if I get a transaction (MS1 received a request) for legal flow number 1, which is MS1 to MS2 to MS3, my GUI will add a table column with 2 grey circles: MS1 to MS2 and MS2 to MS3. Next, when a record "MS2 received from
MS1" is consumed, I will paint the first circle green, and so on.
My problem is:
I don't understand how to "tap into" Amazon's KCL code (presented here).
That is, I don't know how to make a consumed record trigger an event in my JavaFX GUI that updates the GUI accordingly.
Help would be much appreciated!
package com.kinesisdataconsumer;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import com.DATA_STATUS;
import com.DataBase;
import com.MonitoringLogicImpl;
import com.kinesisdataproducer.Producer;
import com.Transaction;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;
import com.amazonaws.services.kinesis.clientlibrary.interfaces.IRecordProcessor;
import com.amazonaws.services.kinesis.clientlibrary.interfaces.IRecordProcessorCheckpointer;
import com.amazonaws.services.kinesis.clientlibrary.interfaces.IRecordProcessorFactory;
import com.amazonaws.services.kinesis.clientlibrary.lib.worker.InitialPositionInStream;
import com.amazonaws.services.kinesis.clientlibrary.lib.worker.KinesisClientLibConfiguration;
import com.amazonaws.services.kinesis.clientlibrary.lib.worker.Worker;
import com.amazonaws.services.kinesis.clientlibrary.lib.worker.ShutdownReason;
import com.amazonaws.services.kinesis.model.Record;
public class Consumer implements IRecordProcessorFactory {
private static final Logger log = LoggerFactory.getLogger(Consumer.class);
public DataBase dataBase;
public ArrayList<Transaction> transactionList;
public MonitoringLogicImpl monitoringLogic;
private final AtomicLong largestTimestamp = new AtomicLong(0);
private final List<Long> sequenceNumbers = new ArrayList<>();
private final Object lock = new Object();
public Consumer(DataBase database, ArrayList<Transaction> transactions, MonitoringLogicImpl monitoringLogicImplementation){
dataBase = database;
transactionList = transactions;
monitoringLogic = monitoringLogicImplementation;
}
private class RecordProcessor implements IRecordProcessor {
@Override
public void initialize(String shardId) {}
@Override
public void processRecords(List<Record> records, IRecordProcessorCheckpointer checkpointer) {
long timestamp = 0;
List<Long> seqNos = new ArrayList<>();
for (Record r : records) {
timestamp = Math.max(timestamp, Long.parseLong(r.getPartitionKey()));
try {
byte[] b = new byte[r.getData().remaining()];
r.getData().get(b);
seqNos.add(Long.parseLong(new String(b, "UTF-8").split("#")[0]));
//this thread adds the transaction to the DB
Thread addTransactionToDBThread = new Thread() {
public void run() {
try {
JSONObject jsonObj = new JSONObject(new String(b, "UTF-8").split("#")[1]);
Transaction transaction = Transaction.convertJsonToTransaction(jsonObj);
//add the transaction to the database
dataBase.addTransactionToDB(transaction);
//update the user-interface about the last transaction in the system
DATA_STATUS transactionStatus = monitoringLogic.getStatus(transaction);
monitoringLogic.updateUI(transaction.getUuid(), transaction.getSender(), transaction.getReceiver(), transactionStatus);
Thread.sleep(1000);
} catch(InterruptedException e) {
e.printStackTrace();
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
}
}
};
addTransactionToDBThread.start();
} catch (Exception e) {
log.error("Error parsing record", e);
System.exit(1);
}
}
synchronized (lock) {
if (largestTimestamp.get() < timestamp) {
log.info(String.format(
"Found new larger timestamp: %d (was %d), clearing state",
timestamp, largestTimestamp.get()));
largestTimestamp.set(timestamp);
sequenceNumbers.clear();
}
// Only add to the shared list if our data is from the latest run.
if (largestTimestamp.get() == timestamp) {
sequenceNumbers.addAll(seqNos);
Collections.sort(sequenceNumbers);
}
}
try {
checkpointer.checkpoint();
} catch (Exception e) {
log.error("Error while trying to checkpoint during ProcessRecords", e);
}
}
@Override
public void shutdown(IRecordProcessorCheckpointer checkpointer, ShutdownReason reason) {
log.info("Shutting down, reason: " + reason);
try {
checkpointer.checkpoint();
} catch (Exception e) {
log.error("Error while trying to checkpoint during Shutdown", e);
}
}
}
/**
* Log a message indicating the current state.
*/
public void logResults() {
synchronized (lock) {
if (largestTimestamp.get() == 0) {
return;
}
if (sequenceNumbers.size() == 0) {
log.info("No sequence numbers found for current run.");
return;
}
// The producer assigns sequence numbers starting from 1, so we
// start counting from one before that, i.e. 0.
long last = 0;
long gaps = 0;
for (long sn : sequenceNumbers) {
if (sn - last > 1) {
gaps++;
}
last = sn;
}
log.info(String.format(
"Found %d gaps in the sequence numbers. Lowest seen so far is %d, highest is %d",
gaps, sequenceNumbers.get(0), sequenceNumbers.get(sequenceNumbers.size() - 1)));
}
}
@Override
public IRecordProcessor createProcessor() {
return this.new RecordProcessor();
}
public void consumeData() {
KinesisClientLibConfiguration config =
new KinesisClientLibConfiguration(
"KinesisProducerLibSampleConsumer",
Producer.STREAM_NAME,
new DefaultAWSCredentialsProviderChain(),
"KinesisProducerLibSampleConsumer")
.withRegionName(Producer.REGION)
.withInitialPositionInStream(InitialPositionInStream.LATEST);
final Consumer consumer = new Consumer(dataBase, transactionList, monitoringLogic);
Executors.newScheduledThreadPool(1).scheduleAtFixedRate(new Runnable() {
@Override
public void run() {
consumer.logResults();
}
}, 10, 1, TimeUnit.SECONDS);
new Worker.Builder()
.recordProcessorFactory(consumer)
.config(config)
.build()
.run();
}
}
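To the question itself: JavaFX scene-graph updates must happen on the JavaFX Application Thread, so the KCL record-processor thread cannot call the GUI directly. A common bridge (a sketch, not from the original thread; Transaction, DATA_STATUS, and MonitoringLogicImpl are the poster's own types) is to hand each update to Platform.runLater:

import com.DATA_STATUS;
import com.MonitoringLogicImpl;
import com.Transaction;
import javafx.application.Platform;

public final class UiBridge {
    private final MonitoringLogicImpl monitoringLogic;

    public UiBridge(MonitoringLogicImpl monitoringLogic) {
        this.monitoringLogic = monitoringLogic;
    }

    // Call from RecordProcessor.processRecords() (or the worker thread it spawns)
    // for each parsed transaction, instead of calling monitoringLogic.updateUI directly.
    public void onTransaction(Transaction transaction, DATA_STATUS status) {
        Platform.runLater(() ->
                // Runs on the JavaFX Application Thread, so touching the scene graph is safe.
                monitoringLogic.updateUI(transaction.getUuid(), transaction.getSender(),
                        transaction.getReceiver(), status));
    }
}

If updates can arrive faster than the GUI can repaint, it is also common to coalesce them in a queue and drain it once per frame, but the runLater handoff is the essential step.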

NiFi "unable to find flowfile content"

I am using NiFi 1.6 and get the following errors when trying to modify a clone of an incoming flowFile:
[1]"unable to find content for FlowFile: ... MissingFlowFileException
...
Caused by ContentNotFoundException: Could not find content for StandardClaim
...
Caused by java.io.EOFException: null"
[2]"FlowFileHandlingException: StandardFlowFileRecord... is not known in this session"
The first error occurs when trying to access the contents of the flow file, the second when removing the flow file from the session (within a catch block for the first). This process is known to have worked under NiFi 0.7.
The basic process is:
1. Clone the incoming flow file
2. Write to the clone
3. Write to the clone again (some additional formatting)
4. Repeat 1-3
The error occurs on the second iteration, step 3.
An interesting point: if a session.read of the clone is done immediately after the clone is performed, everything works fine. The read seems to reset some pointer.
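Distilled, the workaround looks like the sketch below (the full processor later in this post exposes it via the immediateRead property):

FlowFile clone = session.clone(incomingFlowFile);
// Immediately draining the clone's content works around the problem; the read
// appears to reset the content-claim pointer mentioned above.
session.read(clone, in -> {
    byte[] buffer = new byte[4096];
    while (in.read(buffer) != -1) {
        // discard
    }
});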
I have created unit tests for this processor, but they do not fail in either case.
Below is code, simplified from the actual version in use, that demonstrates the issue. (The development system is not connected, so I had to copy the code by hand. Please forgive any typos; it should be close. This is also why a full stack trace is not provided.) The processor doing the work has a property that determines whether an immediate read is done or not, so both scenarios can be exercised easily. To set it up, all that is needed is a GetFile processor to supply the input and terminators for the output from the SampleCloningProcessor. A sample input file is included as well. The meat of the code is in the onTrigger and manipulate methods. The manipulation in this simplified version doesn't really do anything but copy the input to the output.
Any insights into why this is happening and suggestions for corrections would be appreciated - thanks.
SampleCloningProcessor.java
package sample.processors.cloning;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.Reader;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Scanner;
import java.util.Set;
import org.apache.commons.compress.utils.IOUtils;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.io.InputStreamCallback;
import org.apache.nifi.processor.io.OutputStreamCallback;
import org.apache.nifi.processor.io.StreamCallback;
import org.apache.nifi.processor.util.StandardValidators;
import com.google.gson.Gson;
@Tags({"example", "clone"})
@CapabilityDescription("Demonstrates cloning of flowfile failure.")
public class SampleCloningProcessor extends AbstractProcessor {
/* Determines if an immediate read is performed after cloning of the incoming flowfile. */
public static final PropertyDescriptor IMMEDIATE_READ = new PropertyDescriptor.Builder()
.name("immediateRead")
.description("Determines if processor runs successfully. If a read is done immediately "
+ "after the clone of the incoming flowFile, then the processor should run successfully.")
.required(true)
.allowableValues("true", "false")
.defaultValue("true")
.addValidator(StandardValidators.BOOLEAN_VALIDATOR)
.build();
public static final Relationship SUCCESS = new Relationship.Builder().name("success").
description("No unexpected errors.").build();
public static final Relationship FAILURE = new Relationship.Builder().name("failure").
description("Errors were thrown.").build();
private Set<Relationship> relationships;
private List<PropertyDescriptor> properties;
@Override
public void init(final ProcessorInitializationContext context) {
relationships = new HashSet<>(Arrays.asList(SUCCESS, FAILURE));
properties = Arrays.asList(IMMEDIATE_READ);
}
@Override
public Set<Relationship> getRelationships() {
return this.relationships;
}
@Override
public List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return this.properties;
}
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
FlowFile incomingFlowFile = session.get();
if (incomingFlowFile == null) {
return;
}
try {
final InfileReader inFileReader = new InfileReader();
session.read(incomingFlowFile, inFileReader);
Product product = inFileReader.getProduct();
boolean transfer = false;
getLogger().info("\tSession :\n" + session);
getLogger().info("\toriginal :\n" + incomingFlowFile);
for (int i = 0; i < 2; i++) {
transfer = manipulate(context, session, incomingFlowFile, product);
}
} catch (Exception e) {
getLogger().error(e.getMessage(), e);
session.rollback(true);
}
}
private boolean manipulate(final ProcessContext context, final ProcessSession session,
final FlowFile incomingFlowFile, final Product product) {
boolean transfer = false;
FlowFile outgoingFlowFile = null;
boolean immediateRead = context.getProperty(IMMEDIATE_READ).asBoolean();
try {
//Clone incoming flowFile
outgoingFlowFile = session.clone(incomingFlowFile);
getLogger().info("\tclone outgoing :\n" + outgoingFlowFile);
if (immediateRead) {
readFlowFile(session, outgoingFlowFile);
}
//First write into clone
StageOneWriter stage1Writer = new StageOneWriter(product);
outgoingFlowFile = session.write(outgoingFlowFile, stage1Writer);
getLogger().info("\twrite outgoing :\n" + outgoingFlowFile);
// Format the cloned file with another write
outgoingFlowFile = formatFlowFile(session, outgoingFlowFile);
getLogger().info("\tformat outgoing :\n" + outgoingFlowFile);
session.transfer(outgoingFlowFile, SUCCESS);
transfer = true;
} catch (Exception e) {
getLogger().error(e.getMessage(), e);
if (outgoingFlowFile != null) {
session.remove(outgoingFlowFile);
}
}
return transfer;
}
private void readFlowFile(final ProcessSession session, final FlowFile flowFile) {
session.read(flowFile, new InputStreamCallback() {
@Override
public void process(final InputStream in) throws IOException {
try (Scanner scanner = new Scanner(in)) {
scanner.useDelimiter("\\A").next();
}
}
});
}
private FlowFile formatFlowFile(final ProcessSession session, FlowFile flowFile) {
OutputFormatWriter formatWriter = new OutputFormatWriter();
flowFile = session.write(flowFile, formatWriter);
return flowFile;
}
private static class OutputFormatWriter implements StreamCallback {
@Override
public void process(final InputStream in, final OutputStream out) throws IOException {
try {
IOUtils.copy(in, out);
out.flush();
} finally {
IOUtils.closeQuietly(in);
IOUtils.closeQuietly(out);
}
}
}
private static class StageOneWriter implements OutputStreamCallback {
private Product product = null;
public StageOneWriter(Product product) {
this.product = product;
}
@Override
public void process(final OutputStream out) throws IOException {
final Gson gson = new Gson();
final String json = gson.toJson(product);
out.write(json.getBytes());
}
}
private static class InfileReader implements InputStreamCallback {
private Product product = null;
@Override
public void process(final InputStream in) throws IOException {
product = null;
final Gson gson = new Gson();
Reader inReader = new InputStreamReader(in, "UTF-8");
product = gson.fromJson(inReader, Product.class);
}
public Product getProduct() {
return product;
}
}
}
SampleCloningProcessorTest.java
package sample.processors.cloning;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.Before;
import org.junit.Test;
public class SampleCloningProcessorTest {
static final String flowFileContent = "{"
+ "\"cost\": \"cost 1\","
+ "\"description\": \"description 1\","
+ "\"markup\": 1.2,"
+ "\"name\":\"name 1\","
+ "\"supplier\":\"supplier 1\""
+ "}";
private TestRunner testRunner;
@Before
public void init() {
testRunner = TestRunners.newTestRunner(SampleCloningProcessor.class);
testRunner.enqueue(flowFileContent);
}
@Test
public void testProcessorImmediateRead() {
testRunner.setProperty(SampleCloningProcessor.IMMEDIATE_READ, "true");
testRunner.run();
testRunner.assertTransferCount("success", 2);
}
@Test
public void testProcessorImmediateRead_false() {
testRunner.setProperty(SampleCloningProcessor.IMMEDIATE_READ, "false");
testRunner.run();
testRunner.assertTransferCount("success", 2);
}
}
Product.java
package sample.processors.cloning;
public class Product {
private String name;
private String description;
private String supplier;
private String cost;
private float markup;
public String getName() {
return name;
}
public void setName(final String name) {
this.name = name;
}
public String getDescription() {
return description;
}
public void setDescription(final String description) {
this.description = description;
}
public String getSupplier() {
return supplier;
}
public void setSupplier(final String supplier) {
this.supplier = supplier;
}
public String getCost() {
return cost;
}
public void setCost(final String cost) {
this.cost = cost;
}
public float getMarkup() {
return markup;
}
public void setMarkup(final float markup) {
this.markup = markup;
}
}
product.json (a sample input file):
{
"const" : "cost 1",
"description" : "description 1",
"markup" : 1.2,
"name" : "name 1",
"supplier" : "supplier 1"
}
Reported as a bug in Nifi. Being addressed by https://issues.apache.org/jira/browse/NIFI-5879

Logback DBAppender very slow

I have a custom DB appender which extends the logback DBAppender. My application has 100 parallel threads, and these threads also log events to the database through the custom DB appender while processing requests.
Logging performance is so poor that with logging disabled the run finishes in 20 minutes, whereas with logging enabled it takes almost 5 hours, most of that time spent waiting for log rows to be written before the next step can proceed, since my logging is synchronous.
At first I thought the slowness could be due to the synchronized doAppend method of the logback appender. However, I later noticed that DBAppender extends UnsynchronizedAppenderBase (public abstract class DBAppenderBase<E> extends UnsynchronizedAppenderBase<E>).
Can anyone help me understand what could be the reason for this poor performance?
My DB Appender:
package com.test;
import java.io.StringReader;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Map;
import org.omg.CORBA.TRANSACTION_UNAVAILABLE;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ch.qos.logback.classic.db.DBAppender;
import ch.qos.logback.classic.spi.ILoggingEvent;
public class CustomDBAppender extends DBAppender {
final static Logger logger = LoggerFactory.getLogger("com.test");
protected String insertPropertiesSQL;
protected String insertExceptionSQL;
protected String insertSQL;
private CustomDBNameResolver dbNameResolver;
// default constructor
public CustomDBAppender() {
logger.debug("DB Appender instantiated");
}
// set dbNameResolver for getting table information
public void setCsiDbNameResolver(CustomDBNameResolver dbNameResolver) {
this.dbNameResolver = dbNameResolver;
}
@Override
public void start() {
super.start();
if (dbNameResolver == null)
dbNameResolver = new CustomDBNameResolver();
insertSQL = CustomSQLBuilder.buildInsertSQL(dbNameResolver);
}
@Override
protected void subAppend(ILoggingEvent event, Connection connection, PreparedStatement insertStatement)
throws Throwable {
logEvents(insertStatement, event);
// System.out.println("=== INSERT STATEMENT === " +
// insertStatement.toString());
int updateCount = -1;
try {
updateCount = insertStatement.executeUpdate();
} catch (Exception e) {
logger.error(" ERROR ");
e.printStackTrace();
}
logger.info(" updateCount = " + updateCount);
if (updateCount != 1) {
logger.error(" ERROR IT IS ");
addWarn("Failed to insert loggingEvent");
}
}
protected void secondarySubAppend(ILoggingEvent event, Connection connection, long eventId) throws Throwable {
}
// @Override
private void logEvents(PreparedStatement stmt, ILoggingEvent event) throws SQLException {
logData(event, stmt);
}
private void logData(ILoggingEvent event, PreparedStatement stmt) throws SQLException {
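// NOTE: the fields referenced below (logMsgArgs, payload, msgHeader, logLevel, logMessage,
// logMessageLength, currentStage, currentEvent, trxId, batchId, headerId) are declared
// elsewhere in the original class and were omitted from this post; logMsgArgs is
// presumably populated from event.getArgumentArray().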
try {
if (null != event && null != logMsgArgs && (event.getLoggerName().indexOf("com.test.beans.") != -1)) {
payload = null == logMsgArgs[0] ? "NA"
: (logMsgArgs[0].toString()).substring(0,
(logMsgArgs[0].toString().length() >= 4000 ? 3999 : logMsgArgs[0].toString().length()));
msgHeader = null == logMsgArgs[1] ? "NA"
: (logMsgArgs[1].toString()).substring(0,
(logMsgArgs[1].toString().length() >= 4000 ? 3999 : logMsgArgs[1].toString().length()));
currentStage = null == logMsgArgs[2] ? -1 : Integer.parseInt(logMsgArgs[2].toString());
currentEvent = null == logMsgArgs[3] ? null : logMsgArgs[3].toString();
trxId = null == logMsgArgs[4] ? null : logMsgArgs[4].toString();
batchId = null == logMsgArgs[5] ? null : logMsgArgs[5].toString();
headerId = null == logMsgArgs[6] ? null : logMsgArgs[6].toString();
}
stmt.setString(1, logLevel);
stmt.setObject(2, trxId);
stmt.setString(3, logMessage);
stmt.setDate(4, getSqlDate());
stmt.setString(5, event.getLoggerName());
stmt.setString(6, event.getThreadName());
stmt.setString(7, event.getLoggerContextVO().getName());
stmt.setString(8, logMessage.substring(0, logMessageLength));
stmt.setString(9, payload);
stmt.setString(10, msgHeader);
stmt.setInt(11, currentStage);
stmt.setString(12, currentEvent);
stmt.setString(13, batchId);
stmt.setString(14, headerId);
} catch (Exception e) {
// TODO Auto-generated catch block
System.out.println("Exception while printing log:" + e.getMessage());
e.printStackTrace();
} finally {
}
}
private Object nvl(Object input) {
if (null == input) {
return null;
}
return input;
}
private java.sql.Date getSqlDate() {
Date now = new Date();
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSSX");
String nowDateStr = sdf.format(now);
logger.info("nowDateStr = " + nowDateStr);
java.sql.Date toDB = null;
try {
toDB = new java.sql.Date(sdf.parse(nowDateStr).getTime());
logger.info("toDB = " + toDB);
} catch (ParseException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return toDB;
}
@Override
protected String getInsertSQL() {
return insertSQL;
}
protected void insertProperties(Map<String, String> mergedMap, Connection connection, long eventId)
throws SQLException {
}
}
/* End of appender */
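Not part of the original post, but a common mitigation for exactly this symptom is to decouple the 100 worker threads from the database write by wrapping the DB appender in Logback's AsyncAppender, which queues events and writes them on a background thread. A minimal programmatic sketch, assuming the CustomDBAppender has already been given its connection source and started:

import ch.qos.logback.classic.AsyncAppender;
import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.LoggerContext;
import org.slf4j.LoggerFactory;

public class AsyncWiring {
    public static void wire(CustomDBAppender dbAppender) {
        LoggerContext ctx = (LoggerContext) LoggerFactory.getILoggerFactory();
        AsyncAppender async = new AsyncAppender();
        async.setContext(ctx);
        async.setQueueSize(8192);        // absorb bursts from the 100 worker threads
        async.setDiscardingThreshold(0); // do not silently drop events under load
        async.addAppender(dbAppender);   // the slow DB write now happens off-thread
        async.start();
        ctx.getLogger(Logger.ROOT_LOGGER_NAME).addAppender(async);
    }
}

The equivalent in logback.xml is an appender of class ch.qos.logback.classic.AsyncAppender containing an appender-ref to the DB appender. Callers then only pay for the enqueue, not for the JDBC round trip.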

Apache CXF Interceptors: Unable to modify the response stream in an Out Interceptor [duplicate]

I would like to modify an outgoing SOAP request by removing 2 XML nodes from the Envelope's body.
I managed to set up an interceptor and get the generated String value of the message sent to the endpoint.
However, the following code does not seem to work, as the outgoing message is not edited as expected. Does anyone have some code or ideas on how to do this?
public class MyOutInterceptor extends AbstractSoapInterceptor {
public MyOutInterceptor() {
super(Phase.SEND);
}
public void handleMessage(SoapMessage message) throws Fault {
// Get message content for dirty editing...
StringWriter writer = new StringWriter();
CachedOutputStream cos = (CachedOutputStream)message.getContent(OutputStream.class);
InputStream inputStream = cos.getInputStream();
IOUtils.copy(inputStream, writer, "UTF-8");
String content = writer.toString();
// remove the substrings from envelope...
content = content.replace("<idJustification>0</idJustification>", "");
content = content.replace("<indicRdv>false</indicRdv>", "");
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
outputStream.write(content.getBytes(Charset.forName("UTF-8")));
message.setContent(OutputStream.class, outputStream);
}
}
Based on the first comment, I created an abstract class which can easily be used to change the whole SOAP envelope.
Just in case someone wants a ready-to-use piece of code.
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import org.apache.commons.io.IOUtils;
import org.apache.cxf.binding.soap.interceptor.SoapPreProtocolOutInterceptor;
import org.apache.cxf.io.CachedOutputStream;
import org.apache.cxf.message.Message;
import org.apache.cxf.phase.AbstractPhaseInterceptor;
import org.apache.cxf.phase.Phase;
import org.apache.log4j.Logger;
/**
* http://www.mastertheboss.com/jboss-web-services/apache-cxf-interceptors
* http://stackoverflow.com/questions/6915428/how-to-modify-the-raw-xml-message-of-an-outbound-cxf-request
*
*/
public abstract class MessageChangeInterceptor extends AbstractPhaseInterceptor<Message> {
public MessageChangeInterceptor() {
super(Phase.PRE_STREAM);
addBefore(SoapPreProtocolOutInterceptor.class.getName());
}
protected abstract Logger getLogger();
protected abstract String changeOutboundMessage(String currentEnvelope);
protected abstract String changeInboundMessage(String currentEnvelope);
public void handleMessage(Message message) {
boolean isOutbound = false;
isOutbound = message == message.getExchange().getOutMessage()
|| message == message.getExchange().getOutFaultMessage();
if (isOutbound) {
OutputStream os = message.getContent(OutputStream.class);
CachedStream cs = new CachedStream();
message.setContent(OutputStream.class, cs);
message.getInterceptorChain().doIntercept(message);
try {
cs.flush();
IOUtils.closeQuietly(cs);
CachedOutputStream csnew = (CachedOutputStream) message.getContent(OutputStream.class);
String currentEnvelopeMessage = IOUtils.toString(csnew.getInputStream(), "UTF-8");
csnew.flush();
IOUtils.closeQuietly(csnew);
if (getLogger().isDebugEnabled()) {
getLogger().debug("Outbound message: " + currentEnvelopeMessage);
}
String res = changeOutboundMessage(currentEnvelopeMessage);
if (res != null) {
if (getLogger().isDebugEnabled()) {
getLogger().debug("Outbound message has been changed: " + res);
}
}
res = res != null ? res : currentEnvelopeMessage;
InputStream replaceInStream = IOUtils.toInputStream(res, "UTF-8");
IOUtils.copy(replaceInStream, os);
replaceInStream.close();
IOUtils.closeQuietly(replaceInStream);
os.flush();
message.setContent(OutputStream.class, os);
IOUtils.closeQuietly(os);
} catch (IOException ioe) {
getLogger().warn("Unable to perform change.", ioe);
throw new RuntimeException(ioe);
}
} else {
try {
InputStream is = message.getContent(InputStream.class);
String currentEnvelopeMessage = IOUtils.toString(is, "UTF-8");
IOUtils.closeQuietly(is);
if (getLogger().isDebugEnabled()) {
getLogger().debug("Inbound message: " + currentEnvelopeMessage);
}
String res = changeInboundMessage(currentEnvelopeMessage);
if (res != null) {
if (getLogger().isDebugEnabled()) {
getLogger().debug("Inbound message has been changed: " + res);
}
}
res = res != null ? res : currentEnvelopeMessage;
is = IOUtils.toInputStream(res, "UTF-8");
message.setContent(InputStream.class, is);
IOUtils.closeQuietly(is);
} catch (IOException ioe) {
getLogger().warn("Unable to perform change.", ioe);
throw new RuntimeException(ioe);
}
}
}
public void handleFault(Message message) {
}
private class CachedStream extends CachedOutputStream {
public CachedStream() {
super();
}
protected void doFlush() throws IOException {
currentStream.flush();
}
protected void doClose() throws IOException {
}
protected void onWrite() throws IOException {
}
}
}
I had this problem as well today. After much weeping and gnashing of teeth, I was able to alter the StreamInterceptor class in the configuration_interceptor demo that comes with the CXF source:
OutputStream os = message.getContent(OutputStream.class);
CachedStream cs = new CachedStream();
message.setContent(OutputStream.class, cs);
message.getInterceptorChain().doIntercept(message);
try {
cs.flush();
CachedOutputStream csnew = (CachedOutputStream) message.getContent(OutputStream.class);
String soapMessage = IOUtils.toString(csnew.getInputStream());
...
The soapMessage variable will contain the complete SOAP message. You should be able to manipulate the SOAP message, flush it to an output stream, and make a message.setContent(OutputStream.class... call to put your modifications on the message. This comes with no warranty, since I'm pretty new to CXF myself!
Note: CachedStream is a private class in the StreamInterceptor class. Don't forget to configure your interceptor to run in the PRE_STREAM phase so that the SOAP interceptors have a chance to write the SOAP message.
The following is able to bubble up server-side exceptions. Using os.close() instead of IOUtils.closeQuietly(os) in the previous solution also allows exceptions to bubble up.
public class OutInterceptor extends AbstractPhaseInterceptor<Message> {
public OutInterceptor() {
super(Phase.PRE_STREAM);
addBefore(StaxOutInterceptor.class.getName());
}
public void handleMessage(Message message) {
OutputStream os = message.getContent(OutputStream.class);
CachedOutputStream cos = new CachedOutputStream();
message.setContent(OutputStream.class, cos);
message.getInterceptorChain().add(new OutMessageChangingInterceptor(os));
}
}
public class OutMessageChangingInterceptor extends AbstractPhaseInterceptor<Message> {
private OutputStream os;
public OutMessageChangingInterceptor(OutputStream os){
super(Phase.PRE_STREAM_ENDING);
addAfter(StaxOutEndingInterceptor.class.getName());
this.os = os;
}
public void handleMessage(Message message) {
try {
CachedOutputStream csnew = (CachedOutputStream) message.getContent(OutputStream.class);
String currentEnvelopeMessage = IOUtils.toString(csnew.getInputStream(), (String) message.get(Message.ENCODING));
csnew.flush();
IOUtils.closeQuietly(csnew);
String res = changeOutboundMessage(currentEnvelopeMessage);
res = res != null ? res : currentEnvelopeMessage;
InputStream replaceInStream = IOUtils.toInputStream(res, (String) message.get(Message.ENCODING));
IOUtils.copy(replaceInStream, os);
replaceInStream.close();
IOUtils.closeQuietly(replaceInStream);
message.setContent(OutputStream.class, os);
} catch (IOException ioe) {
throw new RuntimeException(ioe);
}
}
}
A good example of replacing outbound SOAP content, based on this:
package kz.bee.bip;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import org.apache.commons.io.IOUtils;
import org.apache.cxf.binding.soap.interceptor.SoapPreProtocolOutInterceptor;
import org.apache.cxf.io.CachedOutputStream;
import org.apache.cxf.message.Message;
import org.apache.cxf.phase.AbstractPhaseInterceptor;
import org.apache.cxf.phase.Phase;
public class SOAPOutboundInterceptor extends AbstractPhaseInterceptor<Message> {
public SOAPOutboundInterceptor() {
super(Phase.PRE_STREAM);
addBefore(SoapPreProtocolOutInterceptor.class.getName());
}
public void handleMessage(Message message) {
boolean isOutbound = false;
isOutbound = message == message.getExchange().getOutMessage()
|| message == message.getExchange().getOutFaultMessage();
if (isOutbound) {
OutputStream os = message.getContent(OutputStream.class);
CachedStream cs = new CachedStream();
message.setContent(OutputStream.class, cs);
message.getInterceptorChain().doIntercept(message);
try {
cs.flush();
IOUtils.closeQuietly(cs);
CachedOutputStream csnew = (CachedOutputStream) message.getContent(OutputStream.class);
String currentEnvelopeMessage = IOUtils.toString(csnew.getInputStream(), "UTF-8");
csnew.flush();
IOUtils.closeQuietly(csnew);
/* here we can set new data instead of currentEnvelopeMessage*/
InputStream replaceInStream = IOUtils.toInputStream(currentEnvelopeMessage, "UTF-8");
IOUtils.copy(replaceInStream, os);
replaceInStream.close();
IOUtils.closeQuietly(replaceInStream);
os.flush();
message.setContent(OutputStream.class, os);
IOUtils.closeQuietly(os);
} catch (IOException ioe) {
ioe.printStackTrace();
}
}
}
public void handleFault(Message message) {
}
private static class CachedStream extends CachedOutputStream {
public CachedStream() {
super();
}
protected void doFlush() throws IOException {
currentStream.flush();
}
protected void doClose() throws IOException {
}
protected void onWrite() throws IOException {
}
}
}
A better way would be to modify the message using the DOM interface. You need to add the SAAJOutInterceptor first (this might have a performance hit for big requests) and then your custom interceptor, which is executed in the USER_PROTOCOL phase:
import org.apache.cxf.binding.soap.SoapMessage;
import org.apache.cxf.binding.soap.interceptor.AbstractSoapInterceptor;
import org.apache.cxf.interceptor.Fault;
import org.apache.cxf.phase.Phase;
import org.w3c.dom.Node;
import javax.xml.soap.SOAPException;
import javax.xml.soap.SOAPMessage;
abstract public class SoapNodeModifierInterceptor extends AbstractSoapInterceptor {
SoapNodeModifierInterceptor() { super(Phase.USER_PROTOCOL); }
@Override public void handleMessage(SoapMessage message) throws Fault {
try {
if (message == null) {
return;
}
SOAPMessage sm = message.getContent(SOAPMessage.class);
if (sm == null) {
throw new RuntimeException("You must add the SAAJOutInterceptor to the chain");
}
modifyNodes(sm.getSOAPBody());
} catch (SOAPException e) {
throw new RuntimeException(e);
}
}
abstract void modifyNodes(Node node);
}
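For completeness, a registration sketch for the DOM-based approach above (assumptions: port is a JAX-WS client proxy, and the modifier argument is a concrete subclass of the abstract SoapNodeModifierInterceptor shown above):

import org.apache.cxf.binding.soap.saaj.SAAJOutInterceptor;
import org.apache.cxf.endpoint.Client;
import org.apache.cxf.frontend.ClientProxy;

public class InterceptorWiring {
    public static void wire(Object port, SoapNodeModifierInterceptor modifier) {
        Client client = ClientProxy.getClient(port);
        // SAAJOutInterceptor materializes the SOAPMessage so that
        // message.getContent(SOAPMessage.class) is non-null in later phases.
        client.getOutInterceptors().add(new SAAJOutInterceptor());
        client.getOutInterceptors().add(modifier);
    }
}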
This one is working for me. It is based on the StreamInterceptor class from the configuration_interceptor example in the Apache CXF samples.
It is in Scala instead of Java, but the conversion is straightforward.
I tried to add comments to explain what is happening (as far as I understand it).
import java.io.OutputStream
import org.apache.cxf.binding.soap.interceptor.SoapPreProtocolOutInterceptor
import org.apache.cxf.helpers.IOUtils
import org.apache.cxf.io.CachedOutputStream
import org.apache.cxf.message.Message
import org.apache.cxf.phase.AbstractPhaseInterceptor
import org.apache.cxf.phase.Phase
// java note: base constructor call is hidden at the end of class declaration
class StreamInterceptor() extends AbstractPhaseInterceptor[Message](Phase.PRE_STREAM) {
// java note: put this into the constructor after calling super(Phase.PRE_STREAM);
addBefore(classOf[SoapPreProtocolOutInterceptor].getName)
override def handleMessage(message: Message) = {
// get original output stream
val osOrig = message.getContent(classOf[OutputStream])
// our output stream
val osNew = new CachedOutputStream
// replace it with ours
message.setContent(classOf[OutputStream], osNew)
// fills the osNew instead of osOrig
message.getInterceptorChain.doIntercept(message)
// flush before getting content
osNew.flush()
// get filled content
val content = IOUtils.toString(osNew.getInputStream, "UTF-8")
// we got the content, we may close our output stream now
osNew.close()
// modified content
val modifiedContent = content.replace("a-string", "another-string")
// fill original output stream
osOrig.write(modifiedContent.getBytes("UTF-8"))
// flush before set
osOrig.flush()
// replace with original output stream filled with our modified content
message.setContent(classOf[OutputStream], osOrig)
}
}

How to call this Comet application from a web page

I have implemented the two classes shown at http://tomcat.apache.org/tomcat-6.0-doc/aio.html, which give a messenger application using Tomcat's Comet implementation.
How do I connect this to a web interface and get something to display?
I am thinking these are the basic steps (I don't know the details):
I should create some traditional event, a button click or AJAX event, that calls the ChatServlet and passes in a CometEvent (somehow), perhaps BEGIN.
From then on, my code calls the event method every time I want to send something to the client, using the READ event as the input parameter.
I have copied the two classes below:
package controller;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.util.ArrayList;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.catalina.CometEvent;
import org.apache.catalina.CometProcessor;
public class ChatServlet extends HttpServlet implements CometProcessor {
protected ArrayList<HttpServletResponse> connections = new ArrayList<HttpServletResponse>();
protected MessageSender messageSender = null;
public void init() throws ServletException {
messageSender = new MessageSender();
Thread messageSenderThread = new Thread(messageSender, "MessageSender["
+ getServletContext().getContextPath() + "]");
messageSenderThread.setDaemon(true);
messageSenderThread.start();
}
public void destroy() {
connections.clear();
messageSender.stop();
messageSender = null;
}
/**
* Process the given Comet event.
*
* @param event
* The Comet event that will be processed
* @throws IOException
* @throws ServletException
*/
public void event(CometEvent event) throws IOException, ServletException {
HttpServletRequest request = event.getHttpServletRequest();
HttpServletResponse response = event.getHttpServletResponse();
if (event.getEventType() == CometEvent.EventType.BEGIN) {
log("Begin for session: " + request.getSession(true).getId());
PrintWriter writer = response.getWriter();
writer
.println("<!doctype html public \"-//w3c//dtd html 4.0 transitional//en\">");
writer
.println("<head><title>JSP Chat</title></head><body bgcolor=\"#FFFFFF\">");
writer.flush();
synchronized (connections) {
connections.add(response);
}
} else if (event.getEventType() == CometEvent.EventType.ERROR) {
log("Error for session: " + request.getSession(true).getId());
synchronized (connections) {
connections.remove(response);
}
event.close();
} else if (event.getEventType() == CometEvent.EventType.END) {
log("End for session: " + request.getSession(true).getId());
synchronized (connections) {
connections.remove(response);
}
PrintWriter writer = response.getWriter();
writer.println("</body></html>");
event.close();
} else if (event.getEventType() == CometEvent.EventType.READ) {
InputStream is = request.getInputStream();
byte[] buf = new byte[512];
do {
int n = is.read(buf); // can throw an IOException
if (n > 0) {
log("Read " + n + " bytes: " + new String(buf, 0, n)
+ " for session: "
+ request.getSession(true).getId());
} else if (n < 0) {
// error(event, request, response);
System.out.println("you have an error");
return;
}
} while (is.available() > 0);
}
}
}
package controller;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import javax.servlet.http.HttpServletResponse;
public class MessageSender implements Runnable {
protected boolean running = true;
protected ArrayList<String> messages = new ArrayList<String>();
protected ArrayList<HttpServletResponse> connections = new ArrayList<HttpServletResponse>();
public MessageSender() {
}
public void stop() {
running = false;
}
/**
* Add message for sending.
*/
public void send(String user, String message) {
synchronized (messages) {
messages.add("[" + user + "]: " + message);
messages.notify();
}
}
public void run() {
while (running) {
if (messages.size() == 0) {
try {
synchronized (messages) {
messages.wait();
}
} catch (InterruptedException e) {
// Ignore
}
}
synchronized (connections) {
String[] pendingMessages = null;
synchronized (messages) {
pendingMessages = messages.toArray(new String[0]);
messages.clear();
}
// Send any pending message on all the open connections
for (int i = 0; i < connections.size(); i++) {
try {
PrintWriter writer = connections.get(i).getWriter();
for (int j = 0; j < pendingMessages.length; j++) {
writer.println(pendingMessages[j] + "<br>");
}
writer.flush();
} catch (IOException e) {
System.out.println("IOExeption sending message" + e);
}
}
}
}
}
}
Here you have a complete example for Tomcat, with source code to download at the bottom: Developing with Comet and Java
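As an illustration of the missing wiring (a sketch, not from the linked article; the attribute name and servlet are hypothetical): the browser never passes a CometEvent itself. Tomcat creates the BEGIN/READ/END events as the HTTP connection progresses, so the web page only needs a long-lived GET to ChatServlet for receiving, plus an ordinary POST whose handler calls MessageSender.send(). For the POST side, ChatServlet.init() could publish its sender via getServletContext().setAttribute("messageSender", messageSender), and a small servlet could then do:

package controller;

import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

public class PostMessageServlet extends HttpServlet {
    @Override
    protected void doPost(HttpServletRequest req, HttpServletResponse resp)
            throws ServletException, IOException {
        // Published by ChatServlet.init() under a context attribute (hypothetical name).
        MessageSender sender = (MessageSender) getServletContext().getAttribute("messageSender");
        // "user" and "message" are form fields posted by the page, e.g. via an AJAX call.
        sender.send(req.getParameter("user"), req.getParameter("message"));
        resp.setStatus(HttpServletResponse.SC_NO_CONTENT);
    }
}

Note that in the code as posted, MessageSender keeps its own connections list that is never populated; the servlet's list and the sender's list need to be shared for the broadcast loop in MessageSender.run() to reach any client.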