Get Hawtio to show more than 255 chars in JMS messages - jms

I'm using Hawtio to browse my ActiveMQ queues. I'd also like to be able to edit a JMS message before resending it to another queue.
I don't see how I can edit a message in Hawtio, but that's fine; I guess it's not really legal to modify a message directly in the broker.
Instead, I thought I would copy the message body and send a new message with a modified body. Now, the problem I'm facing is that I can only see the first 255 chars of the message body. How can I see the entire ActiveMQ message in Hawtio, not just the first 255 characters?

To browse the queue, Hawtio uses the JMX interface. It calls the browse() method on the queue MBean, which returns the messages as CompositeData[].
When an ActiveMQBytesMessage is converted (see the class org.apache.activemq.broker.jmx.OpenTypeSupport.ByteMessageOpenTypeFactory), two fields are added: BodyLength and BodyPreview. The fields contain the following data.
BodyLength - the length of the JMS message body
BodyPreview - the first 255 bytes of the JMS message body (the length is hardcoded, as Claus Ibsen already said in his answer ;-))
Check the method Map<String, Object> getFields(Object o) in the class org.apache.activemq.broker.jmx.OpenTypeSupport.ByteMessageOpenTypeFactory.
Hawtio uses the field BodyPreview to display the message for non-text messages.
Check the file hawtio-web/src/main/webapp/app/activemq/js/browse.ts in Hawtio:
function createBodyText(message) {
  if (message.Text) {
    ...
  } else if (message.BodyPreview) {
    ...
    if (code === 1 || code === 2) {
      // bytes and text
      var len = message.BodyPreview.length;
      var lenTxt = "" + textArr.length;
      body = "bytes:\n" + bytesData + "\n\ntext:\n" + textData;
      message.textMode = "bytes (" + len + " bytes) and text (" + lenTxt + " chars)";
    } else {
      // bytes only
      var len = message.BodyPreview.length;
      body = bytesData;
      message.textMode = "bytes (" + len + " bytes)";
    }
    ...
  } else {
    message.textMode = "unsupported";
    ...
If you want to change it, you have to change it either in ActiveMQ or in Hawtio.
Below is a lengthy and verbose example to demonstrate the explanation.
import static java.lang.System.out;
import java.lang.management.ManagementFactory;
import java.util.Enumeration;
import java.util.concurrent.TimeUnit;
import javax.jms.BytesMessage;
import javax.jms.Connection;
import javax.jms.MessageProducer;
import javax.jms.Queue;
import javax.jms.QueueBrowser;
import javax.jms.Session;
import javax.jms.TextMessage;
import javax.management.MBeanServer;
import javax.management.MBeanServerInvocationHandler;
import javax.management.ObjectName;
import javax.management.openmbean.CompositeData;
import javax.management.openmbean.CompositeType;
import org.apache.activemq.ActiveMQConnectionFactory;
import org.apache.activemq.broker.BrokerFactory;
import org.apache.activemq.broker.BrokerService;
import org.apache.activemq.broker.jmx.QueueViewMBean;
import org.apache.activemq.command.ActiveMQBytesMessage;
import org.apache.activemq.command.ActiveMQTextMessage;
public class BodyPreviewExample {
public static void main(String[] args) throws Exception {
String password = "password";
String user = "user";
String queueName = "TEST_QUEUE";
String brokerUrl = "tcp://localhost:61616";
BrokerService broker = BrokerFactory.createBroker("broker:"+brokerUrl);
broker.start();
broker.waitUntilStarted();
Connection conn = new ActiveMQConnectionFactory(brokerUrl)
.createConnection(user, password);
conn.start();
Session session = conn.createSession(false, Session.AUTO_ACKNOWLEDGE);
Queue producerQueue = session.createQueue(queueName);
MessageProducer producer = session.createProducer(producerQueue);
// create a dummy message
StringBuilder sb = new StringBuilder(1000);
for (int i = 0; i < 100; i++) {
sb.append(">CAFEBABE<");
}
// create and send a JMSBytesMessage
BytesMessage bytesMsg = session.createBytesMessage();
bytesMsg.writeBytes(sb.toString().getBytes());
producer.send(bytesMsg);
// create and send a JMSTextMessage
TextMessage textMsg = session.createTextMessage();
textMsg.setText(sb.toString());
producer.send(textMsg);
producer.close();
out.printf("%nmessage info via session browser%n");
String format = "%-20s = %s%n";
Queue consumerQueue = session.createQueue(queueName);
QueueBrowser browser = session.createBrowser(consumerQueue);
for (Enumeration p = browser.getEnumeration(); p.hasMoreElements();) {
out.println();
Object next = p.nextElement();
if (next instanceof ActiveMQBytesMessage) {
ActiveMQBytesMessage amq = (ActiveMQBytesMessage) next;
out.printf(format, "JMSMessageID", amq.getJMSMessageID());
out.printf(format, "JMSDestination", amq.getJMSDestination());
out.printf(format, "JMSXMimeType", amq.getJMSXMimeType());
out.printf(format, "BodyLength", amq.getBodyLength());
} else if (next instanceof ActiveMQTextMessage) {
ActiveMQTextMessage amq = (ActiveMQTextMessage) next;
out.printf(format, "JMSMessageID", amq.getJMSMessageID());
out.printf(format, "JMSDestination", amq.getJMSDestination());
out.printf(format, "JMSXMimeType", amq.getJMSXMimeType());
out.printf(format, "text.length", amq.getText().length());
} else {
out.printf("unhandled message type: %s%n", next.getClass());
}
}
session.close();
conn.close();
// access the queue via JMX
out.printf("%nmessage info via JMX browse operation%n");
MBeanServer mbeanServer = ManagementFactory.getPlatformMBeanServer();
ObjectName name = new ObjectName("org.apache.activemq:type=Broker"
+ ",brokerName=localhost"
+ ",destinationType=Queue"
+ ",destinationName=" + queueName);
QueueViewMBean queue
= MBeanServerInvocationHandler.newProxyInstance(mbeanServer,
name, QueueViewMBean.class, true);
CompositeData[] browse = queue.browse();
for (CompositeData compositeData : browse) {
out.println();
CompositeType compositeType = compositeData.getCompositeType();
out.printf(format, "CompositeType", compositeType.getTypeName());
out.printf(format,"JMSMessageID",compositeData.get("JMSMessageID"));
if (compositeData.containsKey("BodyLength")) {
// body length of the ActiveMQBytesMessage
Long bodyLength = (Long) compositeData.get("BodyLength");
out.printf(format, "BodyLength", bodyLength);
// the content displayed by hawtio
Byte[] bodyPreview = (Byte[]) compositeData.get("BodyPreview");
out.printf(format, "size of BodyPreview", bodyPreview.length);
} else if (compositeData.containsKey("Text")) {
String text = (String) compositeData.get("Text");
out.printf(format, "Text.length()", text.length());
}
}
// uncomment if you want to check with e.g. JConsole
// TimeUnit.MINUTES.sleep(5);
broker.stop();
}
}
Example output:
message info via session browser
JMSMessageID = ID:hostname-50075-1467979678722-3:1:1:1:1
JMSDestination = queue://TEST_QUEUE
JMSXMimeType = jms/bytes-message
BodyLength = 1000
JMSMessageID = ID:hostname-50075-1467979678722-3:1:1:1:2
JMSDestination = queue://TEST_QUEUE
JMSXMimeType = jms/text-message
text.length = 1000
message info via JMX browse operation
CompositeType = org.apache.activemq.command.ActiveMQBytesMessage
JMSMessageID = ID:hostname-50075-1467979678722-3:1:1:1:1
BodyLength = 1000
size of BodyPreview = 255
CompositeType = org.apache.activemq.command.ActiveMQTextMessage
JMSMessageID = ID:hostname-50075-1467979678722-3:1:1:1:2
Text.length() = 1000
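If the goal is simply to see (and copy) the entire body of a text message, browsing the queue over plain JMS returns the full body rather than the 255-byte preview. A minimal sketch, assuming a local broker, the same user/password credentials as the example above, and the same TEST_QUEUE:
import java.util.Enumeration;
import javax.jms.Connection;
import javax.jms.Queue;
import javax.jms.QueueBrowser;
import javax.jms.Session;
import javax.jms.TextMessage;
import org.apache.activemq.ActiveMQConnectionFactory;
public class FullBodyBrowser {
    public static void main(String[] args) throws Exception {
        // assumptions: adapt broker URL, credentials and queue name to your setup
        Connection conn = new ActiveMQConnectionFactory("tcp://localhost:61616")
                .createConnection("user", "password");
        conn.start();
        Session session = conn.createSession(false, Session.AUTO_ACKNOWLEDGE);
        Queue queue = session.createQueue("TEST_QUEUE");
        QueueBrowser browser = session.createBrowser(queue);
        for (Enumeration<?> e = browser.getEnumeration(); e.hasMoreElements();) {
            Object msg = e.nextElement();
            if (msg instanceof TextMessage) {
                // getText() returns the complete body, not just the first 255 characters
                System.out.println(((TextMessage) msg).getText());
            }
        }
        browser.close();
        session.close();
        conn.close();
    }
}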

I think there is a hardcoded limitation in ActiveMQ when you query and browse the queues using the JMX API, which is what Hawtio uses. But I cannot remember whether it's only 255 bytes or higher.
Look in the Hawtio settings; there may be an ActiveMQ plugin setting to change the 255 chars, I can't remember either ;)

Related

MQHeaderList size is 0, but reading a specific MQRFH2 works

I am writing custom Java code to read messages from WebSphere MQ (version 8) and read all the headers from the MQ message.
When I use MQHeaderList to parse all the headers, the list size is 0:
MQMessage message = new MQMessage();
queue.get(message, getOptions);
DataInput in = new DataInputStream (new ByteArrayInputStream (b));
MQHeaderList headersfoundlist = null;
headersfoundlist = new MQHeaderList (in);
System.out.println("headersfoundlist size: " + headersfoundlist.size());
However, when I read only a specific MQRFH2 it works:
MQMessage message = new MQMessage();
queue.get(message, getOptions);
DataInput in = new DataInputStream (new ByteArrayInputStream (b));
MQRFH2 rfh2 = new MQRFH2(in);
Element usrfolder = rfh2.getFolder("usr", false);
System.out.println("usr folder" + usrfolder);
How can I parse all the headers of the MQ Message?
DataInput in = new DataInputStream (new ByteArrayInputStream (b));
What's that about? Not sure why you want to do that.
It should just be:
MQMessage message = new MQMessage();
queue.get(message, getOptions);
MQHeaderList headersfoundlist = new MQHeaderList(message);
System.out.println("headersfoundlist size: " + headersfoundlist.size());
Read more here.
Update:
Regarding #anshu's comment about it not working: I've always found the MQHeaderList class to be very buggy, which is why I don't use it.
Also, 99.99% of messages in MQ will only ever have 1 embedded MQ header (i.e. MQRFH2). Note: a JMS message == an MQRFH2 message. The only case where you will find 2 embedded MQ headers is for messages on the Dead Letter Queue.
i.e.
{MQDLH}{MQRFH2}{message payload}
Is there a real need for your application to process multiple embedded MQ headers? Is your application putting/getting JMS messages (aka MQRFH2 messages)?
If so then you should do something like the following:
queue.get(receiveMsg, gmo);
if (CMQC.MQFMT_RF_HEADER_2.equals(receiveMsg.format))
{
   receiveMsg.seek(0);
   MQRFH2 rfh2 = new MQRFH2(receiveMsg);
   int strucLen = rfh2.getStrucLength();
   int encoding = rfh2.getEncoding();
   int CCSID = rfh2.getCodedCharSetId();
   String format = rfh2.getFormat();
   int flags = rfh2.getFlags();
   int nameValueCCSID = rfh2.getNameValueCCSID();
   String[] folderStrings = rfh2.getFolderStrings();
   for (String folder : folderStrings)
      System.out.println("Folder: " + folder);
   if (CMQC.MQFMT_STRING.equals(format))
   {
      String msgStr = receiveMsg.readStringOfByteLength(receiveMsg.getDataLength());
      System.out.println("Data: " + msgStr);
   }
   else if (CMQC.MQFMT_NONE.equals(format))
   {
      byte[] b = new byte[receiveMsg.getDataLength()];
      receiveMsg.readFully(b);
      System.out.println("Data: " + new String(b));
   }
}
else if ( (CMQC.MQFMT_STRING.equals(receiveMsg.format)) ||
          (CMQC.MQFMT_NONE.equals(receiveMsg.format)) )
{
   Enumeration<String> props = receiveMsg.getPropertyNames("%");
   if (props != null)
   {
      System.out.println("Named Properties:");
      while (props.hasMoreElements())
      {
         String propName = props.nextElement();
         Object o = receiveMsg.getObjectProperty(propName);
         System.out.println("  Name=" + propName + " : Value=" + o);
      }
   }
   if (CMQC.MQFMT_STRING.equals(receiveMsg.format))
   {
      String msgStr = receiveMsg.readStringOfByteLength(receiveMsg.getMessageLength());
      System.out.println("Data: " + msgStr);
   }
   else
   {
      byte[] b = new byte[receiveMsg.getMessageLength()];
      receiveMsg.readFully(b);
      System.out.println("Data: " + new String(b));
   }
}
else
{
   byte[] b = new byte[receiveMsg.getMessageLength()];
   receiveMsg.readFully(b);
   System.out.println("Data: " + new String(b));
}
I found the mistake in my code. I have a few more steps before reading the headers, and they were moving the cursor in the message buffer to the end.
I added message.setDataOffset(0); before reading headers and it worked.
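For completeness, a minimal sketch of that fix as a standalone method (the MQHeaderList usage follows the answer above; the wrapper class and method name are just for illustration):
import java.io.IOException;
import com.ibm.mq.MQMessage;
import com.ibm.mq.headers.MQDataException;
import com.ibm.mq.headers.MQHeaderList;
public class HeaderListExample {
    // 'message' is assumed to be an MQMessage already populated via queue.get(message, getOptions)
    static void printHeaders(MQMessage message) throws MQDataException, IOException {
        message.setDataOffset(0);                                   // rewind the read cursor to the start of the message data
        MQHeaderList headersfoundlist = new MQHeaderList(message);  // parse all chained headers (MQRFH2, MQDLH, ...)
        System.out.println("headersfoundlist size: " + headersfoundlist.size());
    }
}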

JMeter Kafka consumer throwing error as [ClassCastException: [Ljava.lang.String; cannot be cast to java.util.List]

I am trying to read Kafka messages using a Kafka consumer in JMeter with a JSR223 Sampler. I am unable to understand the error:
[Response message: javax.script.ScriptException: javax.script.ScriptException: java.lang.ClassCastException: [Ljava.lang.String; cannot be cast to java.util.List]
Please help me solve the issue so that I can subscribe and consume messages using the Kafka consumer.
import java.util.Properties;
import java.util.Arrays;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.ConsumerRecord;
Properties props = new Properties();
String groupID = "REQUEST_RESPONSE_JOB_GROUP";
String clientID = "REQUEST_RESPONSE_JOB_CLIENT";
String BSID = "kafka:9092";
String topic = "PROC_REST_EVENTS";
props.put("bootstrap.servers", BSID);
props.put("group.id", groupID);
props.put("client.id", clientID);
props.put("enable.auto.commit", "true");
props.put("auto.commit.interval.ms", "1000");
props.put("session.timeout.ms", "30000");
props.put("key.deserializer","org.apache.kafka.common.serialization.StringDeserializer");
props.put("value.deserializer","org.apache.kafka.common.serialization.StringDeserializer");
props.put("partition.assignment.strategy","org.apache.kafka.clients.consumer.RangeAssignor");
KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(props);
//Kafka Consumer subscribes list of topics here.
consumer.subscribe(Arrays.asList(topic));
//print the topic name
System.out.println("Subscribed to topic " + topic);
while (true) {
ConsumerRecords<String, String> records = consumer.poll(100);
for (ConsumerRecord<String, String> record : records)
// print the offset,key and value for the consumer records.
System.out.printf("offset = %d, key = %s, value = %s\n",
record.offset(), record.key(), record.value());
return records;
}
Most probably you're getting a List from the Kafka topic while your consumer expects a String; you need to amend the consumer configuration to match the types which come from the topic.
Try out the following Groovy code, which sends 3 messages to the test topic (if it doesn't exist you will need to create it) and then reads them back.
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.clients.consumer.KafkaConsumer
import org.apache.kafka.clients.producer.KafkaProducer
import org.apache.kafka.clients.producer.ProducerConfig
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.common.serialization.LongDeserializer
import org.apache.kafka.common.serialization.LongSerializer
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.kafka.common.serialization.StringSerializer
def BOOTSTRAP_SERVERS = 'localhost:9092'
def TOPIC = 'test'
Properties kafkaProps = new Properties()
kafkaProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, BOOTSTRAP_SERVERS)
kafkaProps.put(ProducerConfig.CLIENT_ID_CONFIG, 'KafkaExampleProducer')
kafkaProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class.getName())
kafkaProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName())
kafkaProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, BOOTSTRAP_SERVERS)
kafkaProps.put(ConsumerConfig.GROUP_ID_CONFIG, 'KafkaExampleConsumer')
kafkaProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, LongDeserializer.class.getName())
kafkaProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName())
def producer = new KafkaProducer<>(kafkaProps)
def consumer = new KafkaConsumer<>(kafkaProps)
1.upto(3) {
def record = new ProducerRecord<>(TOPIC, it as long, 'Hello from JMeter ' + it)
producer.send(record)
log.info('Sent record(key=' + record.key() + ', value=' + record.value() + ')')
}
consumer.subscribe(Collections.singletonList(TOPIC))
final int giveUp = 100
int noRecordsCount = 0
while (true) {
def consumerRecords = consumer.poll(1000)
if (consumerRecords.count() == 0) {
noRecordsCount++
if (noRecordsCount > giveUp) break
else continue
}
consumerRecords.each { record ->
log.info('Received Record:(' + record.key() + ', ' + record.value() + ')')
}
consumer.commitAsync()
}
consumer.close()
You should see log entries for each sent and received record.
Once done you should be able to use the above code as a basis for your own Kafka message consumption testing. See the Apache Kafka - How to Load Test with JMeter article for more information on Kafka load testing with JMeter.

How to set Encoding in MQ headers using Beanshell in JMeter

I am developing a test script to put a message onto a queue using the IBM MQ API 8.0. I am using JMeter 3.1 and a Beanshell Sampler for this (see code below).
The problem I am having is setting the "Encoding" field in the MQ headers. I've tried different methods as per the API documentation, but nothing worked for me.
Has anyone faced this issue?
Thanks in advance!
Code below:
try {
MQEnvironment.hostname = _hostname;
MQEnvironment.channel = _channel;
MQEnvironment.port = _port;
MQEnvironment.userID = "";
MQEnvironment.password = "";
log.info("Using queue manager: " + _qMgr);
MQQueueManager _queueManager = new MQQueueManager(_qMgr);
int openOptions = CMQC.MQOO_OUTPUT + CMQC.MQOO_FAIL_IF_QUIESCING + CMQC.MQOO_INQUIRE + CMQC.MQOO_BROWSE
+ CMQC.MQOO_SET_IDENTITY_CONTEXT;
log.info("Using queue: " + _queueName + ", openOptions: " + openOptions);
MQQueue queue = _queueManager.accessQueue(_queueName, openOptions);
log.info("Building message...");
MQMessage sendmsg = new MQMessage();
sendmsg.clearMessage();
// Set MQ MD Headers
sendmsg.messageType = CMQC.MQMT_DATAGRAM;
sendmsg.replyToQueueName = _queueName;
sendmsg.replyToQueueManagerName = _qMgr;
sendmsg.userId = MQuserId;
sendmsg.setStringProperty("BAH_FR", fromBIC); // from /AppHdr/Fr/FIId/FinInstnId/BICFI
sendmsg.setStringProperty("BAH_TO", toBIC); // from /AppHdr/To/FIId/FinInstnId/BICFI
sendmsg.setStringProperty("BAH_MSGDEFIDR", "pacs.008.001.05"); // from /AppHdr/MsgDefIdr
sendmsg.setStringProperty("BAH_BIZSVC", "cus.clear.01-" + bizSvc); // from /AppHdr/BizSvcr
sendmsg.setStringProperty("BAH_PRTY", "NORM"); // priority
sendmsg.setStringProperty("userId", MQuserId); // user Id
sendmsg.setStringProperty("ConnectorId", connectorId);
sendmsg.setStringProperty("Roles", roleId);
MQPutMessageOptions pmo = new MQPutMessageOptions(); // accept the defaults, same as MQPMO_DEFAULT constant
pmo.options = CMQC.MQOO_SET_IDENTITY_CONTEXT; // set identity context by userId
// Build message
String msg = "<NS1> .... </NS1>";
// MQRFH2 Headers
sendmsg.format = CMQC.MQFMT_STRING;
//sendmsg.encoding = CMQC.MQENC_INTEGER_NORMAL | CMQC.MQENC_DECIMAL_NORMAL | CMQC.MQENC_FLOAT_IEEE_NORMAL;
sendmsg.encoding = 546; // encoding - 546 Windows/Linux
sendmsg.messageId = msgID.getBytes();
sendmsg.correlationId = CMQC.MQCI_NONE;
sendmsg.writeString(msg);
String messageIdBefore = new String(sendmsg.messageId, "UTF-8");
log.info("Before put, messageId=[" + messageIdBefore + "]");
int depthBefore = queue.getCurrentDepth();
log.info("Queue Depth=" + depthBefore);
log.info("Putting message on " + _queueName + ".... ");
queue.put(sendmsg, pmo);
int depthAfter = queue.getCurrentDepth();
log.info("Queue Depth=" + depthAfter);
log.info("**** Done");
String messageIdAfter = new String(sendmsg.messageId, "UTF-8");
log.info("After put, messageId=[" + messageIdAfter + "]");
log.info("Closing connection...");
} catch (Exception e) {
log.info("\nFAILURE - Exception\n");
StringWriter errors = new StringWriter();
e.printStackTrace(new PrintWriter(errors));
log.error(errors.toString());
}
I think you are overthinking the problem. If you are not doing some sort of weird manual character/data conversion, then you should be using:
sendmsg.encoding = MQC.MQENC_NATIVE;
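For context, a minimal sketch of how that line slots into building the message (the wrapper class and method are illustrative; the format and payload mirror the question's own code):
import com.ibm.mq.MQC;
import com.ibm.mq.MQMessage;
import com.ibm.mq.constants.CMQC;
public class NativeEncodingExample {
    // builds a text message using the platform-native encoding instead of a hardcoded value such as 546
    static MQMessage buildMessage(String payload) throws java.io.IOException {
        MQMessage sendmsg = new MQMessage();
        sendmsg.format = CMQC.MQFMT_STRING;
        sendmsg.encoding = MQC.MQENC_NATIVE;  // native integer/decimal/float encoding
        sendmsg.writeString(payload);
        return sendmsg;
    }
}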

Catch incoming emails and send them to a web service (rather than just to a mail server)

I would like to catch incoming emails and send them to a web service (rather than just to a mail server).
--
After some searching I found a way of getting new emails via polling (see below); this may be of some help to others. Is there a way to receive messages via SMTP? Perhaps via ISAPI?
using Limilabs.Mail;
using Limilabs.Client.IMAP;
public ActionResult checkIMAPmail()
{
string rval = "not a sausage";
using (Imap imap = new Imap())
{
imap.Connect(<mail server>);
imap.Login(<username>, <password>);
imap.SelectInbox();
List<long> uids = imap.Search(Flag.Unseen);
foreach (long uid in uids)
{
byte[] ourBytes = imap.GetMessageByUID(uid);
IMail email = new MailBuilder().CreateFromEml(ourBytes);
rval = email.Subject + " [" + email.From + "][" + email.Text + "]";
}
imap.Close();
}
return Content(rval, "text/html");
}
See also http://stackoverflow.com/questions/670183/accessing-imap-in-c-sharp
for other IMAP packages, although note the change to using byte[], above.
Given that Limilabs.Mail is a paid library, I finally used MailKit:
using MailKit;
public int checkIMAPmail()
{
int numEmails = 0;
try {
using (var client = new MailKit.Net.Imap.ImapClient())
{
client.ServerCertificateValidationCallback = (s, c, h, e) => true;
client.Connect(ourSmtpClient);
// disable the XOAUTH2 authentication mechanism.
client.AuthenticationMechanisms.Remove("XOAUTH2");
client.Authenticate(ourSmtpAdminUser, ourSmtpAdminUserPwd);
// The Inbox folder is always available on all IMAP servers...
var inboxFolder = client.Inbox;
var savedFolder = client.GetFolder("saved");
inboxFolder.Open(FolderAccess.ReadWrite);
for (int ii = 0; ii < inboxFolder.Count; ii++)
{
var query = MailKit.Search.SearchQuery.NotSeen;
foreach (var uid in inboxFolder.Search(query))
{
var thisMsg = inboxFolder.GetMessage(uid);
string thisDate = notNullString(thisMsg.Date);
string thisSubject = notNullString( thisMsg.Subject);
string thisBody = notNullString(thisMsg.GetTextBody(0)); // plain text
string thisFromName = "";
string thisFromEmail = "";
if ( thisMsg.From != null)
{
// just get the first
foreach( var mb in thisMsg.From.Mailboxes)
{
thisFromName = notNullString( mb.Name);
thisFromEmail = notNullString( mb.Address);
break;
}
}
numEmails += 1;
// move email to saved
inboxFolder.MoveTo(uid, savedFolder);
}
}
client.Disconnect(true);
}
}
catch (Exception exc)
{
log2file("checkIMAPmail Error: " + exc.ToString());
}
return numEmails;
}

Why did my custom Receiver not get the right output?

I have a question about a custom receiver for Spark Streaming.
I coded a copy like the following:
import java.io.{BufferedReader, InputStreamReader}
import java.net.Socket
import java.nio.charset.StandardCharsets
import org.apache.spark.SparkConf
import org.apache.spark.Logging
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.receiver.Receiver
/**
* Custom Receiver that receives data over a socket. Received bytes is interpreted as
* text and \n delimited lines are considered as records. They are then counted and printed.
*
* To run this on your local machine, you need to first run a Netcat server
* `$ nc -lk 9999`
* and then run the example
* `$ bin/run-example org.apache.spark.examples.streaming.CustomReceiver localhost 9999`
*/
object CustomReceiver2 {
def main(args: Array[String]) {
// Create the context with a 1 second batch size
val sparkConf = new SparkConf().setAppName("CustomReceiver")
val ssc = new StreamingContext(sparkConf, Seconds(1))
// Create a input stream with the custom receiver on target ip:port and count the
// words in input stream of \n delimited text (eg. generated by 'nc')
val lines = ssc.receiverStream(new CustomReceiver2("192.168.1.0", 11.toInt))
lines.saveAsObjectFiles("hdfs:/tmp/customreceiver/")
ssc.start()
ssc.awaitTermination()
}
}
class CustomReceiver2(host: String, port: Int)
extends Receiver[String](StorageLevel.MEMORY_AND_DISK_2) with Logging {
def onStart() {
// Start the thread that receives data over a connection
new Thread("Socket Receiver") {
override def run() { receive() }
}.start()
}
def onStop() {
}
/** Create a socket connection and receive data until receiver is stopped */
private def receive() {
var socket: Socket = null
var userInput: String = null
try {
logInfo("Connecting to " + host + ":" + port)
socket = new Socket(host, port)
logInfo("Connected to " + host + ":" + port)
val reader = new BufferedReader(
new InputStreamReader(socket.getInputStream(), StandardCharsets.UTF_8))
userInput = reader.readLine()
while(!isStopped && userInput != null) {
println("userInput= "+userInput)
store(userInput)
userInput = reader.readLine()
println("==store data finished==")
}
reader.close()
socket.close()
logInfo("Stopped receiving")
restart("Trying to connect again")
} catch {
case e: java.net.ConnectException =>
restart("Error connecting to " + host + ":" + port, e)
case t: Throwable =>
restart("Error receiving data", t)
}
}
}
It passes the compiler and runs smoothly, but I did not get the output I expected: the HDFS files were empty all the time.
Could anyone give me a hand?
