Does anyone know how to directly format a MIFARE Classic 1K card as NDEF using the Java nfctools library? I am using an ACR122U reader/writer.
I use this code to format a MIFARE Classic card to NDEF format:
package name.leiqin.test.nfc;
import java.io.IOException;
import org.nfctools.NfcAdapter;
import org.nfctools.api.TagInfo;
import org.nfctools.mf.classic.MfClassicNfcTagListener;
import org.nfctools.mf.ul.Type2NfcTagListener;
import org.nfctools.ndef.NdefOperations;
import org.nfctools.ndef.NdefOperationsListener;
import org.nfctools.scio.Terminal;
import org.nfctools.scio.TerminalHandler;
import org.nfctools.scio.TerminalMode;
import org.nfctools.spi.acs.AcsTerminal;
import org.nfctools.spi.scm.SclTerminal;
import org.nfctools.utils.LoggingUnknownTagListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class FormatCard implements NdefOperationsListener {
private static final Logger logger = LoggerFactory
.getLogger(FormatCard.class);
public static void main(String[] args) throws IOException {
TerminalHandler terminalHandler = new TerminalHandler();
terminalHandler.addTerminal(new AcsTerminal());
terminalHandler.addTerminal(new SclTerminal());
Terminal terminal = terminalHandler.getAvailableTerminal(null);
logger.info("terminal : {}", terminal);
NfcAdapter adapter = new NfcAdapter(terminal, TerminalMode.INITIATOR);
FormatCard formatListener = new FormatCard();
adapter.registerTagListener(new Type2NfcTagListener(formatListener));
adapter.registerTagListener(new MfClassicNfcTagListener(formatListener));
adapter.registerUnknownTagListerner(new LoggingUnknownTagListener());
logger.info("nfc adapter : {}", adapter);
adapter.startListening();
System.out.println("Waiting for tags, press ENTER to exit");
System.in.read();
}
@Override
public void onNdefOperations(NdefOperations ndefOperations) {
TagInfo tagInfo = ndefOperations.getTagInfo();
logger.info("tag id : {}", toHex(tagInfo.getId(), ""));
logger.info("tag type : {}", tagInfo.getTagType());
if (ndefOperations.isWritable()) {
if (ndefOperations.isFormatted()) {
logger.info("tag is already formatted");
} else {
logger.info("format start");
ndefOperations.format();
logger.info("format end");
}
} else {
logger.info("Tag not writable");
}
}
public static String toHex(byte[] bs, String separator) {
if (separator == null)
separator = " ";
// guard against an empty array, otherwise setLength() below would fail
if (bs == null || bs.length == 0)
return "";
StringBuilder sb = new StringBuilder();
for (byte b : bs) {
int i = b & 0xFF;
String hex = Integer.toHexString(i).toUpperCase();
if (hex.length() == 1)
hex = "0" + hex;
sb.append(hex);
sb.append(separator);
}
// drop the trailing separator
sb.setLength(sb.length() - separator.length());
return sb.toString();
}
}
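Once the tag is formatted, the same NdefOperations handle can also write a payload. Below is a minimal sketch of what the listener body could look like if you want to write a text record right after formatting; it assumes NdefOperations exposes writeNdefMessage(Record...) and that TextRecord lives in org.nfctools.ndef.wkt.records, so verify those names against your nfctools version.
// Sketch only: API names assumed, check them against your nfctools version
import org.nfctools.ndef.Record;
import org.nfctools.ndef.wkt.records.TextRecord;

// inside onNdefOperations(...):
if (ndefOperations.isWritable()) {
    if (!ndefOperations.isFormatted()) {
        ndefOperations.format();
    }
    // write a simple NDEF text record to the freshly formatted tag
    Record text = new TextRecord("hello from nfctools");
    ndefOperations.writeNdefMessage(text);
}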
The only i18n formats Spring supports out of the box are .properties and .xml, which is not really optimal.
What I'd like is to have a complex YAML file (messages.yml and messages_xx.yml) that gets converted to .properties in a Gradle task, so I can queue it before the build task.
For example, a messages.yml would look like:
group1:
  group2:
    group3:
      message1: hello
      message2: how are you?
    group4:
      message3: good
  group5:
    group6:
      message4: let's party
And the output .properties would be:
group1.group2.group3.message1: hello
group1.group2.group3.message2: how are you?
group1.group2.group4.message3: good
group1.group5.group6.message4: let's party
Is there a way to achieve this?
I didn't find any existing converters.
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Map;
import java.util.TreeMap;
import org.yaml.snakeyaml.Yaml;
public class YamlBackToProperties {
public static void main(String[] args) throws IOException {
Yaml yaml = new Yaml();
try (InputStream in = Files.newInputStream(Paths.get("test.yml"))) {
TreeMap<String, Map<String, Object>> config = yaml.loadAs(in, TreeMap.class);
System.out.println(String.format("%s%n%nConverts to Properties:%n%n%s", config.toString(), toProperties(config)));
}
}
private static String toProperties(TreeMap<String, Map<String, Object>> config) {
StringBuilder sb = new StringBuilder();
for (String key : config.keySet()) {
sb.append(toString(key, config.get(key)));
}
return sb.toString();
}
private static String toString(String key, Map<String, Object> map) {
StringBuilder sb = new StringBuilder();
for (String mapKey : map.keySet()) {
if (map.get(mapKey) instanceof Map) {
sb.append(toString(String.format("%s.%s", key, mapKey), (Map<String, Object>) map.get(mapKey)));
} else {
sb.append(String.format("%s.%s=%s%n", key, mapKey, map.get(mapKey).toString()));
}
}
return sb.toString();
}
}
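Since the goal is to run this from a Gradle task before the build, it is probably more useful to write the result to a file than to print it. A minimal sketch of that last step, reusing toProperties() from the answer above (the output path is just a placeholder):
// Write the flattened keys to a .properties file instead of printing them
String flattened = toProperties(config);
java.nio.file.Files.write(java.nio.file.Paths.get("build/generated/messages.properties"),
        flattened.getBytes(java.nio.charset.StandardCharsets.UTF_8));
A plain Gradle JavaExec task can then run this converter and be wired in ahead of the build task.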
Made some changes to the first answer; now it works for me in all cases:
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Map;
import java.util.TreeMap;
import org.yaml.snakeyaml.Yaml;
public class YamlConverter {
public static void main(String[] args) throws IOException {
Yaml yaml = new Yaml();
try (InputStream in = Files.newInputStream(Paths.get("yourpath/application.yml"))) {
TreeMap<String, Map<String, Object>> config = yaml.loadAs(in, TreeMap.class);
System.out.println(String.format("%s%n%nConverts to Properties:%n%n%s", config.toString(), toProperties(config)));
}
}
private static String toProperties(TreeMap<String, Map<String, Object>> config) {
StringBuilder sb = new StringBuilder();
for (String key : config.keySet()) {
sb.append(toString(key, config.get(key)));
}
return sb.toString();
}
private static String toString(String key, Object mapr) {
StringBuilder sb = new StringBuilder();
if (!(mapr instanceof Map)) {
sb.append(String.format("%s=%s%n", key, mapr));
return sb.toString();
}
Map<String, Object> map = (Map<String, Object>)mapr;
for (String mapKey : map.keySet()) {
if (map.get(mapKey) instanceof Map) {
sb.append(toString(key+"."+mapKey, map.get(mapKey)));
} else {
sb.append(String.format("%s.%s=%s%n", key, mapKey, map.get(mapKey).toString()));
}
}
return sb.toString();
}
}
Here's a straightforward implementation in Kotlin:
Once you have a Map with the parsed YAML, just call flatten():
fun flatten(map: Map<String, *>): MutableMap<String, Any> {
val processed = mutableMapOf<String, Any>()
map.forEach { key, value ->
doFlatten(key, value as Any, processed)
}
return processed
}
fun doFlatten(parentKey: String, value: Any, processed: MutableMap<String, Any>) {
if (value is Map<*, *>) {
value.forEach {
doFlatten("$parentKey.${it.key}", it.value as Any, processed)
}
} else {
processed[parentKey] = value
}
}
You can try it like this (Configuration is a bean that mirrors the YAML structure; see the linked article):
package com.example.yaml;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import org.yaml.snakeyaml.Yaml;
public class YamlConfigRunner {
public static void main(String[] args) throws IOException {
if( args.length != 1 ) {
System.out.println( "Usage: <file.yml>" );
return;
}
Yaml yaml = new Yaml();
try( InputStream in = Files.newInputStream( Paths.get( args[ 0 ] ) ) ) {
Configuration config = yaml.loadAs( in, Configuration.class );
System.out.println( config.toString() );
}
}
}
reference: https://dzone.com/articles/using-yaml-java-application
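For reference, loadAs needs a JavaBean whose fields mirror the YAML layout, so this approach only works when the structure is known up front. A minimal hypothetical sketch (the article linked above defines a richer class):
// Hypothetical bean for a YAML file with top-level "name:" and "version:" keys
public class Configuration {
    private String name;
    private String version;

    public String getName() { return name; }
    public void setName(String name) { this.name = name; }
    public String getVersion() { return version; }
    public void setVersion(String version) { this.version = version; }

    @Override
    public String toString() {
        return "Configuration{name=" + name + ", version=" + version + "}";
    }
}
For the arbitrarily nested messages.yml in the question, the map-based answers above are a better fit.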
Has anybody implemented a Confluent Kafka message deserializer to consume Kafka messages with Spring @KafkaListeners?
Here is my answer, which I implemented based on io.confluent.kafka.serializers.AbstractKafkaAvroDeserializer:
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Map;
import javax.xml.bind.DatatypeConverter;
import org.apache.avro.Schema;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.avro.specific.SpecificRecordBase;
import org.apache.kafka.common.errors.SerializationException;
import org.apache.kafka.common.serialization.Deserializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class AvroConfluentDeserializer<T extends SpecificRecordBase> implements Deserializer<T> {
private static final Logger LOG = LoggerFactory.getLogger(AvroConfluentDeserializer.class);
protected static final byte MAGIC_BYTE = 0x0;
protected static final int idSize = 4;
private final DecoderFactory decoderFactory = DecoderFactory.get();
protected final Class<T> targetType;
public AvroConfluentDeserializer(Class<T> targetType) {
this.targetType = targetType;
}
@Override
public void close() {
// No-op
}
@Override
public void configure(Map<String, ?> arg0, boolean arg1) {
// No-op
}
@Override
public T deserialize(String topic, byte[] data) {
try {
T result = null;
if (data != null) {
LOG.info("data='{}'", DatatypeConverter.printHexBinary(data));
result = (T) deserializePayload(data, targetType.newInstance().getSchema());
LOG.info("deserialized data='{}'", result);
}
return result;
} catch (Exception ex) {
throw new SerializationException(
"Can't deserialize data '" + Arrays.toString(data) + "' from topic '" + topic + "'", ex);
}
}
protected T deserializePayload(byte[] payload, Schema schema) throws SerializationException {
int id = -1;
try {
ByteBuffer buffer = getByteBuffer(payload);
id = buffer.getInt();
int length = buffer.limit() - 1 - idSize;
int start = buffer.position() + buffer.arrayOffset();
DatumReader<T> reader = new SpecificDatumReader<T>(schema);
return reader.read(null, decoderFactory.binaryDecoder(buffer.array(), start, length, null));
} catch (IOException | RuntimeException e) {
throw new SerializationException("Error deserializing Avro message for id " + id, e);
}
}
private ByteBuffer getByteBuffer(byte[] payload) {
ByteBuffer buffer = ByteBuffer.wrap(payload);
if (buffer.get() != MAGIC_BYTE) {
throw new SerializationException("Unknown magic byte!");
}
return buffer;
}
}
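To consume with @KafkaListener, this deserializer has to be handed to the consumer factory as an instance (it takes the target class in its constructor, so it cannot be configured by class name in the properties). A sketch of the Spring Kafka wiring, where ProductEvent is a hypothetical generated Avro SpecificRecord class:
import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;

@Configuration
@EnableKafka
public class KafkaConsumerConfig {

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, ProductEvent> kafkaListenerContainerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // adjust to your brokers
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "my-group");

        // the value deserializer is passed as an instance because it needs the target class
        DefaultKafkaConsumerFactory<String, ProductEvent> consumerFactory =
                new DefaultKafkaConsumerFactory<>(props, new StringDeserializer(),
                        new AvroConfluentDeserializer<>(ProductEvent.class));

        ConcurrentKafkaListenerContainerFactory<String, ProductEvent> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory);
        return factory;
    }
}
A listener method annotated with @KafkaListener(topics = "...") and taking a ProductEvent parameter will then receive the deserialized records.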
2017-08-08 15:41:59,915 ERROR o.a.j.u.BeanShellInterpreter: Error invoking bsh method: eval Sourced file: inline evaluation of: import java.io.*; import org.apache.jmeter.protocol.tcp.sampler.*; import java.u . . . '' : Typed variable declaration : Object constructor
2017-08-08 15:41:59,915 WARN o.a.j.m.BeanShellPreProcessor: Problem in BeanShell script. org.apache.jorphan.util.JMeterException: Error invoking bsh method: eval Sourced file: inline evaluation of: import java.io.*; import org.apache.jmeter.protocol.tcp.sampler.*; import java.u . . . '' : Typed variable declaration : Object constructor
I would like to test an echo server with JMeter over TCP. Sending to the server works, but the class I use only returns a number (a hex dump). How do I get the response back as a sentence in JMeter? That is my first question.
The second problem can be seen in the screenshot: the response message is OK, but I get a BeanShell error from the code. How can I fix that?
package org.apache.jmeter.protocol.tcp.sampler;
import java.io.IOException;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.io.OutputStream;
import org.apache.jmeter.util.JMeterUtils;
import org.apache.jorphan.util.JOrphanUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class LengthPrefixedBinaryTCPClientImpl extends TCPClientDecorator {
private static final Logger log = LoggerFactory.getLogger(LengthPrefixedBinaryTCPClientImpl.class);
private final int lengthPrefixLen = JMeterUtils.getPropDefault("tcp.binarylength.prefix.length", 2);
public LengthPrefixedBinaryTCPClientImpl() {
super(new BinaryTCPClientImpl());
tcpClient.setEolByte(Byte.MAX_VALUE+1);
}
@Override
public void write(OutputStream os, String s) throws IOException{
os.write(intToByteArray(s.length()/2,lengthPrefixLen));
if(log.isDebugEnabled()) {
log.debug("Wrote: " + s.length()/2 + " bytes");
}
this.tcpClient.write(os, s);
}
@Override
public void write(OutputStream os, InputStream is) throws IOException {
this.tcpClient.write(os, is);
}
@Override
public String read(InputStream is) throws ReadException{
byte[] msg = new byte[0];
int msgLen = 0;
byte[] lengthBuffer = new byte[lengthPrefixLen];
try {
if (is.read(lengthBuffer, 0, lengthPrefixLen) == lengthPrefixLen) {
msgLen = byteArrayToInt(lengthBuffer);
msg = new byte[msgLen];
int bytes = JOrphanUtils.read(is, msg, 0, msgLen);
if (bytes < msgLen) {
log.warn("Incomplete message read, expected: " + msgLen + " got: " + bytes);
}
}
String buffer = JOrphanUtils.baToHexString(msg);
if(log.isDebugEnabled()) {
log.debug("Read: " + msgLen + "\n" + buffer);
}
return buffer;
}
catch(IOException e) {
throw new ReadException("", e, JOrphanUtils.baToHexString(msg));
}
}
/**
* Not useful, as the byte is never used.
* <p>
* {@inheritDoc}
*/
@Override
public byte getEolByte() {
return tcpClient.getEolByte();
}
/**
* {@inheritDoc}
*/
@Override
public void setEolByte(int eolInt) {
throw new UnsupportedOperationException("Cannot set eomByte for prefixed messages");
}
}
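On the first question: read() above hex-encodes the payload with JOrphanUtils.baToHexString, which is why you only see numbers. If the echo server replies with plain text, the end of read() could decode the bytes as a string instead; a sketch (assumes UTF-8 responses):
// Instead of hex-encoding the response, decode it as text (sketch, assumes UTF-8)
String buffer = new String(msg, java.nio.charset.StandardCharsets.UTF_8);
if (log.isDebugEnabled()) {
    log.debug("Read: " + msgLen + "\n" + buffer);
}
return buffer;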
I am trying to add a coprocessor to an HBase table and it is failing with this error:
2016-03-15 14:40:14,130 INFO org.apache.hadoop.hbase.regionserver.RSRpcServices: Open PRODUCT_DETAILS,,1457953190424.f687dd250bfd1f18ffbb8075fd625145.
2016-03-15 14:40:14,173 ERROR org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost: Failed to load coprocessor com.optymyze.coprocessors.ProductObserver
java.io.IOException: Failed on local exception: com.google.protobuf.InvalidProtocolBufferException: Protocol message end-group tag did not match expected tag.; Host Details : local host is: "mylocalhost/mylocalhostip"; destination host is: "mydestinationhost":9000;
at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:772)
To add the coprocessor I did the following:
hbase> disable 'PRODUCT_DETAILS'
hbase> alter 'PRODUCT_DETAILS', METHOD => 'table_att', 'coprocessor'=>'hdfs://mydestinationhost:9000/hbase-coprocessors-0.0.3-SNAPSHOT.jar|com.optymyze.coprocessors.ProductObserver|1001|arg1=1,arg2=2'
After this, enable 'PRODUCT_DETAILS' no longer works.
The coprocessor code is as follows:
package com.optymyze.coprocessors;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.util.Bytes;
import org.slf4j.Logger;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.slf4j.LoggerFactory.*;
/**
*
* Created by adnan on 14-03-2016.
*/
public class ProductObserver extends BaseRegionObserver {
private static final Logger LOGGER = getLogger(ProductObserver.class);
private static final String PRODUCT_DETAILS_TABLE = "PRODUCT_DETAILS";
public static final String COLUMN_FAMILY = "CF";
@Override
public void postPut(ObserverContext<RegionCoprocessorEnvironment> e, Put put, WALEdit edit, boolean writeToWAL) throws IOException {
List<KeyValue> kvs = put.getFamilyMap().get(Bytes.toBytes(COLUMN_FAMILY));
LOGGER.info("key values {}", kvs);
Map<String, Integer> qualifierVsValue = getMapForQualifierVsValuesForRequiredOnes(kvs);
LOGGER.info("qualifier values {}", qualifierVsValue);
List<Put> puts = createPuts(kvs, qualifierVsValue);
LOGGER.info("puts values {}", puts);
updateProductTable(e, puts);
LOGGER.info("puts done");
}
private void updateProductTable(ObserverContext<RegionCoprocessorEnvironment> e, List<Put> puts) throws IOException {
HTableInterface productTable = e.getEnvironment().getTable(Bytes.toBytes(PRODUCT_DETAILS_TABLE));
try {
productTable.put(puts);
}finally {
productTable.close();
}
}
private List<Put> createPuts(List<KeyValue> kvs, Map<String, Integer> qualifierVsValue) {
int salePrice, baseline = 0, finalPrice = 0;
List<Put> puts = new ArrayList<Put>(kvs.size());
for (KeyValue kv : kvs) {
if (kv.matchingQualifier(Bytes.toBytes("BASELINE"))) {
baseline = convertToZeroIfNull(qualifierVsValue, "PRICE")
- convertToZeroIfNull(qualifierVsValue, "PRICE")
* convertToZeroIfNull(qualifierVsValue, "DISCOUNT") / 100;
puts.add(newPut(kv, baseline));
}
if (kv.matchingQualifier(Bytes.toBytes("FINALPRICE"))) {
finalPrice = baseline + baseline * convertToZeroIfNull(qualifierVsValue, "UPLIFT") / 100;
puts.add(newPut(kv, finalPrice));
}
if (kv.matchingQualifier(Bytes.toBytes("SALEPRICE"))) {
salePrice = finalPrice * convertToZeroIfNull(qualifierVsValue, "VOLUME");
puts.add(newPut(kv, salePrice));
}
}
return puts;
}
private Map<String, Integer> getMapForQualifierVsValuesForRequiredOnes(List<KeyValue> kvs) {
Map<String, Integer> qualifierVsValue = new HashMap<String, Integer>();
for (KeyValue kv : kvs) {
getValueFromQualifier(kv, "PRICE", qualifierVsValue);
getValueFromQualifier(kv, "DISCOUNT", qualifierVsValue);
getValueFromQualifier(kv, "UPLIFT", qualifierVsValue);
getValueFromQualifier(kv, "VOLUME", qualifierVsValue);
}
return qualifierVsValue;
}
private Integer convertToZeroIfNull(Map<String, Integer> qualifierVsValue, String qualifier) {
Integer v = qualifierVsValue.get(qualifier);
return v == null ? 0 : v;
}
private void getValueFromQualifier(KeyValue kv, String qualifier, Map<String, Integer> qualifierVsValue) {
if (kv.matchingQualifier(Bytes.toBytes(qualifier))) {
qualifierVsValue.put(qualifier, Bytes.toInt(convertToByteZeroIfNull(kv)));
}
}
private Put newPut(KeyValue kv, int newVal) {
Put put = new Put(kv.getValue(), kv.getTimestamp());
put.add(kv.getFamily(), kv.getQualifier(), Bytes.toBytes(newVal));
return put;
}
private byte[] convertToByteZeroIfNull(KeyValue kv) {
return kv.getValue() == null ? Bytes.toBytes(0) : kv.getValue();
}
}
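As an alternative to the shell alter shown above, the coprocessor can be attached through the Java admin API, which makes the jar path and class name easier to double-check. A sketch against the HBase 1.x client API (same table, jar, and class as in the question; on 0.98, HBaseAdmin works similarly):
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class AttachCoprocessor {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        try (Connection connection = ConnectionFactory.createConnection(conf);
             Admin admin = connection.getAdmin()) {
            TableName table = TableName.valueOf("PRODUCT_DETAILS");
            admin.disableTable(table);
            HTableDescriptor desc = admin.getTableDescriptor(table);
            // jar path and class name as used in the alter command above
            desc.addCoprocessor("com.optymyze.coprocessors.ProductObserver",
                    new Path("hdfs://mydestinationhost:9000/hbase-coprocessors-0.0.3-SNAPSHOT.jar"),
                    1001, null);
            admin.modifyTable(table, desc);
            admin.enableTable(table);
        }
    }
}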
I have a text field with a filter on it that only allows digits and ','.
I want that when I type 1, it automatically becomes 1,0 when I leave the text field.
I could parse it and check with a substring whether there is a ',' at the end, but in my opinion that is not a very good way to do it. Is there a better way?
Use a converter in the text formatter you are using to filter the input:
import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.text.ParseException;
import java.util.function.UnaryOperator;
import javafx.application.Application;
import javafx.geometry.Pos;
import javafx.scene.Scene;
import javafx.scene.control.TextArea;
import javafx.scene.control.TextField;
import javafx.scene.control.TextFormatter;
import javafx.scene.control.TextFormatter.Change;
import javafx.scene.layout.VBox;
import javafx.stage.Stage;
import javafx.util.StringConverter;
public class DecimalTextField extends Application {
@Override
public void start(Stage primaryStage) {
// decimal formatter for default locale:
DecimalFormat decimalFormat = new DecimalFormat();
decimalFormat.setMinimumFractionDigits(1);
DecimalFormatSymbols symbols = decimalFormat.getDecimalFormatSymbols() ;
char decimalSep = symbols.getDecimalSeparator() ;
UnaryOperator<Change> filter = change -> {
for (char c : change.getText().toCharArray()) {
if ( (! Character.isDigit(c)) && c != decimalSep) {
return null ;
}
}
return change ;
};
StringConverter<Double> converter = new StringConverter<Double>() {
@Override
public String toString(Double object) {
return object == null ? "" : decimalFormat.format(object);
}
@Override
public Double fromString(String string) {
try {
return string.isEmpty() ? 0.0 : decimalFormat.parse(string).doubleValue();
} catch (ParseException e) {
return 0.0 ;
}
}
};
TextFormatter<Double> formatter = new TextFormatter<>(converter, 0.0, filter);
TextField textField = new TextField();
textField.setTextFormatter(formatter);
VBox root = new VBox(10, textField, new TextArea());
root.setAlignment(Pos.CENTER);
primaryStage.setScene(new Scene(root, 400, 400));
primaryStage.show();
}
public static void main(String[] args) {
launch(args);
}
}
(Obviously the filter could be improved here, e.g. to avoid multiple decimal separator characters in the input; one possible refinement is sketched below.)
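For instance, a filter that also rejects a second decimal separator can look at the full text the control would contain after the change; a sketch reusing decimalSep from the code above:
// Reject any change that would leave more than one decimal separator in the field
UnaryOperator<Change> filter = change -> {
    String newText = change.getControlNewText();
    int separators = 0;
    for (char c : newText.toCharArray()) {
        if (c == decimalSep) {
            separators++;
        } else if (!Character.isDigit(c)) {
            return null;
        }
    }
    return separators <= 1 ? change : null;
};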
I think the best approach would be to convert the string to a double and then convert the double back to a string using DecimalFormat. That way you know the number is in your desired format. A minimal sketch of that idea is below.
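The sketch reformats the field when it loses focus; textField is the field from the answer above, and Locale.GERMANY is just one example of a locale that uses ',' as the decimal separator.
import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.util.Locale;

// Reformat "1" to "1,0" when the field loses focus
DecimalFormat format = new DecimalFormat("0.0", DecimalFormatSymbols.getInstance(Locale.GERMANY));
textField.focusedProperty().addListener((obs, wasFocused, isFocused) -> {
    if (!isFocused) {
        try {
            double value = format.parse(textField.getText()).doubleValue();
            textField.setText(format.format(value));
        } catch (java.text.ParseException e) {
            textField.setText(format.format(0.0));
        }
    }
});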