Spring Boot: send only changed data

I am building a game with Spring Boot on the server and classic JavaScript on the client.
Right now I have this:
...
@Autowired
private SimpMessagingTemplate template;
...
@Scheduled(fixedRate = 1000 / Constants.FPS)
public void renderClients() {
for(Game g : games) {
template.convertAndSend("/game/render/" + g.getId(), g);
}
}
...
Basically I have multiple Games running and I send each one, under its id, to the client.
However, the data I am sending (or most of it) is static (not changing)...
What if I don't want to send the whole object but only the parts that have changed?
Btw the response JSON looks like this:
{"id":"862b1dd8-48d5-4562-802a-7d669a5a5ed5","players":[{"id":"da8dcbec-7028-4a39-9547-a4e2dc321c3c","name":"John Doe","position":{"x":100.0,"y":100.0},"rotation":0.0,"hero":{"maxHealth":1300.0,"movementSpeed":4.5,"attackDamage":32.75,"width":68,"height":71,"heroName":"drowRanger","radius":34.0},"stats":{"kills":0,"lastHits":0},"lastClick":null}],"duration":380107.12}
and the only things that change are duration and, sometimes, the x and y when the player moves...
Is it even possible?
Could I write some middleware that will do that at the time the objects are converted to JSON?

Maintain a data structure that stores your changed values, and attach it to your Game object.
When it is time to send, convert that map to JSON and then clear it.
This approach may use more memory than before, but it won't cost much time.
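For example, a minimal sketch of that idea in Java (the DirtyTracker class and its method names are hypothetical, not part of the original code):

import java.util.HashMap;
import java.util.Map;

// Hypothetical helper attached to a Game: it remembers only the values
// that changed since the last send.
public class DirtyTracker {

    private final Map<String, Object> changed = new HashMap<>();

    // Call this from your setters whenever a value actually changes.
    public synchronized void mark(String key, Object value) {
        changed.put(key, value);
    }

    // Returns the accumulated changes and clears them for the next frame.
    public synchronized Map<String, Object> drain() {
        Map<String, Object> snapshot = new HashMap<>(changed);
        changed.clear();
        return snapshot;
    }
}

The scheduled method would then send something like game.getDirtyTracker().drain() instead of the whole Game, and the message converter would serialize that map as a plain JSON object.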

I DID IT!!
In my GameController I do:
@Scheduled(fixedRate = 1000 / Constants.FPS)
public void renderClients() throws Exception {
for(Game g : games) {
template.convertAndSend("/game/render/" + g.getId(), g.formatToSend());
}
}
Notice the g.formatToSend() method
Here is what the Game class looks like:
public class Game {
private BandWidthOptimizer optimizer = new BandWidthOptimizer();
...
...
public String formatToSend() throws Exception {
return optimizer.optimize(this);
}
}
And Here Comes THE BandWidthOptimizer:
package com.iddqd.doto.optimization;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import java.util.Set;
public class BandWidthOptimizer {
private ObjectMapper mapper = new ObjectMapper();
private JSONParser parser = new JSONParser();
// last sent snapshot; starts as an empty object so the first call sends everything
private String lastJSON = "{}";
private String[] preserveKeys;
public BandWidthOptimizer() {
this.preserveKeys = new String[0];
}
public BandWidthOptimizer(String[] preserveKeys) {
this.preserveKeys = preserveKeys;
}
public String optimize(Object obj) throws Exception {
String json = mapper.writeValueAsString(obj);
Object nobj = parser.parse(json);
Object oobj = parser.parse(lastJSON);
JSONObject newJsonObj = (JSONObject)nobj;
JSONObject oldJsonObj = (JSONObject)oobj;
JSONObject res = getJSONObjectDiff(newJsonObj, oldJsonObj);
lastJSON = json;
return res.toJSONString();
}
private JSONObject getJSONObjectDiff(JSONObject obj1, JSONObject obj2) {
JSONObject res = new JSONObject();
Set set = obj1.keySet();
for (Object key : set) {
// If doesn't exist put it in the diff
if (!obj2.containsKey(key)) {
res.put(key, obj1.get(key));
} else {
// Get the values from both objects
Object val1 = obj1.get(key);
Object val2 = obj2.get(key);
// If their instances are of the same type
if(val1 == null) {
continue;
}
if(val2 == null) {
res.put(key, val1);
continue;
}
if (val1.getClass().equals(val2.getClass())) {
// If they are JSONObject
if (val1 instanceof JSONObject) {
// Recursively parse JSONObject with all of it's properties
JSONObject nested = getJSONObjectDiff((JSONObject) obj1.get(key), (JSONObject) obj2.get(key));
// If it contains any keys
if(nested.keySet().size() > 0) {
// Store the diff into final diff
res.put(key, nested);
}
// If they are JSONArrays
} else if (val1 instanceof JSONArray) {
// If val1 contains some values (is not empty)
if(((JSONArray) val1).size() > 0) {
// Get their diff
JSONArray arr = getJSONArrayDiff((JSONArray) val1, (JSONArray) val2);
// If array is not empty
if (arr.size() > 0) {
// put it into the diff
res.put(key, arr);
}
}
// If they are just a pure values
} else {
// Compare them - If they're not equal
if(!val1.equals(val2)) {
// put the val1 into diff
res.put(key, val1);
}
}
} else {
res.put(key, val1);
}
}
}
return res;
}
private JSONArray getJSONArrayDiff(JSONArray arr1, JSONArray arr2) {
JSONArray res = new JSONArray();
// For every element
for(int i = 0; i < arr1.size(); i++) {
Object val1 = arr1.get(i);
// If i is out of arr2 bounds
if(i >= arr2.size()) {
// put the arr1 item into the diff and skip the comparison
res.add(val1);
continue;
}
Object val2 = arr2.get(i);
if(val1 == null) {
continue;
}
if(val2 == null) {
res.add(val1);
continue;
}
// If their types are equal
if(val1.getClass().equals(val2.getClass())) {
// If they are JSONObjects
if(val1 instanceof JSONObject) {
// Get their diff
JSONObject obj = getJSONObjectDiff((JSONObject) val1, (JSONObject) val2);
// If it contains any keys
if(obj.keySet().size() > 0) {
// Store the diff into final diff
res.add(obj);
}
// If they are JSONArrays
} else if (val1 instanceof JSONArray) {
// Get their diff
JSONArray arr = getJSONArrayDiff((JSONArray) val1, (JSONArray) val2);
// If array is not empty
if(arr.size() > 0) {
// put it into the diff
res.add(arr);
}
// If they are just a pure values
} else {
// Compare them - If they're not equal
if(!val1.equals(val2)) {
// add the val1 into diff
res.add(val1);
}
}
} else {
res.add(val1);
}
}
return res;
}
}
This is it, now if nothing moves on the map the result JSON looks like this:
{"duration":282964.56}
because only the duration changes
But when my Player moves on the map see what happens:
{"duration":386676.06,"players":[{"position":{"x":556.5914801003707,"y":153.55964799554002}}]}
TODO
I have to implement the preserveKeys functionality because I always want to send some keys, like id and so on...
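One possible way to finish that TODO (a sketch only, not part of the code above) is to copy the preserved keys from the freshly serialized object back into the diff before returning it:

// Hypothetical helper to add inside BandWidthOptimizer: always carry over the
// keys listed in preserveKeys (e.g. "id"), even when they have not changed.
private void applyPreservedKeys(JSONObject full, JSONObject diff) {
    for (String key : preserveKeys) {
        if (full.containsKey(key) && !diff.containsKey(key)) {
            diff.put(key, full.get(key));
        }
    }
}

optimize() would then call applyPreservedKeys(newJsonObj, res) right before res.toJSONString().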


Are there any Dart resources that would split a command-line String into a List<String> of arguments?
ArgsParser takes a List<String> of already split arguments usually from main(List<String>).
To answer my own question,
I've converted a Java function I liked into a Dart Converter<String, List<String>> class:
import 'dart:convert';
/// Splits a `String` into a list of command-line argument parts.
/// e.g. "command -p param" -> ["command", "-p", "param"]
///
class CommandlineConverter extends Converter<String, List<String>>
{
@override
List<String> convert(String input)
{
if (input == null || input.isEmpty)
{
//no command? no string
return [];
}
final List<String> result = new List<String>();
var current = "";
String inQuote;
bool lastTokenHasBeenQuoted = false;
for (int index = 0; index < input.length; index++)
{
final token = input[index];
if (inQuote != null)
{
if (token == inQuote)
{
lastTokenHasBeenQuoted = true;
inQuote = null;
}
else
{
current += token;
}
}
else
{
switch (token)
{
case "'": // '
case '"': // ""
inQuote = token;
continue;
case " ": // space
if (lastTokenHasBeenQuoted || current.isNotEmpty)
{
result.add(current);
current = "";
}
break;
default:
current += token;
lastTokenHasBeenQuoted = false;
}
}
}
if (lastTokenHasBeenQuoted || current.isNotEmpty)
{
result.add(current);
}
if (inQuote != null)
{
throw new Exception("Unbalanced quote $inQuote in input:\n$input");
}
return result;
}
}

Use an alias on a Pig UDF parameter

I need your help to know how to use an alias (stored tuple) in my Pig UDF function. Let me explain:
my_file.csv
101,message here
102,message here
103,message here
...
My script PIG:
X = load 'mydata.csv' using PigStorage(',') as (myVar:chararray);
A = load 'my_file.csv' using PigStorage(',') as (key:chararray, value:chararray);
B = GROUP A ALL;
C = foreach B {
D = ORDER A BY key;
GENERATE BagToTuple(D);
};
the result of C is something like (101,message here, 102, message here, 103, message here...)
Now what I need is to pass this result to my UDF function like:
Z = foreach X generate MYUDF(myVar, C);
the alias "C" is the tuple key,value,key,value...
MYUDF :
import java.io.IOException;
import java.util.regex.Pattern;
import org.apache.pig.EvalFunc;
import org.apache.pig.data.Tuple;
import org.apache.pig.PigWarning;
import org.apache.pig.data.DataType;
import org.apache.pig.impl.util.WrappedIOException;
import org.apache.pig.impl.logicalLayer.schema.Schema;
public class ReDecode extends EvalFunc<String> {
int numParams = -1;
Pattern mPattern = null;
@Override
public Schema outputSchema(Schema input) {
try {
return new Schema(new Schema.FieldSchema(getSchemaName(this
.getClass().getName().toLowerCase(), input),
DataType.CHARARRAY));
} catch (Exception e) {
return null;
}
}
@Override
public String exec(Tuple tuple) throws IOException {
if (numParams==-1) // Not initialized
{
numParams = tuple.size();
if (numParams <= 2) {
String msg = "Decode: Atleast an expression and default string is required.";
throw new IOException(msg);
}
if (tuple.size()%2!=0) {
String msg = "ItssPigUDFs.ReDecode : Some parameters are unmatched.";
throw new IOException(msg);
}
}
if (tuple.get(0)==null)
return null;
try {
for (int count = 1; count < numParams - 1; count += 2)
{
mPattern=Pattern.compile((String)tuple.get(count));
if (mPattern.matcher((String)tuple.get(0)).matches())
{
return (String)tuple.get(count+1);
}
}
} catch (ClassCastException e) {
warn("ItssPigUDFs.ReDecode : Data type error", PigWarning.UDF_WARNING_1);
return null;
} catch (NullPointerException e) {
String msg = "ItssPigUDFs.ReDecode : Encounter null in the input";
throw new IOException(msg);
}
return (String)tuple.get(tuple.size()-1);
}
}
Thank you for your help
I don't think numParams is needed; the number of params that you get to the UDF will be input.size().
Therefore, if you call MYUDF(myVar, C), you should be able to get those values in Java with String myVar = (String) input.get(0) and Tuple param2 = (Tuple) input.get(1).
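As an illustration, a minimal EvalFunc written that way might look like the following (the class name and the string it builds are made up for the example; only the way the parameters are read matches the answer above):

import java.io.IOException;
import org.apache.pig.EvalFunc;
import org.apache.pig.data.Tuple;

// Illustrative UDF: the first field is the chararray from X, the second is the
// (key, value, key, value, ...) tuple produced by BagToTuple in alias C.
public class MyUdf extends EvalFunc<String> {

    @Override
    public String exec(Tuple input) throws IOException {
        if (input == null || input.size() < 2) {
            return null;
        }
        String myVar = (String) input.get(0);
        Tuple keyValues = (Tuple) input.get(1);

        // Walk the key/value pairs and build a result string.
        StringBuilder sb = new StringBuilder(myVar);
        for (int i = 0; i + 1 < keyValues.size(); i += 2) {
            sb.append(' ')
              .append(keyValues.get(i))
              .append('=')
              .append(keyValues.get(i + 1));
        }
        return sb.toString();
    }
}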

Reading a file with newlines as a tuple in pig

Is it possible to change the record delimiter from newline to some other string so as to read a file with newlines into a single tuple in pig.
Yes.
A = LOAD '...' USING PigStorage(',') AS (...); -- comma is the delimiter for fields
SET textinputformat.record.delimiter '<delimiter>'; -- record delimiter; by default it is `\n`. You can change it to any delimiter.
As mentioned here
You can use PigStorage
A = LOAD '/some/path/COMMA-DELIM-PREFIX*' USING PigStorage(',') AS (f1:chararray, ...);
B = LOAD '/some/path/SEMICOLON-DELIM-PREFIX*' USING PigStorage('\t') AS (f1:chararray, ...);
You can even try writing load/store UDF.
There is java code example for both load and store.
Load Functions : LoadFunc abstract class has the main methods for loading data and for most use cases it would suffice to extend it. You can read more here
Example
The loader implementation in the example is a loader for text data
with line delimiter as '\n' and '\t' as default field delimiter (which
can be overridden by passing a different field delimiter in the
constructor) - this is similar to current PigStorage loader in Pig.
The implementation uses an existing Hadoop supported Inputformat -
TextInputFormat - as the underlying InputFormat.
public class SimpleTextLoader extends LoadFunc {
protected RecordReader in = null;
private byte fieldDel = '\t';
private ArrayList<Object> mProtoTuple = null;
private TupleFactory mTupleFactory = TupleFactory.getInstance();
private static final int BUFFER_SIZE = 1024;
public SimpleTextLoader() {
}
/**
* Constructs a Pig loader that uses specified character as a field delimiter.
*
* @param delimiter
* the single byte character that is used to separate fields.
* ("\t" is the default.)
*/
public SimpleTextLoader(String delimiter) {
this();
if (delimiter.length() == 1) {
this.fieldDel = (byte)delimiter.charAt(0);
} else if (delimiter.length() > 1 && delimiter.charAt(0) == '\\') {
switch (delimiter.charAt(1)) {
case 't':
this.fieldDel = (byte)'\t';
break;
case 'x':
fieldDel =
Integer.valueOf(delimiter.substring(2), 16).byteValue();
break;
case 'u':
this.fieldDel =
Integer.valueOf(delimiter.substring(2)).byteValue();
break;
default:
throw new RuntimeException("Unknown delimiter " + delimiter);
}
} else {
throw new RuntimeException("PigStorage delimeter must be a single character");
}
}
@Override
public Tuple getNext() throws IOException {
try {
boolean notDone = in.nextKeyValue();
if (!notDone) {
return null;
}
Text value = (Text) in.getCurrentValue();
byte[] buf = value.getBytes();
int len = value.getLength();
int start = 0;
for (int i = 0; i < len; i++) {
if (buf[i] == fieldDel) {
readField(buf, start, i);
start = i + 1;
}
}
// pick up the last field
readField(buf, start, len);
Tuple t = mTupleFactory.newTupleNoCopy(mProtoTuple);
mProtoTuple = null;
return t;
} catch (InterruptedException e) {
int errCode = 6018;
String errMsg = "Error while reading input";
throw new ExecException(errMsg, errCode,
PigException.REMOTE_ENVIRONMENT, e);
}
}
private void readField(byte[] buf, int start, int end) {
if (mProtoTuple == null) {
mProtoTuple = new ArrayList<Object>();
}
if (start == end) {
// NULL value
mProtoTuple.add(null);
} else {
mProtoTuple.add(new DataByteArray(buf, start, end));
}
}
@Override
public InputFormat getInputFormat() {
return new TextInputFormat();
}
@Override
public void prepareToRead(RecordReader reader, PigSplit split) {
in = reader;
}
@Override
public void setLocation(String location, Job job)
throws IOException {
FileInputFormat.setInputPaths(job, location);
}
}
Store Functions : StoreFunc abstract class has the main methods for storing data and for most use cases it should suffice to extend it
Example
The storer implementation in the example is a storer for text data
with line delimiter as '\n' and '\t' as default field delimiter (which
can be overridden by passing a different field delimiter in the
constructor) - this is similar to current PigStorage storer in Pig.
The implementation uses an existing Hadoop supported OutputFormat -
TextOutputFormat as the underlying OutputFormat.
public class SimpleTextStorer extends StoreFunc {
protected RecordWriter writer = null;
private byte fieldDel = '\t';
private static final int BUFFER_SIZE = 1024;
private static final String UTF8 = "UTF-8";
public SimpleTextStorer() {
}
public SimpleTextStorer(String delimiter) {
this();
if (delimiter.length() == 1) {
this.fieldDel = (byte)delimiter.charAt(0);
} else if (delimiter.length() > 1 && delimiter.charAt(0) == '\\') {
switch (delimiter.charAt(1)) {
case 't':
this.fieldDel = (byte)'\t';
break;
case 'x':
fieldDel =
Integer.valueOf(delimiter.substring(2), 16).byteValue();
break;
case 'u':
this.fieldDel =
Integer.valueOf(delimiter.substring(2)).byteValue();
break;
default:
throw new RuntimeException("Unknown delimiter " + delimiter);
}
} else {
throw new RuntimeException("PigStorage delimeter must be a single character");
}
}
ByteArrayOutputStream mOut = new ByteArrayOutputStream(BUFFER_SIZE);
@Override
public void putNext(Tuple f) throws IOException {
int sz = f.size();
for (int i = 0; i < sz; i++) {
Object field;
try {
field = f.get(i);
} catch (ExecException ee) {
throw ee;
}
putField(field);
if (i != sz - 1) {
mOut.write(fieldDel);
}
}
Text text = new Text(mOut.toByteArray());
try {
writer.write(null, text);
mOut.reset();
} catch (InterruptedException e) {
throw new IOException(e);
}
}
@SuppressWarnings("unchecked")
private void putField(Object field) throws IOException {
//string constants for each delimiter
String tupleBeginDelim = "(";
String tupleEndDelim = ")";
String bagBeginDelim = "{";
String bagEndDelim = "}";
String mapBeginDelim = "[";
String mapEndDelim = "]";
String fieldDelim = ",";
String mapKeyValueDelim = "#";
switch (DataType.findType(field)) {
case DataType.NULL:
break; // just leave it empty
case DataType.BOOLEAN:
mOut.write(((Boolean)field).toString().getBytes());
break;
case DataType.INTEGER:
mOut.write(((Integer)field).toString().getBytes());
break;
case DataType.LONG:
mOut.write(((Long)field).toString().getBytes());
break;
case DataType.FLOAT:
mOut.write(((Float)field).toString().getBytes());
break;
case DataType.DOUBLE:
mOut.write(((Double)field).toString().getBytes());
break;
case DataType.BYTEARRAY: {
byte[] b = ((DataByteArray)field).get();
mOut.write(b, 0, b.length);
break;
}
case DataType.CHARARRAY:
// oddly enough, writeBytes writes a string
mOut.write(((String)field).getBytes(UTF8));
break;
case DataType.MAP:
boolean mapHasNext = false;
Map<String, Object> m = (Map<String, Object>)field;
mOut.write(mapBeginDelim.getBytes(UTF8));
for(Map.Entry<String, Object> e: m.entrySet()) {
if(mapHasNext) {
mOut.write(fieldDelim.getBytes(UTF8));
} else {
mapHasNext = true;
}
putField(e.getKey());
mOut.write(mapKeyValueDelim.getBytes(UTF8));
putField(e.getValue());
}
mOut.write(mapEndDelim.getBytes(UTF8));
break;
case DataType.TUPLE:
boolean tupleHasNext = false;
Tuple t = (Tuple)field;
mOut.write(tupleBeginDelim.getBytes(UTF8));
for(int i = 0; i < t.size(); ++i) {
if(tupleHasNext) {
mOut.write(fieldDelim.getBytes(UTF8));
} else {
tupleHasNext = true;
}
try {
putField(t.get(i));
} catch (ExecException ee) {
throw ee;
}
}
mOut.write(tupleEndDelim.getBytes(UTF8));
break;
case DataType.BAG:
boolean bagHasNext = false;
mOut.write(bagBeginDelim.getBytes(UTF8));
Iterator<Tuple> tupleIter = ((DataBag)field).iterator();
while(tupleIter.hasNext()) {
if(bagHasNext) {
mOut.write(fieldDelim.getBytes(UTF8));
} else {
bagHasNext = true;
}
putField((Object)tupleIter.next());
}
mOut.write(bagEndDelim.getBytes(UTF8));
break;
default: {
int errCode = 2108;
String msg = "Could not determine data type of field: " + field;
throw new ExecException(msg, errCode, PigException.BUG);
}
}
}
@Override
public OutputFormat getOutputFormat() {
return new TextOutputFormat<WritableComparable, Text>();
}
@Override
public void prepareToWrite(RecordWriter writer) {
this.writer = writer;
}
@Override
public void setStoreLocation(String location, Job job) throws IOException {
job.getConfiguration().set("mapred.textoutputformat.separator", "");
FileOutputFormat.setOutputPath(job, new Path(location));
if (location.endsWith(".bz2")) {
FileOutputFormat.setCompressOutput(job, true);
FileOutputFormat.setOutputCompressorClass(job, BZip2Codec.class);
} else if (location.endsWith(".gz")) {
FileOutputFormat.setCompressOutput(job, true);
FileOutputFormat.setOutputCompressorClass(job, GzipCodec.class);
}
}
}

Trying to save comma-separated list

Trying to save selections from a CheckBoxList as a comma-separated list (string) in DB (one or more choices selected). I am using a proxy in order to save as a string because otherwise I'd have to create separate tables in the DB for a relation - the work is not worth it for this simple scenario and I was hoping that I could just convert it to a string and avoid that.
The CheckBoxList uses an enum for its choices:
public enum Selection
{
Selection1,
Selection2,
Selection3
}
Not to be convoluted, but I use [Display(Name="Choice 1")] and an extension class to display something friendly on the UI. Not sure if I can save that string instead of just the enum, although I think if I save as enum it's not a big deal for me to "display" the friendly string on UI on some confirmation page.
This is the "Record" class that saves a string in the DB:
public virtual string MyCheckBox { get; set; }
This is the "Proxy", which is some sample I found but not directly dealing with enum, and which uses IEnumerable<string> (or should it be IEnumerable<Selection>?):
public IEnumerable<string> MyCheckBox
{
get
{
if (String.IsNullOrWhiteSpace(Record.MyCheckBox)) return new string[] { };
return Record
.MyCheckBox
.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries)
.Select(r => r.Trim())
.Where(r => !String.IsNullOrEmpty(r));
}
set
{
Record.MyCheckBox = value == null ? null : String.Join(",", value);
}
}
To save in the DB, I am trying to do this in a create class:
proxy.MyCheckBox = record.MyCheckBox; //getting error here
but am getting the error:
Cannot implicitly convert 'string' to System.Collections.Generic.IEnumerable'
I don't know, if it's possible or better, to use Parse or ToString from the API for enum values.
I know that doing something like this will store whatever I put in the ("") into the DB, so it's just a matter of figuring out how to overcome the error (or, if there is an alternative):
proxy.MyCheckBox = new[] {"foo", "bar"};
I am not good with this stuff and have just been digging and digging to come up with a solution. Any help is much appreciated.
You can accomplish this using a custom user type. The example below uses an ISet<string> on the class and stores the values as a delimited string.
[Serializable]
public class CommaDelimitedSet : IUserType
{
const string Delimiter = ",";
#region IUserType Members
public new bool Equals(object x, object y)
{
if (ReferenceEquals(x, y))
{
return true;
}
var xSet = x as ISet<string>;
var ySet = y as ISet<string>;
if (xSet == null || ySet == null)
{
return false;
}
// compare set contents
return xSet.Except(ySet).Count() == 0 && ySet.Except(xSet).Count() == 0;
}
public int GetHashCode(object x)
{
return x.GetHashCode();
}
public object NullSafeGet(IDataReader rs, string[] names, object owner)
{
var outValue = NHibernateUtil.String.NullSafeGet(rs, names[0]) as string;
if (string.IsNullOrEmpty(outValue))
{
return new HashSet<string>();
}
else
{
var splitArray = outValue.Split(new[] {Delimiter}, StringSplitOptions.RemoveEmptyEntries);
return new HashSet<string>(splitArray);
}
}
public void NullSafeSet(IDbCommand cmd, object value, int index)
{
var inValue = value as ISet<string>;
object setValue = inValue == null ? null : string.Join(Delimiter, inValue);
NHibernateUtil.String.NullSafeSet(cmd, setValue, index);
}
public object DeepCopy(object value)
{
// return new ISet so that Equals can work
// see http://www.mail-archive.com/nhusers@googlegroups.com/msg11054.html
var set = value as ISet<string>;
if (set == null)
{
return null;
}
return new HashSet<string>(set);
}
public object Replace(object original, object target, object owner)
{
return original;
}
public object Assemble(object cached, object owner)
{
return DeepCopy(cached);
}
public object Disassemble(object value)
{
return DeepCopy(value);
}
public SqlType[] SqlTypes
{
get { return new[] {new SqlType(DbType.String)}; }
}
public Type ReturnedType
{
get { return typeof(ISet<string>); }
}
public bool IsMutable
{
get { return false; }
}
#endregion
}
Usage in mapping file:
Map(x => x.CheckboxValues).CustomType<CommaDelimitedSet>();

EmitMapper and List

It's the first time that I've used EmitMapper.
I have a list of objects, e.g. Customer, and I would like to map this list to an IEnumerable of CustomerDTO. How can I do that?
Thanks
It's straightforward if you have a list and want to convert it to list of DTOs:
var mapper = ObjectMapperManager.DefaultInstance.GetMapper<Customer, CustomerDTO>();
IEnumerable<CustomerDTO> dtos = listOfCustomer.Select(mapper.Map);
The problem is when the list is inside another object, for example User and UserDTO:
class User {
public List<Customer> Customers { get; set; }
}
class UserDTO {
public IEnumerable<CustomerDTO> Customers { get; set; }
}
It seems that EmitMapper does not support conversion from List to Enumerable. A way to support it would be:
var customerMapper = ObjectMapperManager
.DefaultInstance.GetMapper<Customer, CustomerDTO>();
var mapper = ObjectMapperManager.DefaultInstance
.GetMapper<User, UserDTO>(
new DefaultMapConfig()
.ConvertUsing<List<Customer>, IEnumerable<CustomerDTO>>(
a => a.Select(customerMapper.Map))
);
This can be done by creating a custom class that implements the interface "ICustomConverterProvider" and adding a ConvertGeneric to the "DefaultMapConfig".
Looking at the source code of EmitMapper, I found a class named "ArraysConverterProvider", which is the default generic converter from ICollections to Arrays.
Adapting the code from this class to work with IEnumerable collections:
class GenericIEnumerableConverterProvider : ICustomConverterProvider
{
public CustomConverterDescriptor GetCustomConverterDescr(
Type from,
Type to,
MapConfigBaseImpl mappingConfig)
{
var tFromTypeArgs = DefaultCustomConverterProvider.GetGenericArguments(from);
var tToTypeArgs = DefaultCustomConverterProvider.GetGenericArguments(to);
if (tFromTypeArgs == null || tToTypeArgs == null || tFromTypeArgs.Length != 1 || tToTypeArgs.Length != 1)
{
return null;
}
var tFrom = tFromTypeArgs[0];
var tTo = tToTypeArgs[0];
if (tFrom == tTo && (tFrom.IsValueType || mappingConfig.GetRootMappingOperation(tFrom, tTo).ShallowCopy))
{
return new CustomConverterDescriptor
{
ConversionMethodName = "Convert",
ConverterImplementation = typeof(GenericIEnumerableConverter_OneTypes<>),
ConverterClassTypeArguments = new[] { tFrom }
};
}
return new CustomConverterDescriptor
{
ConversionMethodName = "Convert",
ConverterImplementation = typeof(GenericIEnumerableConverter_DifferentTypes<,>),
ConverterClassTypeArguments = new[] { tFrom, tTo }
};
}
}
class GenericIEnumerableConverter_DifferentTypes<TFrom, TTo> : ICustomConverter
{
private Func<TFrom, TTo> _converter;
public IEnumerable<TTo> Convert(IEnumerable<TFrom> from, object state)
{
if (from == null)
{
return null;
}
TTo[] result = new TTo[from.Count()];
int idx = 0;
foreach (var f in from)
{
result[idx++] = _converter(f);
}
return result;
}
public void Initialize(Type from, Type to, MapConfigBaseImpl mappingConfig)
{
var staticConverters = mappingConfig.GetStaticConvertersManager() ?? StaticConvertersManager.DefaultInstance;
var staticConverterMethod = staticConverters.GetStaticConverter(typeof(TFrom), typeof(TTo));
if (staticConverterMethod != null)
{
_converter = (Func<TFrom, TTo>)Delegate.CreateDelegate(
typeof(Func<TFrom, TTo>),
null,
staticConverterMethod
);
}
else
{
_subMapper = ObjectMapperManager.DefaultInstance.GetMapperImpl(typeof(TFrom), typeof(TTo), mappingConfig);
_converter = ConverterBySubmapper;
}
}
ObjectsMapperBaseImpl _subMapper;
private TTo ConverterBySubmapper(TFrom from)
{
return (TTo)_subMapper.Map(from);
}
}
class GenericIEnumerableConverter_OneTypes<T>
{
public IEnumerable<T> Convert(IEnumerable<T> from, object state)
{
if (from == null)
{
return null;
}
return from;
}
}
This code is just a copy with as little adaptation as possible, and it can be applied to objects with many levels of hierarchy.
You can use the above code with the following command:
new DefaultMapConfig().ConvertGeneric(
typeof(IEnumerable<>),
typeof(IEnumerable<>),
new GenericIEnumerableConverterProvider());
This saved my day and I hope it saves yours too!
