I have this isolated storage helper and I need to use it to save and retrieve data from my universal app.
I don't know where to begin. Should I create a small sample app and incorporate the helper class into it?
Here is my class:
using System.IO;
//using System.IO.IsolatedStorage;
using System.Runtime.Serialization.Json;
using System.Text;
/// <summary>
/// Persists objects in the application's local settings container,
/// serializing them to/from JSON with DataContractJsonSerializer.
/// Despite the name, this targets Windows.Storage.ApplicationData (UWP),
/// not the legacy System.IO.IsolatedStorage API.
/// </summary>
public static class IsolatedStorageHelper
{
    /// <summary>
    /// Retrieves and deserializes the object stored under <paramref name="key"/>,
    /// or default(T) when no value exists for that key.
    /// </summary>
    public static T GetObject<T>(string key)
    {
        var localSettings = Windows.Storage.ApplicationData.Current.LocalSettings;
        if (localSettings.Values.ContainsKey(key))
        {
            string serializedObject = localSettings.Values[key].ToString();
            return Deserialize<T>(serializedObject);
        }
        return default(T);
    }

    /// <summary>
    /// Serializes <paramref name="objectToSave"/> to JSON and stores it under
    /// <paramref name="key"/>, overwriting any existing value.
    /// </summary>
    public static void SaveObject<T>(string key, T objectToSave)
    {
        var localSettings = Windows.Storage.ApplicationData.Current.LocalSettings;
        string serializedObject = Serialize(objectToSave);
        localSettings.Values[key] = serializedObject;
    }

    /// <summary>Removes the value stored under <paramref name="key"/>; no-op when absent.</summary>
    public static void DeleteObject(string key)
    {
        var localSettings = Windows.Storage.ApplicationData.Current.LocalSettings;
        localSettings.Values.Remove(key);
    }

    // Serializes any object to a JSON string.
    private static string Serialize(object objectToSerialize)
    {
        using (MemoryStream ms = new MemoryStream())
        {
            DataContractJsonSerializer serializer = new DataContractJsonSerializer(objectToSerialize.GetType());
            // DataContractJsonSerializer writes UTF-8 bytes to the stream;
            // StreamReader's default encoding (UTF-8) reads them back correctly.
            serializer.WriteObject(ms, objectToSerialize);
            ms.Position = 0;
            using (StreamReader reader = new StreamReader(ms))
            {
                return reader.ReadToEnd();
            }
        }
    }

    // Deserializes a JSON string produced by Serialize back into a T.
    private static T Deserialize<T>(string jsonString)
    {
        // BUG FIX: the original encoded the string with Encoding.Unicode
        // (UTF-16) even though Serialize produced UTF-8; it only worked
        // because the JSON reader auto-detects the byte encoding. Using
        // UTF-8 on both sides makes the round trip consistent.
        using (MemoryStream ms = new MemoryStream(Encoding.UTF8.GetBytes(jsonString)))
        {
            DataContractJsonSerializer serializer = new DataContractJsonSerializer(typeof(T));
            return (T)serializer.ReadObject(ms);
        }
    }
}
Use the static SaveObject method, supplying the 'person' entity and a key; retrieve it later using GetObject.
But I think in this scenario you should use database sqlite and sqlite net package to save the entity.
Related
I want to get the field name of a field which is created with Infer.Field<MyDocument>(doc => doc.StringField1).
Example code:
using System;
using Nest;
using Xunit;
namespace Elasticsearch.Tests
{
    /// <summary>Document type used to exercise field-name inference.</summary>
    public class MyDocument
    {
        public string StringField1 { get; set; }
    }

    public class SerializeField
    {
        [Fact]
        public void TestFieldName()
        {
            var settings = new ConnectionSettings(new Uri("http://myesdomain.com:9200"));
            var elasticClient = new ElasticClient(settings);
            var field = Infer.Field<MyDocument>(d => d.StringField1);
            // TODO: resolve the serialized name of the inferred field here.
        }
    }
}
Can I leverage the client to serialize the name of the stringField the same way it would be serialized in any request?
Fortunately I found the answer myself:
Short:
client.SourceSerializer.Serialize(stringField, ms);
Complete:
using System;
using System.IO;
using System.Text;
using Nest;
using Xunit;
namespace Elasticsearch.Tests
{
    /// <summary>Document type used to exercise field-name inference.</summary>
    public class MyDocument
    {
        public string StringField1 { get; set; }
    }

    public class SerializeField
    {
        [Fact]
        public void TestFieldName()
        {
            var settings = new ConnectionSettings(new Uri("http://myesdomain.com:9200"));
            var client = new ElasticClient(settings);
            var field = Infer.Field<MyDocument>(d => d.StringField1);

            // The source serializer applies the same naming conventions the
            // client uses when building real requests.
            string fieldName;
            using (var buffer = new MemoryStream())
            {
                client.SourceSerializer.Serialize(field, buffer);
                buffer.Position = 0;
                using (var reader = new StreamReader(buffer, Encoding.UTF8))
                {
                    fieldName = reader.ReadToEnd();
                }
            }

            Assert.Equal("\"stringField1\"", fieldName);
        }
    }
}
There is a better solution that uses a Nest.FieldResolver.
using System;
using Nest;
using Xunit;
namespace Elasticsearch.Tests
{
    /// <summary>Document type used to exercise field-name inference.</summary>
    public class MyDocument
    {
        public string StringField1 { get; set; }
    }

    public class TestClass
    {
        [Fact]
        public void TestFieldName()
        {
            var settings = new ConnectionSettings(new Uri("http://myesdomain.com:9200"));
            var resolver = new FieldResolver(settings);
            var field = Infer.Field<MyDocument>(d => d.StringField1);

            // FieldResolver applies the connection's naming conventions
            // directly — no client or serialization round-trip required.
            var fieldName = resolver.Resolve(field);
            Assert.Equal("stringField1", fieldName);
        }
    }
}
I am using asp.net boilerplate and running a background job that posts data to an external API.
The post happens correctly, but the background job is still being abandoned instead of being deleted from the BackgroundJobs table.
Is there a way to force a successful job execution and only abandon the job if it fails?
Code Below
using Abp.Reflection.Extensions;
using EErx.Middleware.RestAPIClient.Dto;
using Erx.Middleware.Configuration;
using Erx.Middleware.Models;
using Erx.Middleware.RestAPIClient.Dto;
using Erx.Middleware.TCPCommunicator.Models;
using Microsoft.Extensions.Configuration;
using RestSharp;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Dynamic.Core;
using System.Text;
namespace Erx.Middleware.TCPCommunicator.Jobs
{
    /// <summary>
    /// Background job that POSTs a dispense-message status update to the
    /// external PD REST API. The job framework treats a normal return from
    /// Execute as success; any failure is now rethrown so the job is
    /// retried/abandoned only when it genuinely fails.
    /// </summary>
    public class PDStatusUpdateJob : BackgroundJob<PDUpdateJobArgs>, ITransientDependency
    {
        private readonly Log _log;
        private readonly IConfigurationRoot _appConfiguration;
        private readonly IRepository<DispenseMessageHeader, long> _dispenseMessageHeaderRepository;
        private readonly IRepository<DispenseMessageScript, long> _dispenseMessageScriptRepository;
        private readonly IObjectMapper _objectMapper;

        public PDStatusUpdateJob(
            Log log,
            IRepository<DispenseMessageHeader, long> dispenseMessageHeaderRepository,
            IRepository<DispenseMessageScript, long> dispenseMessageScriptRepository,
            IObjectMapper objectMapper
            )
        {
            _log = log;
            _dispenseMessageHeaderRepository = dispenseMessageHeaderRepository;
            _dispenseMessageScriptRepository = dispenseMessageScriptRepository;
            _objectMapper = objectMapper;
            _appConfiguration = AppConfigurations.Get(
                typeof(TcpCommunicatorModule).GetAssembly().GetDirectoryPathOrNull()
            );
        }

        /// <summary>
        /// Loads the dispense message identified by args.MessageGuid, maps it to
        /// the PD DTO and POSTs it to the configured "PDUpdateAPI" endpoint.
        /// Throws on any failure so the scheduler can distinguish it from success.
        /// </summary>
        [UnitOfWork]
        public override void Execute(PDUpdateJobArgs args)
        {
            try
            {
                var output = new PDDispenseMessageDto();
                var scriptOutput = new List<PDDispenseMessageScriptDto>();
                var headerRecord = _dispenseMessageHeaderRepository.FirstOrDefault(x => x.MessageGuid == args.MessageGuid);
                // Guard: the original dereferenced headerRecord.Id unchecked and
                // would fail with an uninformative NullReferenceException.
                if (headerRecord == null)
                {
                    throw new InvalidOperationException(
                        "No DispenseMessageHeader found for MessageGuid " + args.MessageGuid);
                }
                var dispenseMessage = _objectMapper.Map(headerRecord, output);
                var scripts = _dispenseMessageScriptRepository
                    .GetAllIncluding(p => p.Items)
                    .Where(x => x.DispenseMessageHeaderId == headerRecord.Id)
                    .ToList();
                dispenseMessage.Scripts = _objectMapper.Map(scripts, scriptOutput);

                var request = new RestRequest(Method.POST);
                var requestMsg = dispenseMessage.ToJsonString(true);
                var client = new RestClient(_appConfiguration.GetValue<string>("PDUpdateAPI"))
                {
                    Timeout = -1,
                    // SECURITY(review): accepting every server certificate disables
                    // TLS validation entirely — confirm this is intended outside
                    // dev/test environments.
                    RemoteCertificateValidationCallback = (sender, certificate, chain, sslPolicyErrors) => true
                };
                request.AddHeader("Authorization", "Basic " + GenerateToken());
                request.AddHeader("EntityDescription", "ERX");
                request.AddHeader("Content-Type", "application/json");
                request.AddParameter("application/json; charset=utf-8", requestMsg, ParameterType.RequestBody);

                IRestResponse response = client.Execute(request);
                if (response.ErrorMessage != null)
                {
                    // Transport-level failure (DNS, TLS, timeout, ...): surface it
                    // instead of merely logging, so the attempt is marked failed.
                    throw new InvalidOperationException(
                        "PD status update failed: " + response.ErrorMessage, response.ErrorException);
                }
            }
            catch (Exception e)
            {
                // BUG FIX: the original swallowed every exception (and logged only
                // e.Message, losing the stack trace), so the framework could never
                // tell failure from success. Log the full exception and rethrow;
                // the job is then retried/abandoned only when it actually fails.
                _log.Logger.Error(e.ToString());
                throw;
            }
        }

        /// <summary>
        /// Builds the Basic-auth token. NOTE(review): the credential string is
        /// hard-coded in source — move it to protected configuration/secrets.
        /// </summary>
        public static string GenerateToken()
        {
            var encoded = Convert.ToBase64String(Encoding.GetEncoding("ISO-8859-1").GetBytes("somestring"));
            return encoded;
        }
    }
}
I am getting some extra information within my graphql results.
Apart from the data and the end errors I am getting
document
operation
perf
extensions
so the result is getting quite bulky. The other thing I've noticed is that the initial loading of the documents and the IntelliSense take ages.
Any idea how I can get rid of this additional data?
Result of the graphQL query:
GraphQL Controller
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using ElectronConnectQuery.GraphQL;
using GraphQL;
using GraphQL.DataLoader;
using GraphQL.NewtonsoftJson;
using GraphQL.Types;
using GraphQL.Validation;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Logging;
namespace ElectronConnectQuery.Controllers.v1
{
    /// <summary>
    /// HTTP endpoint that executes GraphQL queries against the configured schema.
    /// </summary>
    [Route("[controller]")]
    public class GraphQLController : Controller
    {
        private readonly IDocumentExecuter _documentExecuter;
        private readonly ISchema _schema;
        private readonly DataLoaderDocumentListener _listener;
        private readonly ILogger<GraphQLController> _logger;

        public GraphQLController(ISchema schema, IDocumentExecuter documentExecuter, DataLoaderDocumentListener listener, ILogger<GraphQLController> logger)
        {
            _schema = schema;
            _documentExecuter = documentExecuter;
            _listener = listener;
            _logger = logger;
        }

        [HttpPost]
        public async Task<IActionResult> Post([FromBody] GraphQLQuery query, [FromServices] IEnumerable<IValidationRule> validationRules)
        {
            if (query == null) { throw new ArgumentNullException(nameof(query)); }
            _logger.LogDebug("GraphQL received query:{Query}", query.Query);

            var options = new ExecutionOptions
            {
                Schema = _schema,
                Query = query.Query,
                Inputs = query.Variables.ToInputs(),
                ValidationRules = validationRules,
                EnableMetrics = false
            };
#if (DEBUG)
            // Metrics are collected in debug builds only.
            options.EnableMetrics = true;
#endif
            options.Listeners.Add(_listener);

            var executionResult = await _documentExecuter.ExecuteAsync(options).ConfigureAwait(false);

            // Errors in the GraphQL result map to 400, anything else to 200.
            return executionResult.Errors?.Count > 0
                ? (IActionResult)BadRequest(executionResult)
                : Ok(executionResult);
        }
    }
}
Instead of writing the result yourself, use the IDocumentWriter, which will properly serialize the result.
/// <summary>
/// Serializes an object hierarchy to a stream. Typically this would be serializing an instance of the ExecutionResult class into a JSON stream.
/// </summary>
/// <remarks>
/// Implementations are provided by the GraphQL.SystemTextJson and
/// GraphQL.NewtonsoftJson packages; writing results through this interface
/// (rather than the default MVC serializer) keeps non-spec ExecutionResult
/// members out of the HTTP response.
/// </remarks>
public interface IDocumentWriter
{
/// <summary>
/// Asynchronously serializes the specified object to the specified stream.
/// </summary>
Task WriteAsync<T>(Stream stream, T value, CancellationToken cancellationToken = default);
}
There is also an extension method to serialize to a string.
public static async Task<string> WriteToStringAsync<T>(this IDocumentWriter writer, T value)
This example shows using middleware vs. a controller but the idea is the same.
https://github.com/graphql-dotnet/examples/blob/529b530d7a6aad878b2757d776282fdc1cdcb595/src/AspNetCoreCustom/Example/GraphQLMiddleware.cs#L75-L81
// Writes the execution result to the HTTP response through the injected
// IDocumentWriter (_writer, declared elsewhere in the middleware class),
// so the payload is serialized by GraphQL's own writer.
private async Task WriteResponseAsync(HttpContext context, ExecutionResult result)
{
context.Response.ContentType = "application/json";
// Any GraphQL errors map to 400 Bad Request, otherwise 200 OK.
context.Response.StatusCode = result.Errors?.Any() == true ? (int)HttpStatusCode.BadRequest : (int)HttpStatusCode.OK;
await _writer.WriteAsync(context.Response.Body, result);
}
You will need to include GraphQL.SystemTextJson or GraphQL.NewtonsoftJson to choose your implementation of IDocumentWriter.
https://www.nuget.org/packages/GraphQL.SystemTextJson
https://www.nuget.org/packages/GraphQL.NewtonsoftJson
The change which I've done to the controller is:
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Threading.Tasks;
using ElectronConnectQuery.GraphQL;
using GraphQL;
using GraphQL.DataLoader;
using GraphQL.Instrumentation;
using GraphQL.NewtonsoftJson;
using GraphQL.Types;
using GraphQL.Validation;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Logging;
namespace ElectronConnectQuery.Controllers.v1
{
    /// <summary>
    /// GraphQL endpoint that serializes results through IDocumentWriter so the
    /// response contains only spec-defined members (data/errors) and none of
    /// the document/operation/perf/extensions bloat.
    /// </summary>
    [Route("[controller]")]
    public class GraphQLController : Controller
    {
        private readonly IDocumentExecuter _documentExecuter;
        private readonly ISchema _schema;
        private readonly DataLoaderDocumentListener _listener;
        private readonly ILogger<GraphQLController> _logger;
        private readonly IDocumentWriter _writer;

        public GraphQLController(ISchema schema, IDocumentExecuter documentExecuter, DataLoaderDocumentListener listener, ILogger<GraphQLController> logger, IDocumentWriter writer)
        {
            _schema = schema;
            _documentExecuter = documentExecuter;
            _listener = listener;
            _logger = logger;
            _writer = writer;
        }

        [HttpPost]
        public async Task Post([FromBody] GraphQLQuery query, [FromServices] IEnumerable<IValidationRule> validationRules)
        {
            if (query == null) { throw new ArgumentNullException(nameof(query)); }
            _logger.LogDebug("GraphQL received query:{Query}", query.Query);
            var inputs = query.Variables.ToInputs();
            var executionOptions = new ExecutionOptions
            {
                Schema = _schema,
                Query = query.Query,
                Inputs = inputs,
                ValidationRules = validationRules,
                // Metrics stay off: enabling them (or adding
                // InstrumentFieldsMiddleware / Apollo tracing, as the original
                // did in a second, conflicting options lambda) is exactly what
                // put the extra perf/extensions data in the response.
                EnableMetrics = false,
            };
            // BUG FIX: the listener must be attached to the options that are
            // actually executed; the original added it to an options object it
            // then ignored, so data loaders were never dispatched.
            executionOptions.Listeners.Add(_listener);
            var result = await _documentExecuter.ExecuteAsync(executionOptions).ConfigureAwait(false);
            // BUG FIX: route output through WriteResponseAsync so errors set a
            // 400 status code; the original wrote the body directly and always
            // returned 200.
            await WriteResponseAsync(Response, result);
        }

        // Serializes the result with IDocumentWriter and maps GraphQL errors
        // to 400 Bad Request, otherwise 200 OK.
        private async Task WriteResponseAsync(HttpResponse response, ExecutionResult result)
        {
            response.ContentType = "application/json";
            response.StatusCode = result.Errors?.Any() == true ? (int)HttpStatusCode.BadRequest : (int)HttpStatusCode.OK;
            await _writer.WriteAsync(response.Body, result);
        }
    }
}
Startup.cs
In ConfigureServices
I have added the following lines
// Kestrel: opt back in to synchronous request/response body I/O.
// NOTE(review): this is a workaround — presumably needed because the
// Newtonsoft-based DocumentWriter registered below writes synchronously,
// which ASP.NET Core rejects by default; confirm before keeping in production.
services.Configure<KestrelServerOptions>(options =>
{
options.AllowSynchronousIO = true;
});
// IIS in-process hosting needs the same opt-in separately.
services.Configure<IISServerOptions>(options =>
{
options.AllowSynchronousIO = true;
});
Also I've registered the DocumentWriter
services.AddScoped<IDocumentWriter, GraphQL.NewtonsoftJson.DocumentWriter>();
This is design question/query so I hope it's Ok to post...
Pretty much every CRM OrgService and Context example you see is not static. Normally uses a using block. Which works of course, but could this be a static class rather than creating/destroying this over and over and over with using blocks everywhere? Is there any issue doing this?
Thanks for your advice :-)
Edit: Here's the code. Do you see any issue with this?...
CrmServiceContext is created by the early-bound entities generator.
/// <summary>
/// Process-wide, lazily-initialized CRM connection and early-bound context.
/// NOTE(review): OrganizationServiceProxy is not thread safe; the locking
/// below only protects initialization/cleanup — the shared instances must
/// still not be used concurrently from multiple threads.
/// </summary>
static public class CRMOrgService
{
    // Guards lazy initialization and cleanup of the shared instances.
    static private readonly object _sync = new object();
    static private IOrganizationService _orgService = null;
    static private CrmServiceContext _context = null;

    /// <summary>
    /// The shared organization service, created on first access from the
    /// CRMOrgSvc/CRMUser/CRMPass app settings.
    /// </summary>
    static public IOrganizationService OrgService
    {
        get
        {
            lock (_sync)
            {
                if (_orgService == null)
                {
                    var reader = new AppSettingsReader();
                    Uri crmUri = new Uri(reader.GetValue("CRMOrgSvc", typeof(string)).ToString());
                    string crmUser = reader.GetValue("CRMUser", typeof(string)).ToString();
                    string crmPass = reader.GetValue("CRMPass", typeof(string)).ToString();
                    // Your client credentials
                    ClientCredentials clientCredentials = new ClientCredentials();
                    clientCredentials.UserName.UserName = crmUser;
                    clientCredentials.UserName.Password = crmPass;
                    // Create your Organization Service Proxy
                    var proxy = new OrganizationServiceProxy(crmUri, null, clientCredentials, null);
                    proxy.EnableProxyTypes();
                    _orgService = (IOrganizationService)proxy;
                }
                return _orgService;
            }
        }
    }

    /// <summary>The shared early-bound context, created over OrgService on first access.</summary>
    static public CrmServiceContext Context
    {
        get
        {
            lock (_sync)
            {
                if (_context == null)
                {
                    _context = new CrmServiceContext(OrgService);
                }
                return _context;
            }
        }
    }

    /// <summary>
    /// Disposes the cached context and service. BUG FIX: the original threw a
    /// NullReferenceException when called before first use or called twice;
    /// it also leaked the proxy's underlying WCF channel.
    /// </summary>
    static public void CloseCleanUp()
    {
        lock (_sync)
        {
            if (_context != null)
            {
                _context.ClearChanges();
                _context.Dispose();
                _context = null;
            }
            // The proxy behind the interface owns a WCF channel; dispose it.
            var disposable = _orgService as IDisposable;
            if (disposable != null)
            {
                disposable.Dispose();
            }
            _orgService = null;
        }
    }
} // end class
Yes, it can be static. However, you have to keep in mind this instance will not be thread safe. This means, the connection instance can not be used simultaneous by multiple threads.
Just a quick question. In WP7, is it really bad design/idea to store complex data using IsolatedStorageSettings.ApplicationSettings? I want to save a collection of some class objects. The properties are marked with [DataMember] attributes.
An example of a class would be,
/// <summary>
/// Serializable wrapper for data cached offline; every member carries
/// [DataMember] so DataContract-based serializers round-trip it.
/// </summary>
[DataContract]
public class OfflineItem
{
[DataMember]
public string Id { get; set; }
/// <summary>Payload item. NOTE(review): MyItem must itself be data-contract serializable — confirm.</summary>
[DataMember]
public MyItem Item { get; set; }
[DataMember]
public Dictionary<string, string> KeyValues { get; set; }
}
Collection<OfflineItems> offlineItems = new Collection<OfflineItems>();
.....
IsolatedStorageSettings.ApplicationSettings["AllOfflineItems"] = offlineItems;
I tried it and it worked, but I want to know if it is a correct approach and will there be any performance hit in the long run?
@Jonna: I deliberated over this one too. I ended up using/adapting the following generic methods to serialize and deserialize via an IsolatedStorageFile, as shown below. They include deleting a file if it already exists when you are trying to update the data.
/// <summary>
/// Serializes <paramref name="obj"/> as indented XML into the isolated-storage
/// file <paramref name="fileName"/>, replacing any existing content. Failures
/// are logged and swallowed — persistence is deliberately best-effort, matching
/// the original contract (callers get no error signal).
/// </summary>
internal static void Write<T>(T obj, string fileName)
{
    XmlWriterSettings writerSettings = new XmlWriterSettings
    {
        Indent = true,
        IndentChars = "\t"
    };
    try
    {
        using (var isoStore = IsolatedStorageFile.GetUserStoreForApplication())
        {
            // NOTE: FileMode.Create below already truncates/overwrites an
            // existing file; the explicit delete is kept only for parity with
            // the original behavior and could be removed.
            if (isoStore.FileExists(fileName))
            {
                isoStore.DeleteFile(fileName);
            }
            using (var isoStream = new IsolatedStorageFileStream(fileName, FileMode.Create, isoStore))
            {
                XmlSerializer serializer = new XmlSerializer(typeof(T));
                using (XmlWriter xmlWriter = XmlWriter.Create(isoStream, writerSettings))
                {
                    serializer.Serialize(xmlWriter, obj);
                }
            }
        }
    }
    catch (IsolatedStorageException ex)
    {
        // BUG FIX: log the full exception — ex.Message alone dropped the
        // exception type and stack trace, making storage failures undiagnosable.
        Debug.WriteLine(ex.ToString());
    }
    catch (Exception emAll)
    {
        Debug.WriteLine(emAll.ToString());
    }
}
/// <summary>
/// Opens the isolated-storage file and deserializes its XML content to a T.
/// Exceptions are logged to the debugger and rethrown to the caller.
/// </summary>
internal static T Read<T>(string fileName)
{
    try
    {
        // Stacked usings: the stream is opened inside the store's scope.
        using (var store = IsolatedStorageFile.GetUserStoreForApplication())
        using (var stream = new IsolatedStorageFileStream(fileName, FileMode.Open, store))
        {
            var xml = new XmlSerializer(typeof(T));
            return (T)xml.Deserialize(stream);
        }
    }
    catch (IsolatedStorageException storageError)
    {
        Debug.WriteLine(storageError.Message);
        throw;
    }
    catch (Exception error)
    {
        Debug.WriteLine(error.Message);
        throw;
    }
}
Serialization would be called thus:
Serialization.Write<user>(userDetails, App.USERDETAILS);
And deserialization would be called thus:
Items = Serialization.Read<measurements>(App.MEASUREMENTS);
user is a class and userDetails is an object based on the that class. Measurements is a class and Items is an object based on that class. App.USERDETAILS & App.MEASUREMENTS are global strings that contain file names.
Some debug lines have been left in just so progress can be tracked.
It might also be worth considering using SQL + LINQ if you are thinking of migrating to Mango and much of this could be taken care of there...
I would serialize my data (either XML or Binary) to a separate file in IsolatedStorage. Because if IsolatedStorageSettings.ApplicationSettings is overcrowded it will take longer to load any of individual settings.
Here is a general purpose method to serialize your object to xml
/// <summary>
/// Serializes any object to its XML string representation using a runtime-typed
/// XmlSerializer over an in-memory stream.
/// </summary>
public static string SerializeXml(object objectToSerialize)
{
    using (var buffer = new MemoryStream())
    {
        new XmlSerializer(objectToSerialize.GetType()).Serialize(buffer, objectToSerialize);
        buffer.Seek(0, SeekOrigin.Begin);
        using (var reader = new StreamReader(buffer))
        {
            return reader.ReadToEnd();
        }
    }
}